[ "from antlr4 import * if __name__ is not None and \".\" in __name__:", "This class defines a complete listener for a parse tree produced by PythiaFunctionCallParser.", "a parse tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree", "produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree produced by PythiaFunctionCallParser#call.", "produced by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit a parse tree", "parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a", "ANTLR 4.5.3 from antlr4 import * if __name__ is not None and \".\"", "ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self,", "tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse", "# Exit a parse tree produced by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass", "parse tree produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a", "parse tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a", "produced by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a parse tree", "tree produced by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a parse", "PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx:", "enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Float. def", "def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Integer.", "tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse", "None and \".\" in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser import", "tree produced by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter a parse", "Exit a parse tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass #", "PythiaFunctionCallParser.CallContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#call. def exitCall(self, ctx:", "def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#full_function_name.", "PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse tree produced by", "PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#argument. 
def exitArgument(self, ctx:", "PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx:", "produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse tree", "parse tree produced by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter a", "from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser # This class defines", "by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse tree produced", "parse tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree produced", "produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse tree", "produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse tree", "PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx:", "exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#argument. def", "# Exit a parse tree produced by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass", "# Enter a parse tree produced by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass", "# This class defines a complete listener for a parse tree produced by", "\".\" in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser #", "PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a parse tree produced by", "tree produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse", "by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse tree produced", "enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#full_function_name. def", "def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayValue.", "produced by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse tree", "exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#EmptyArray. def", "enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Integer. def", "not None and \".\" in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser", "PythiaFunctionCallParser#EmptyArray. 
def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse tree produced by", "a parse tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit", "a parse tree produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit", "by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse tree produced", "<filename>pythia/antlr4/PythiaFunctionCallListener.py # Generated from /Users/enrique/workspace/other/frontline/pythia/PythiaFunctionCall.g4 by ANTLR 4.5.3 from antlr4 import * if", "PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx:", "by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse tree produced", "# Exit a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass", "ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self,", "# Enter a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass", "produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse tree", "PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse tree produced by", "def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayOfValues.", "a parse tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit", "PythiaFunctionCallParser import PythiaFunctionCallParser # This class defines a complete listener for a parse", "tree produced by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit a parse", "produced by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter a parse tree", "produced by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse tree", "PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse tree produced by", "Exit a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass #", "by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse tree produced", "produced by PythiaFunctionCallParser#ArrayValue. 
def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse tree", "def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#String.", "exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Integer. def", "ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Integer. def exitInteger(self,", "def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Float.", "# Exit a parse tree produced by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass", "PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx:", "# Exit a parse tree produced by PythiaFunctionCallParser#Float. def exitFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass", "import * if __name__ is not None and \".\" in __name__: from .PythiaFunctionCallParser", ".PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser # This class defines a", "tree produced by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter a parse", "from PythiaFunctionCallParser import PythiaFunctionCallParser # This class defines a complete listener for a", "a parse tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit", "a parse tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter", "PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse tree produced by", "by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse tree produced", "produced by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a parse tree", "# Exit a parse tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass", "complete listener for a parse tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter", "Exit a parse tree produced by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass #", "# Enter a parse tree produced by PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass", "parse tree produced by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a", "tree produced by PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit a parse", "antlr4 import * if __name__ is not None and \".\" in __name__: from", "tree produced by PythiaFunctionCallParser#ArrayOfValues. 
def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse", "# Enter a parse tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass", "pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext):", "def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#String.", "* if __name__ is not None and \".\" in __name__: from .PythiaFunctionCallParser import", "PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit a parse tree produced by", "# Enter a parse tree produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass", "def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayOfValues.", "enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#String. def", "# Enter a parse tree produced by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass", "a parse tree produced by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit", "enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayValue. def", "PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse tree produced by", "__name__ is not None and \".\" in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else:", "a parse tree produced by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter", "__name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser # This class", "produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse tree", "import PythiaFunctionCallParser # This class defines a complete listener for a parse tree", "parse tree produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a", "Enter a parse tree produced by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass #", "# Exit a parse tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass", "Enter a parse tree produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass #", "ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self,", "PythiaFunctionCallParser.IntegerContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Float. 
def enterFloat(self, ctx:", "def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Integer.", "ctx: PythiaFunctionCallParser.CallContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self,", "pass # Exit a parse tree produced by PythiaFunctionCallParser#Float. def exitFloat(self, ctx: PythiaFunctionCallParser.FloatContext):", "pass # Exit a parse tree produced by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext):", "pass # Exit a parse tree produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext):", "a parse tree produced by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter", "ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#argument. def exitArgument(self,", "ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self,", "Enter a parse tree produced by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass #", "PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit a parse tree produced by", "Enter a parse tree produced by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass #", "exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#full_function_name. def", "PythiaFunctionCallParser # This class defines a complete listener for a parse tree produced", "pass # Enter a parse tree produced by PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext):", "def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#call.", "produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse tree", "ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#String. def enterString(self,", "parse tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a", "ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Float. def exitFloat(self,", "a parse tree produced by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter", "PythiaFunctionCallParser.StringContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#String. def exitString(self, ctx:", "parse tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a", "parse tree produced by PythiaFunctionCallParser#Integer. 
def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a", "def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#argument.", "Exit a parse tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass #", "tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse", "by PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit a parse tree produced", "pass # Enter a parse tree produced by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext):", "PythiaFunctionCallParser.FloatContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Float. def exitFloat(self, ctx:", "def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#argument.", "by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree produced by PythiaFunctionCallParser#call. def", "PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse tree produced by", "pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext):", "by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse tree produced", "a parse tree produced by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter", "Enter a parse tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass #", "# Generated from /Users/enrique/workspace/other/frontline/pythia/PythiaFunctionCall.g4 by ANTLR 4.5.3 from antlr4 import * if __name__", "tree produced by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse", "enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#argument. def", "ctx: PythiaFunctionCallParser.StringContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Integer. def enterInteger(self,", "a parse tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter", "produced by PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit a parse tree", "PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse tree produced by", "class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree produced by PythiaFunctionCallParser#call. def enterCall(self, ctx:", "PythiaFunctionCallParser.IntegerContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx:", "Enter a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. 
def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass #", "tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse", "enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#EmptyArray. def", "PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse tree produced by", "Exit a parse tree produced by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass #", "a complete listener for a parse tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): #", "PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse tree produced by", "import PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser # This class defines a complete", "ctx: PythiaFunctionCallParser.StringContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#String. def exitString(self,", "def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#EmptyArray.", "parse tree produced by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter a", "a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit", "parse tree produced by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a", "in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser # This", "for a parse tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse", "by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a parse tree produced", "ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self,", "by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit a parse tree produced", "PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx:", "pass # Enter a parse tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext):", "tree produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse", "a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter", "produced by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter a parse tree", "a parse tree produced by PythiaFunctionCallParser#String. 
def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit", "/Users/enrique/workspace/other/frontline/pythia/PythiaFunctionCall.g4 by ANTLR 4.5.3 from antlr4 import * if __name__ is not None", "pass # Enter a parse tree produced by PythiaFunctionCallParser#argument. def enterArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext):", "Exit a parse tree produced by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass #", "pass # Enter a parse tree produced by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext):", "# Exit a parse tree produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass", "exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def", "by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a parse tree produced", "tree produced by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a parse", "pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext):", "by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a parse tree produced", "a parse tree produced by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit", "class defines a complete listener for a parse tree produced by PythiaFunctionCallParser. class", "pass # Exit a parse tree produced by PythiaFunctionCallParser#argument. def exitArgument(self, ctx: PythiaFunctionCallParser.ArgumentContext):", "Enter a parse tree produced by PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass #", "by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse tree produced", "def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#EmptyArray.", "PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit a parse tree produced by", "produced by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a parse tree", "parse tree produced by PythiaFunctionCallParser#Float. def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a", "def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayValue.", "# Enter a parse tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass", "PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a parse tree produced by", "tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree produced by", "PythiaFunctionCallParser#call. 
def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter a parse tree produced by", "a parse tree produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter", "pass # Exit a parse tree produced by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext):", "parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a", "exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Float. def", "Enter a parse tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass #", "PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx:", "4.5.3 from antlr4 import * if __name__ is not None and \".\" in", "Generated from /Users/enrique/workspace/other/frontline/pythia/PythiaFunctionCall.g4 by ANTLR 4.5.3 from antlr4 import * if __name__ is", "PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#String. def enterString(self, ctx:", "tree produced by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a parse", "listener for a parse tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a", "pass # Enter a parse tree produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext):", "by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse tree produced", "else: from PythiaFunctionCallParser import PythiaFunctionCallParser # This class defines a complete listener for", "Exit a parse tree produced by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass #", "pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext):", "a parse tree produced by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass # Exit", "if __name__ is not None and \".\" in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser", "exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayValue. def", "by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse tree produced", "pass # Exit a parse tree produced by PythiaFunctionCallParser#Integer. def exitInteger(self, ctx: PythiaFunctionCallParser.IntegerContext):", "pass # Exit a parse tree produced by PythiaFunctionCallParser#EmptyArray. def exitEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext):", "# Enter a parse tree produced by PythiaFunctionCallParser#Integer. def enterInteger(self, ctx: PythiaFunctionCallParser.IntegerContext): pass", "Exit a parse tree produced by PythiaFunctionCallParser#full_function_name. 
def exitFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass #", "ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self,", "enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#call. def", "defines a complete listener for a parse tree produced by PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener):", "PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter a parse tree produced by", "tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse", "PythiaFunctionCallParser. class PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree produced by PythiaFunctionCallParser#call. def enterCall(self,", "by PythiaFunctionCallParser#String. def exitString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Enter a parse tree produced", "PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser # This class defines a complete listener", "tree produced by PythiaFunctionCallParser#ArrayOfValues. def exitArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Enter a parse", "# Exit a parse tree produced by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass", "parse tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a", "by PythiaFunctionCallParser#call. def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter a parse tree produced", "ctx: PythiaFunctionCallParser.Full_function_nameContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#argument. def enterArgument(self,", "from /Users/enrique/workspace/other/frontline/pythia/PythiaFunctionCall.g4 by ANTLR 4.5.3 from antlr4 import * if __name__ is not", "def exitCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#full_function_name.", "parse tree produced by PythiaFunctionCallParser#String. def enterString(self, ctx: PythiaFunctionCallParser.StringContext): pass # Exit a", "def enterFloat(self, ctx: PythiaFunctionCallParser.FloatContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#Float.", "PythiaFunctionCallParser.Full_function_nameContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#full_function_name. def exitFull_function_name(self, ctx:", "produced by PythiaFunctionCallParser#EmptyArray. def enterEmptyArray(self, ctx: PythiaFunctionCallParser.EmptyArrayContext): pass # Exit a parse tree", "enterArrayOfValues(self, ctx: PythiaFunctionCallParser.ArrayOfValuesContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayOfValues. def", "PythiaFunctionCallParser.CallContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx:", "is not None and \".\" in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from", "PythiaFunctionCallParser.StringContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Integer. 
def enterInteger(self, ctx:", "by ANTLR 4.5.3 from antlr4 import * if __name__ is not None and", "# Enter a parse tree produced by PythiaFunctionCallParser#full_function_name. def enterFull_function_name(self, ctx: PythiaFunctionCallParser.Full_function_nameContext): pass", "ctx: PythiaFunctionCallParser.CallContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#call. def exitCall(self,", "ctx: PythiaFunctionCallParser.IntegerContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#Float. def enterFloat(self,", "exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a parse tree produced by PythiaFunctionCallParser#String. def", "and \".\" in __name__: from .PythiaFunctionCallParser import PythiaFunctionCallParser else: from PythiaFunctionCallParser import PythiaFunctionCallParser", "PythiaFunctionCallListener(ParseTreeListener): # Enter a parse tree produced by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext):", "parse tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Enter a", "Enter a parse tree produced by PythiaFunctionCallParser#ArrayValue. def enterArrayValue(self, ctx: PythiaFunctionCallParser.ArrayValueContext): pass #", "ctx: PythiaFunctionCallParser.ArrayValueContext): pass # Exit a parse tree produced by PythiaFunctionCallParser#ArrayValue. def exitArrayValue(self,", "parse tree produced by PythiaFunctionCallParser#call. def enterCall(self, ctx: PythiaFunctionCallParser.CallContext): pass # Exit a" ]
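# Usage sketch (not part of the generated file above). It assumes the matching
# ANTLR-generated lexer module, PythiaFunctionCallLexer, sits alongside the
# parser (the default ANTLR naming for the same grammar), and the input string
# is only illustrative of a function-call DSL. The listener confirms 'call' is
# a parser rule, so it is used as the entry point here.
from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
from PythiaFunctionCallLexer import PythiaFunctionCallLexer
from PythiaFunctionCallParser import PythiaFunctionCallParser
from PythiaFunctionCallListener import PythiaFunctionCallListener


class PrintingListener(PythiaFunctionCallListener):
    # Override only the hooks you care about; the base class stubs the rest.
    def enterCall(self, ctx: PythiaFunctionCallParser.CallContext):
        print("call:", ctx.getText())


tokens = CommonTokenStream(PythiaFunctionCallLexer(InputStream("f.g(1, 2.5, [])")))
tree = PythiaFunctionCallParser(tokens).call()  # 'call' is the entry rule
ParseTreeWalker.DEFAULT.walk(PrintingListener(), tree)
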
[ "\"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ] ) def cleanup(self) -> None: logging.info(\"Stopping agent", "Dict[str, str] def __init__(self, name: str, image: str, environment: Dict[str, str]) -> None:", "__init__(self, name: str, image: str, environment: Dict[str, str]) -> None: self.name = name", "\"stop\", self.name]) def get_agent_by_label(name: str, label: str) -> Agent: # TODO: lookup label", "-> None: logging.info(\"Starting agent '%s' based on image '%s'\", self.name, self.image) subprocess.run( [", "#!/usr/bin/env python3 import logging import subprocess from typing import Dict class Agent: name:", "agent '%s' based on image '%s'\", self.name, self.image) subprocess.run( [ \"docker\", \"run\", \"-d\",", "str, image: str, environment: Dict[str, str]) -> None: self.name = name self.image =", "image self.environment = environment def run(self) -> None: logging.info(\"Starting agent '%s' based on", "str, environment: Dict[str, str]) -> None: self.name = name self.image = image self.environment", "import subprocess from typing import Dict class Agent: name: str image: str environment:", "self.image) subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\",", "self.name]) def get_agent_by_label(name: str, label: str) -> Agent: # TODO: lookup label in", ") def cleanup(self) -> None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def", "None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str, label: str)", "\"/bin/sleep\", \"infinity\", ] ) def cleanup(self) -> None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\",", "python3 import logging import subprocess from typing import Dict class Agent: name: str", "'%s'\", self.name, self.image) subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image,", "name: str, image: str, environment: Dict[str, str]) -> None: self.name = name self.image", "def __init__(self, name: str, image: str, environment: Dict[str, str]) -> None: self.name =", "get_agent_by_label(name: str, label: str) -> Agent: # TODO: lookup label in config file?", "environment: Dict[str, str]) -> None: self.name = name self.image = image self.environment =", "subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ]", "-> Agent: # TODO: lookup label in config file? 
return Agent(\"ci-agent\", \"ubuntu:20.04\", {})", "self.environment = environment def run(self) -> None: logging.info(\"Starting agent '%s' based on image", "None: logging.info(\"Starting agent '%s' based on image '%s'\", self.name, self.image) subprocess.run( [ \"docker\",", "[ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ] )", "on image '%s'\", self.name, self.image) subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\",", "import Dict class Agent: name: str image: str environment: Dict[str, str] def __init__(self,", "based on image '%s'\", self.name, self.image) subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\",", "self.image = image self.environment = environment def run(self) -> None: logging.info(\"Starting agent '%s'", "str] def __init__(self, name: str, image: str, environment: Dict[str, str]) -> None: self.name", "name self.image = image self.environment = environment def run(self) -> None: logging.info(\"Starting agent", "= image self.environment = environment def run(self) -> None: logging.info(\"Starting agent '%s' based", "self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str, label: str) -> Agent: # TODO:", "import logging import subprocess from typing import Dict class Agent: name: str image:", "'%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str, label: str) -> Agent: #", "name: str image: str environment: Dict[str, str] def __init__(self, name: str, image: str,", "self.name, self.image, \"/bin/sleep\", \"infinity\", ] ) def cleanup(self) -> None: logging.info(\"Stopping agent '%s'\",", "str) -> Agent: # TODO: lookup label in config file? return Agent(\"ci-agent\", \"ubuntu:20.04\",", "environment def run(self) -> None: logging.info(\"Starting agent '%s' based on image '%s'\", self.name,", "logging.info(\"Starting agent '%s' based on image '%s'\", self.name, self.image) subprocess.run( [ \"docker\", \"run\",", "str, label: str) -> Agent: # TODO: lookup label in config file? 
return", "logging import subprocess from typing import Dict class Agent: name: str image: str", "Agent: name: str image: str environment: Dict[str, str] def __init__(self, name: str, image:", "\"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ] ) def cleanup(self) -> None:", "self.name = name self.image = image self.environment = environment def run(self) -> None:", "str]) -> None: self.name = name self.image = image self.environment = environment def", "\"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ] ) def cleanup(self) ->", "subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str, label: str) -> Agent: # TODO: lookup", "= environment def run(self) -> None: logging.info(\"Starting agent '%s' based on image '%s'\",", "-> None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str, label:", "'%s' based on image '%s'\", self.name, self.image) subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\",", "logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str, label: str) ->", "\"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ] ) def cleanup(self)", "from typing import Dict class Agent: name: str image: str environment: Dict[str, str]", "str environment: Dict[str, str] def __init__(self, name: str, image: str, environment: Dict[str, str])", "image '%s'\", self.name, self.image) subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name,", "-> None: self.name = name self.image = image self.environment = environment def run(self)", "\"infinity\", ] ) def cleanup(self) -> None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\",", "= name self.image = image self.environment = environment def run(self) -> None: logging.info(\"Starting", "self.image, \"/bin/sleep\", \"infinity\", ] ) def cleanup(self) -> None: logging.info(\"Stopping agent '%s'\", self.name)", "typing import Dict class Agent: name: str image: str environment: Dict[str, str] def", "label: str) -> Agent: # TODO: lookup label in config file? 
return Agent(\"ci-agent\",", "self.name, self.image) subprocess.run( [ \"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\",", "def get_agent_by_label(name: str, label: str) -> Agent: # TODO: lookup label in config", "image: str environment: Dict[str, str] def __init__(self, name: str, image: str, environment: Dict[str,", "agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str, label: str) -> Agent:", "Dict class Agent: name: str image: str environment: Dict[str, str] def __init__(self, name:", "class Agent: name: str image: str environment: Dict[str, str] def __init__(self, name: str,", "Dict[str, str]) -> None: self.name = name self.image = image self.environment = environment", "def run(self) -> None: logging.info(\"Starting agent '%s' based on image '%s'\", self.name, self.image)", "image: str, environment: Dict[str, str]) -> None: self.name = name self.image = image", "environment: Dict[str, str] def __init__(self, name: str, image: str, environment: Dict[str, str]) ->", "str image: str environment: Dict[str, str] def __init__(self, name: str, image: str, environment:", "def cleanup(self) -> None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name:", "\"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ] ) def cleanup(self) -> None: logging.info(\"Stopping", "run(self) -> None: logging.info(\"Starting agent '%s' based on image '%s'\", self.name, self.image) subprocess.run(", "] ) def cleanup(self) -> None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name])", "cleanup(self) -> None: logging.info(\"Stopping agent '%s'\", self.name) subprocess.run([\"docker\", \"stop\", self.name]) def get_agent_by_label(name: str,", "None: self.name = name self.image = image self.environment = environment def run(self) ->", "\"docker\", \"run\", \"-d\", \"--rm\", \"-it\", \"--name\", self.name, self.image, \"/bin/sleep\", \"infinity\", ] ) def", "subprocess from typing import Dict class Agent: name: str image: str environment: Dict[str," ]
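# Usage sketch: start an agent, run a command inside it, and always stop it.
# The label lookup is stubbed above, so any label value works for now; the
# "linux" label and the uname command are illustrative.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    agent = get_agent_by_label("ci-agent", label="linux")
    agent.run()
    try:
        subprocess.run(["docker", "exec", agent.name, "uname", "-a"], check=True)
    finally:
        agent.cleanup()
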
[ "v_h def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h,", "EllipticSolver: def __init__(self, v_h): self.v_h = v_h def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx", "sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices", "v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h)", "__init__(self, v_h): self.v_h = v_h def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx =", "bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h) #", "self.v_h = v_h def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S", "sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx # self.Ky def dtn_map(self): #", "= stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx # self.Ky def", "update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def", "stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx # self.Ky def dtn_map(self):", "def build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx # self.Ky def dtn_map(self): # do", "= v_h def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S =", "def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec)", "vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass", "self.Mass = mass_matrix(self.v_h) # self.Kx # self.Ky def dtn_map(self): # do this here", "class EllipticSolver: def __init__(self, v_h): self.v_h = v_h def update_matrices(self, sigma_vec): vol_idx =", "def __init__(self, v_h): self.v_h = v_h def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx", "build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx # self.Ky def dtn_map(self): # do this", "v_h): self.v_h = v_h def update_matrices(self, sigma_vec): vol_idx = v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx", "= v_h.mesh.vol_idx bdy_idx = v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass =", "= v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx", "self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx # self.Ky", "v_h.mesh.bdy_idx self.S = stiffness_matrix(self.v_h, sigma_vec) def build_matrices self.Mass = mass_matrix(self.v_h) # self.Kx #" ]
[ "= re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value != 0 and _isleep(value) for i in", "True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }] ) class TestConverter(TestCase): def", "}] ) class TestConverter(TestCase): def test_path_converters(self): case = [('bin', '1010'), ('bin', 12), ('oct',", "re from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE:", "% (m, d) with self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value)) if m == 2", "def test_failed_case(self): from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch):", "12), ('oct', '7'), ('oct', 7), ('hex', 'd'), ('hex', 12), ('bin_str', '1010'), ('oct_str', '236'),", "d) with self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if m in [4, 6, 9,", "bool(regex_date_30_fullmatch(value)) if m in [4, 6, 9, 11] and d in range(1, 31):", "d in range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import re from django_boost.urls.converters.date", "3, 5, 7, 8, 10, 12] and d in range(1, 32): self.assertTrue(result) else:", "test_path_converters(self): case = [('bin', '1010'), ('bin', 12), ('oct', '7'), ('oct', 7), ('hex', 'd'),", "ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True,", "def test_date_28_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m,", "12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'), ('float', 1.1), ('float', '1.1'), ('float', 1),", "self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch =", "= re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d)", "!= 0 and _isleep(value) for i in range(10000): value = str(i) with self.subTest(value,", "'2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m, d) for m in", "i in range(10000): value = str(i) with self.subTest(value, value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i),", "('float', '1'), ('date', '2020/2/29'), ] for name, value in case: url = reverse(name,", "range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_29", "test_date_28_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d", "range(1, 29): self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self): import re from datetime import datetime", "month=m, day=d) return True except ValueError: return False for y in range(10000): for", "datetime(year=y, month=m, day=d) return True except ValueError: return False for y in range(10000):", "datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): 
try:", "import TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')],", "range(10000): for m in range(14): for d in range(32): with self.subTest(\"%s/%s/%s\" % (y,", ") class TestConverter(TestCase): def test_path_converters(self): case = [('bin', '1010'), ('bin', 12), ('oct', '7'),", "regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): try: datetime(year=y, month=m, day=d) return True", "and d in range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import re from", "29): self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self): import re from datetime import datetime from", "d) with self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value)) if m == 2 and d", "7, 8, 10, 12] and d in range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def", "with self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if m in [4, 6, 9, 11]", "if m in [4, 6, 9, 11] and d in range(1, 31): self.assertTrue(result)", "value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re from django_boost.urls.converters.date import", "d) with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if m in [1, 3, 5,", "_isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value !=", "regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m,", "re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE:", "m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value):", "test_failed_case(self): from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date',", "test_date_31_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d", "False for y in range(10000): for m in range(14): for d in range(32):", "in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result =", "bool(regex_date_31_fullmatch(value)) if m in [1, 3, 5, 7, 8, 10, 12] and d", "('hex', 12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'), ('float', 1.1), ('float', '1.1'), ('float',", "m in range(20) for d in range(40)] def test_year_regex(self): import re from calendar", "True except ValueError: return False for y in range(10000): for m in range(14):", "REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT %", "30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch", "_isleep(value) for i in range(10000): value = str(i) with self.subTest(value, value=value): result =", "test_date_29_regex(self): import re from django_boost.urls.converters.date import 
REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d", "range(14): for d in range(32): with self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y, m,", "with self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value)) if m == 2 and d in", "m == 2 and d in range(1, 29): self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self):", "self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch =", "% (m, d) with self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if m in [4,", "7), ('hex', 'd'), ('hex', 12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'), ('float', 1.1),", "self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m, d)", "value in case: url = reverse(name, kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200)", "result = bool(regex_date_28_fullmatch(value)) if m == 2 and d in range(1, 29): self.assertTrue(result)", "'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ], },", "in [4, 6, 9, 11] and d in range(1, 31): self.assertTrue(result) else: self.assertFalse(result)", "for i in range(10000): value = str(i) with self.subTest(value, value=value): result = bool(regex_is_leap(value))", "re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE:", "from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE: value", "isleap(value): return value != 0 and _isleep(value) for i in range(10000): value =", "in range(20) for d in range(40)] def test_year_regex(self): import re from calendar import", "import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT", "import os from django.test import override_settings from django.urls import reverse from django_boost.test import", "], }, }] ) class TestConverter(TestCase): def test_path_converters(self): case = [('bin', '1010'), ('bin',", "self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if m in [1, 3, 5, 7, 8,", "self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch", "m, d): try: datetime(year=y, month=m, day=d) return True except ValueError: return False for", "self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y, m, d), bool( regex_fullmatch(\"%s/%s/%s\" % (y, m,", "self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self): import re from datetime import datetime from django_boost.urls.converters.date", "kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from django.urls.exceptions import NoReverseMatch", "reverse(name, kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from 
django.urls.exceptions import", "with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if m in [1, 3, 5, 7,", "else: self.assertFalse(result) def test_date_30_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch", "% (m, d) with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if m == 2", "value=value): result = bool(regex_date_30_fullmatch(value)) if m in [4, 6, 9, 11] and d", "d): try: datetime(year=y, month=m, day=d) return True except ValueError: return False for y", "re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value != 0 and _isleep(value) for i in range(10000):", "REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value != 0 and _isleep(value) for", "re from datetime import datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def", "= bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch", "self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if m in [4, 6, 9, 11] and", "from django.urls import reverse from django_boost.test import TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls',", "d in range(32): with self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y, m, d), bool(", "re from calendar import isleap as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap =", "def test_year_regex(self): import re from calendar import isleap as _isleep from django_boost.urls.converters.date import", "re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with", "test_date_time_regex(self): import re from datetime import datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch =", "import re from calendar import isleap as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap", "value=value): result = bool(regex_date_31_fullmatch(value)) if m in [1, 3, 5, 7, 8, 10,", "'234'), ('float', 1.1), ('float', '1.1'), ('float', 1), ('float', '1'), ('date', '2020/2/29'), ] for", "'2020/2/29'), ] for name, value in case: url = reverse(name, kwargs={name: value}) response", "range(20) for d in range(40)] def test_year_regex(self): import re from calendar import isleap", "5, 7, 8, 10, 12] and d in range(1, 32): self.assertTrue(result) else: self.assertFalse(result)", "django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): try: datetime(year=y, month=m,", "d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result", "('float', 1), ('float', '1'), ('date', '2020/2/29'), ] for name, value in case: url", "= bool(regex_date_28_fullmatch(value)) if m == 2 and d in range(1, 29): self.assertTrue(result) else:", "from django.test import override_settings from django.urls import reverse from django_boost.test import TestCase ROOT_PATH", "DATE_TEST_CASE = [(m, d) for m in range(20) for d in range(40)] def", "TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m, d) for m in range(20) for", "import reverse from django_boost.test import 
TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND':", "def test_date_time_regex(self): import re from datetime import datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch", "for d in range(40)] def test_year_regex(self): import re from calendar import isleap as", "= re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d)", "with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if m == 2 and d in", "% (m, d) with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if m in [1,", "def test_date_31_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m,", "from datetime import datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y,", "result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_31", "os from django.test import override_settings from django.urls import reverse from django_boost.test import TestCase", "for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with self.subTest(value,", "in [1, 3, 5, 7, 8, 10, 12] and d in range(1, 32):", "else: self.assertFalse(result) def test_date_29_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch", "kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m, d) for m", "in range(14): for d in range(32): with self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y,", "django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE: value =", "range(10000): value = str(i) with self.subTest(value, value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def", "'1010'), ('bin', 12), ('oct', '7'), ('oct', 7), ('hex', 'd'), ('hex', 12), ('bin_str', '1010'),", "with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m,", "== 2 and d in range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import", "class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m, d) for m in range(20)", "ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [", "return True except ValueError: return False for y in range(10000): for m in", "= re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): try: datetime(year=y, month=m, day=d) return True except", "<reponame>ChanTsune/Django-Boost<gh_stars>10-100 import os from django.test import override_settings from django.urls import reverse from django_boost.test", "[(m, d) for m in range(20) for d in range(40)] def test_year_regex(self): import", "200) def test_failed_case(self): from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with", "= 
self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if m", "'1'), ('date', '2020/2/29'), ] for name, value in case: url = reverse(name, kwargs={name:", "'1.1'), ('float', 1), ('float', '1'), ('date', '2020/2/29'), ] for name, value in case:", "in range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import re from django_boost.urls.converters.date import", "def test_date_29_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m,", "'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }] ) class", "django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value != 0 and", "REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT %", "self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if m in", "[1, 3, 5, 7, 8, 10, 12] and d in range(1, 32): self.assertTrue(result)", "for m in range(14): for d in range(32): with self.subTest(\"%s/%s/%s\" % (y, m,", "result = bool(regex_date_30_fullmatch(value)) if m in [4, 6, 9, 11] and d in", "as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value", "import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class", "= [(m, d) for m in range(20) for d in range(40)] def test_year_regex(self):", "result = bool(regex_date_31_fullmatch(value)) if m in [1, 3, 5, 7, 8, 10, 12]", "range(40)] def test_year_regex(self): import re from calendar import isleap as _isleep from django_boost.urls.converters.date", "in case: url = reverse(name, kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def", "value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if", "('float', '1.1'), ('float', 1), ('float', '1'), ('date', '2020/2/29'), ] for name, value in", "'1010'), ('oct_str', '236'), ('hex_str', '234'), ('float', 1.1), ('float', '1.1'), ('float', 1), ('float', '1'),", "url = reverse(name, kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from", "6, 9, 11] and d in range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self):", "self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value))", "import isleap as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value):", "def test_date_30_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m,", "def test_path_converters(self): case = [('bin', '1010'), ('bin', 12), ('oct', '7'), ('oct', 7), ('hex',", "for name, value in case: url = reverse(name, kwargs={name: value}) response = self.client.get(url)", "import re from django_boost.urls.converters.date 
import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d in", "regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m,", "if m in [1, 3, 5, 7, 8, 10, 12] and d in", "value=value): result = bool(regex_date_28_fullmatch(value)) if m == 2 and d in range(1, 29):", "in range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import re from django_boost.urls.converters.date import", "'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }] ) class TestConverter(TestCase): def test_path_converters(self): case =", "1), ('float', '1'), ('date', '2020/2/29'), ] for name, value in case: url =", "'d'), ('hex', 12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'), ('float', 1.1), ('float', '1.1'),", "django_boost.test import TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH,", "('bin', 12), ('oct', '7'), ('oct', 7), ('hex', 'd'), ('hex', 12), ('bin_str', '1010'), ('oct_str',", "in range(1, 29): self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self): import re from datetime import", "value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value)) if", "('oct', 7), ('hex', 'd'), ('hex', 12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'), ('float',", "re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with", "value=value): result = bool(regex_date_29_fullmatch(value)) if m == 2 and d in range(1, 30):", "[4, 6, 9, 11] and d in range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def", "response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch):", "self.subTest(value, value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re from django_boost.urls.converters.date", "1.1), ('float', '1.1'), ('float', 1), ('float', '1'), ('date', '2020/2/29'), ] for name, value", "REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT %", "9, 11] and d in range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import", "self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value)) if m == 2 and d in range(1,", "self.assertFalse(result) def test_date_29_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for", "case = [('bin', '1010'), ('bin', 12), ('oct', '7'), ('oct', 7), ('hex', 'd'), ('hex',", "reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\"", "bool(regex_date_28_fullmatch(value)) if m == 2 and d in range(1, 29): self.assertTrue(result) else: self.assertFalse(result)", "and d in range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import re from", "d in range(1, 29): self.assertTrue(result) else: 
self.assertFalse(result) def test_date_time_regex(self): import re from datetime", "in range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import re from django_boost.urls.converters.date import", "('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'), ('float', 1.1), ('float', '1.1'), ('float', 1), ('float',", "and d in range(1, 29): self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self): import re from", "('float', 1.1), ('float', '1.1'), ('float', 1), ('float', '1'), ('date', '2020/2/29'), ] for name,", "value != 0 and _isleep(value) for i in range(10000): value = str(i) with", "import override_settings from django.urls import reverse from django_boost.test import TestCase ROOT_PATH = os.path.dirname(__file__)", "self.assertFalse(result) def test_date_30_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for", "import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT", "except ValueError: return False for y in range(10000): for m in range(14): for", "self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value))", "django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE: value =", "= self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if m", "self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value)) if m ==", "import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value != 0 and _isleep(value)", "0 and _isleep(value) for i in range(10000): value = str(i) with self.subTest(value, value=value):", "[('bin', '1010'), ('bin', 12), ('oct', '7'), ('oct', 7), ('hex', 'd'), ('hex', 12), ('bin_str',", "TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS':", "def isleap(value): return value != 0 and _isleep(value) for i in range(10000): value", "y in range(10000): for m in range(14): for d in range(32): with self.subTest(\"%s/%s/%s\"", "TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent',", "12] and d in range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import re", "isleap as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return", "('oct_str', '236'), ('hex_str', '234'), ('float', 1.1), ('float', '1.1'), ('float', 1), ('float', '1'), ('date',", "with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT", "import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): try: datetime(year=y, month=m, day=d)", "value = self.DATE_FORMAT % 
(m, d) with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if", "result = bool(regex_date_29_fullmatch(value)) if m == 2 and d in range(1, 30): self.assertTrue(result)", "= bool(regex_date_29_fullmatch(value)) if m == 2 and d in range(1, 30): self.assertTrue(result) else:", "try: datetime(year=y, month=m, day=d) return True except ValueError: return False for y in", "reverse from django_boost.test import TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates',", "'236'), ('hex_str', '234'), ('float', 1.1), ('float', '1.1'), ('float', 1), ('float', '1'), ('date', '2020/2/29'),", "import datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d):", "(m, d) with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if m in [1, 3,", "re from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE:", "class TestConverter(TestCase): def test_path_converters(self): case = [('bin', '1010'), ('bin', 12), ('oct', '7'), ('oct',", "import re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d in", "in range(40)] def test_year_regex(self): import re from calendar import isleap as _isleep from", "self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT =", "'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ],", "calendar import isleap as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def", "django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE: value =", "and _isleep(value) for i in range(10000): value = str(i) with self.subTest(value, value=value): result", "2 and d in range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import re", "kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE", "value = str(i) with self.subTest(value, value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self):", "return value != 0 and _isleep(value) for i in range(10000): value = str(i)", "m in [4, 6, 9, 11] and d in range(1, 31): self.assertTrue(result) else:", "with self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y, m, d), bool( regex_fullmatch(\"%s/%s/%s\" % (y,", "(m, d) with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if m == 2 and", "d) for m in range(20) for d in range(40)] def test_year_regex(self): import re", "override_settings from django.urls import reverse from django_boost.test import TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings(", "(m, d) with self.subTest(value, value=value): result = 
bool(regex_date_29_fullmatch(value)) if m == 2 and", "day=d) return True except ValueError: return False for y in range(10000): for m", "return False for y in range(10000): for m in range(14): for d in", "from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for m, d in self.DATE_TEST_CASE: value", "DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m, d) for m in range(20) for d", "bool(regex_date_29_fullmatch(value)) if m == 2 and d in range(1, 30): self.assertTrue(result) else: self.assertFalse(result)", "d in range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import re from django_boost.urls.converters.date", "import re from datetime import datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch", "import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT", "range(32): with self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y, m, d), bool( regex_fullmatch(\"%s/%s/%s\" %", "str(i) with self.subTest(value, value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re", "[ 'django_boost.context_processors.user_agent', ], }, }] ) class TestConverter(TestCase): def test_path_converters(self): case = [('bin',", "'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }] ) class TestConverter(TestCase):", "= self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value)) if m", "from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE: value", "= re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d)", "11] and d in range(1, 31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import re", "case: url = reverse(name, kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self):", "= os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS':", "NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase):", "self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if m == 2 and d in range(1,", "django.test import override_settings from django.urls import reverse from django_boost.test import TestCase ROOT_PATH =", "re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with", "for y in range(10000): for m in range(14): for d in range(32): with", "import re from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d in", "d) with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if m == 2 and d", "regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m,", 
"bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch =", "8, 10, 12] and d in range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self):", "= [('bin', '1010'), ('bin', 12), ('oct', '7'), ('oct', 7), ('hex', 'd'), ('hex', 12),", "m in [1, 3, 5, 7, 8, 10, 12] and d in range(1,", "else: self.assertFalse(result) def test_date_28_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch", "'django_boost.context_processors.user_agent', ], }, }] ) class TestConverter(TestCase): def test_path_converters(self): case = [('bin', '1010'),", "self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_29_fullmatch(value))", "from calendar import isleap as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch", "and d in range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import re from", "regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m,", "REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): try: datetime(year=y, month=m, day=d) return", "for m in range(20) for d in range(40)] def test_year_regex(self): import re from", "('hex_str', '234'), ('float', 1.1), ('float', '1.1'), ('float', 1), ('float', '1'), ('date', '2020/2/29'), ]", "from django_boost.test import TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS':", "('oct', '7'), ('oct', 7), ('hex', 'd'), ('hex', 12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str',", "= self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if m", "= self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float',", "if m == 2 and d in range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def", "'1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE =", "self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch =", "('date', '2020/2/29'), ] for name, value in case: url = reverse(name, kwargs={name: value})", "= reverse(name, kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from django.urls.exceptions", "datetime import datetime from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m,", "self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_28_fullmatch(value)) if m ==", "test_date_30_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d", "self.DATE_TEST_CASE: value = 
self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value))", "django.urls import reverse from django_boost.test import TestCase ROOT_PATH = os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{", "from django_boost.urls.converters.date import REGEX_DATE regex_fullmatch = re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): try: datetime(year=y,", "range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_30", "value}) response = self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from django.urls.exceptions import NoReverseMatch with", "32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_30 regex_date_30_fullmatch", "from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE: value", "{ 'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }] ) class TestConverter(TestCase): def test_path_converters(self): case", "m == 2 and d in range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self):", "self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_31_fullmatch(value)) if m in", "in range(10000): for m in range(14): for d in range(32): with self.subTest(\"%s/%s/%s\" %", "from django_boost.urls.converters.date import REGEX_LEAP_YEAR regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value != 0", "'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }]", "d in range(40)] def test_year_regex(self): import re from calendar import isleap as _isleep", "is_valid_date(y, m, d): try: datetime(year=y, month=m, day=d) return True except ValueError: return False", "in range(32): with self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y, m, d), bool( regex_fullmatch(\"%s/%s/%s\"", "[os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }] )", "d in range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import re from django_boost.urls.converters.date", "test_year_regex(self): import re from calendar import isleap as _isleep from django_boost.urls.converters.date import REGEX_LEAP_YEAR", "% (y, m, d)): self.assertEqual(is_valid_date(y, m, d), bool( regex_fullmatch(\"%s/%s/%s\" % (y, m, d))))", "ValueError: return False for y in range(10000): for m in range(14): for d", "self.client.get(url) self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float':", "= \"%d/%d\" DATE_TEST_CASE = [(m, d) for m in range(20) for d in", "self.assertFalse(result) def test_date_time_regex(self): import re from datetime import datetime from django_boost.urls.converters.date import REGEX_DATE", "TestConverter(TestCase): def test_path_converters(self): case = [('bin', '1010'), ('bin', 12), ('oct', '7'), ('oct', 7),", "= str(i) with self.subTest(value, value=value): 
result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import", "('hex', 'd'), ('hex', 12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'), ('float', 1.1), ('float',", "re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d) with", "os.path.dirname(__file__) @override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': {", "import REGEX_DATE_30 regex_date_30_fullmatch = re.compile(REGEX_DATE_30).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT", "m in range(14): for d in range(32): with self.subTest(\"%s/%s/%s\" % (y, m, d)):", "= bool(regex_date_30_fullmatch(value)) if m in [4, 6, 9, 11] and d in range(1,", "django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d in self.DATE_TEST_CASE: value =", "\"%d/%d\" DATE_TEST_CASE = [(m, d) for m in range(20) for d in range(40)]", "'7'), ('oct', 7), ('hex', 'd'), ('hex', 12), ('bin_str', '1010'), ('oct_str', '236'), ('hex_str', '234'),", "2 and d in range(1, 29): self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self): import re", "'OPTIONS': { 'context_processors': [ 'django_boost.context_processors.user_agent', ], }, }] ) class TestConverter(TestCase): def test_path_converters(self):", "import re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for m, d in", "= re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT % (m, d)", "] for name, value in case: url = reverse(name, kwargs={name: value}) response =", "else: self.assertFalse(result) def test_date_time_regex(self): import re from datetime import datetime from django_boost.urls.converters.date import", "@override_settings( ROOT_URLCONF='tests.tests.urls_converters.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(ROOT_PATH, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors':", "for d in range(32): with self.subTest(\"%s/%s/%s\" % (y, m, d)): self.assertEqual(is_valid_date(y, m, d),", "django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date': '2019/2/29'})", "10, 12] and d in range(1, 32): self.assertTrue(result) else: self.assertFalse(result) def test_date_30_regex(self): import", "def is_valid_date(y, m, d): try: datetime(year=y, month=m, day=d) return True except ValueError: return", "reverse('date', kwargs={'date': '2019/2/29'}) class TestRegex(TestCase): DATE_FORMAT = \"%d/%d\" DATE_TEST_CASE = [(m, d) for", "re.compile(REGEX_DATE).fullmatch def is_valid_date(y, m, d): try: datetime(year=y, month=m, day=d) return True except ValueError:", "with self.subTest(value, value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result) def test_date_31_regex(self): import re from", "range(1, 30): self.assertTrue(result) else: self.assertFalse(result) def test_date_28_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_28", "regex_is_leap = re.compile(REGEX_LEAP_YEAR).fullmatch def isleap(value): return value != 0 and _isleep(value) for i", "(m, d) with 
self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if m in [4, 6,", "REGEX_DATE_29 regex_date_29_fullmatch = re.compile(REGEX_DATE_29).fullmatch for m, d in self.DATE_TEST_CASE: value = self.DATE_FORMAT %", "from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'}) with self.assertRaises(NoReverseMatch): reverse('date', kwargs={'date':", "if m == 2 and d in range(1, 29): self.assertTrue(result) else: self.assertFalse(result) def", "in range(10000): value = str(i) with self.subTest(value, value=value): result = bool(regex_is_leap(value)) self.assertEqual(isleap(i), result)", "31): self.assertTrue(result) else: self.assertFalse(result) def test_date_29_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_29 regex_date_29_fullmatch", "self.assertStatusCodeEqual(response, 200) def test_failed_case(self): from django.urls.exceptions import NoReverseMatch with self.assertRaises(NoReverseMatch): reverse('float', kwargs={'float': '1.'})", "= bool(regex_date_31_fullmatch(value)) if m in [1, 3, 5, 7, 8, 10, 12] and", "result) def test_date_31_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_31 regex_date_31_fullmatch = re.compile(REGEX_DATE_31).fullmatch for", "name, value in case: url = reverse(name, kwargs={name: value}) response = self.client.get(url) self.assertStatusCodeEqual(response,", "}, }] ) class TestConverter(TestCase): def test_path_converters(self): case = [('bin', '1010'), ('bin', 12),", "self.assertFalse(result) def test_date_28_regex(self): import re from django_boost.urls.converters.date import REGEX_DATE_28 regex_date_28_fullmatch = re.compile(REGEX_DATE_28).fullmatch for", "value = self.DATE_FORMAT % (m, d) with self.subTest(value, value=value): result = bool(regex_date_30_fullmatch(value)) if", "== 2 and d in range(1, 29): self.assertTrue(result) else: self.assertFalse(result) def test_date_time_regex(self): import" ]
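# The tests above treat REGEX_DATE_28/29/30/31 and REGEX_LEAP_YEAR as black
# boxes imported from django_boost.urls.converters.date. As a rough
# illustration of what the suite demands of such a pattern, the standalone
# sketch below defines a hypothetical pattern for the 31-day months (the name
# DEMO_REGEX_DATE_31 and the exact regex text are assumptions for this sketch,
# not django_boost's actual definition) and checks it with the same month/day
# oracle that TestRegex.test_date_31_regex uses.
import re

# Hypothetical: months 1, 3, 5, 7, 8, 10 and 12 followed by a day of 1-31;
# the tests build values with "%d/%d", so there are never leading zeros.
DEMO_REGEX_DATE_31 = r"(?:[13578]|1[02])/(?:[1-9]|[12][0-9]|3[01])"

if __name__ == "__main__":
    _fullmatch = re.compile(DEMO_REGEX_DATE_31).fullmatch
    for m in range(20):
        for d in range(40):
            # Same acceptance condition as test_date_31_regex.
            expected = m in [1, 3, 5, 7, 8, 10, 12] and d in range(1, 32)
            assert bool(_fullmatch("%d/%d" % (m, d))) == expected
    print("DEMO_REGEX_DATE_31 agrees with the test_date_31_regex oracle")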
[ "and the following disclaimer in the documentation # and/or other materials provided with", "0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self):", "0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True)", "SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT", "test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks times \"\"\" self.DUT.minimum_time", "@attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation) __init__ should return a Validation model \"\"\"", "\"\"\" (TestValidation) calculate returns False on successfully calculating tasks times \"\"\" self.DUT.minimum_time =", "Validation __author__ = '<NAME>' __email__ = '<EMAIL>' __organization__ = 'ReliaQual Associates, LLC' __copyright__", "This is the test class for testing Validation module algorithms and models. \"\"\"", "without # modification, are permitted provided that the following conditions are met: #", "0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True,", "in binary form must reproduce the above copyright notice, # this list of", "model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification,", "0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should", "passed \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0,", "ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "0.0, 0.0, 0.0, 719163, 'Date', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "\"\"\" (TestValidation) calculate returns False on successfully calculating tasks costs \"\"\" self.DUT.minimum_cost =", "retain the above copyright notice, # this list of conditions and the following", "-*- # # tests.unit.TestValidation.py is part of The RTK Project # # All", "name of the copyright holder nor the names of its contributors # may", "are passed \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0,", "calculating tasks costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00", "STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "unit=True) def test_create(self): \"\"\" (TestValidation) __init__ should return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT,", "return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '')", "code when too few items are passed \"\"\" _values = (0, 0, 'Description',", "0.0, 0.0, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "'') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) 
self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0)", "self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time,", "CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True,", "LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "validation.Validation import Model, Validation __author__ = '<NAME>' __email__ = '<EMAIL>' __organization__ = 'ReliaQual", "0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation)", "992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for testing the Validation data controller class. \"\"\"", "provided that the following conditions are met: # # 1. Redistributions of source", "719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0))", "(INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "class for testing Validation module algorithms and models. \"\"\" # -*- coding: utf-8", "<reponame>rakhimov/rtk #!/usr/bin/env python -O \"\"\" This is the test class for testing Validation", "HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT", "are met: # # 1. Redistributions of source code must retain the above", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True,", "-*- coding: utf-8 -*- # # tests.unit.TestValidation.py is part of The RTK Project", "9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False on successfully calculating", "and/or other materials provided with the distribution. # # 3. Neither the name", "IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY", "Neither the name of the copyright holder nor the names of its contributors", "368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class", "THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "data model class. \"\"\" def setUp(self): \"\"\" Setup the test fixture for the", "permitted provided that the following conditions are met: # # 1. 
Redistributions of", "719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0)", "from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest from nose.plugins.attrib", "its contributors # may be used to endorse or promote products derived from", "\"\"\" def setUp(self): \"\"\" Sets up the test fixture for the Validation class.", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True) def", "OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "\"\"\" self.DUT = Model() @attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation) __init__ should return", "719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)", "self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit,", "(TestValidation) __init__ should create a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao, None)", "testing the Validation data controller class. \"\"\" def setUp(self): \"\"\" Sets up the", "Class for testing the Validation data controller class. \"\"\" def setUp(self): \"\"\" Sets", "Class for testing the Validation data model class. \"\"\" def setUp(self): \"\"\" Setup", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED", "0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 'Date', 0.0, 0.0,", "return a tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '',", "with or without # modification, are permitted provided that the following conditions are", "INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, #", "nose.plugins.attrib import attr import dao.DAO as _dao from validation.Validation import Model, Validation __author__", "0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163)", "'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 0.0, 0.0, 0.0, 0.0,", "= '<NAME>' __email__ = '<EMAIL>' __organization__ = 'ReliaQual Associates, LLC' __copyright__ = 'Copyright", "0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0)", "'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0,", "SUCH DAMAGE. 
import sys from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", )", "NO EVENT SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE FOR ANY", "self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return", "0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return a", "95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation)", "36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False on", "self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False on successfully", "# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #", "the test fixture for the Validation class. \"\"\" self.DUT = Model() @attr(all=True, unit=True)", "following conditions are met: # # 1. Redistributions of source code must retain", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE", "# # 1. Redistributions of source code must retain the above copyright notice,", "RTK Project # # All rights reserved. # Copyright 2007 - 2017 <NAME>", "disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright", "self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import sys from os.path import", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE,", "(TestValidation) set_attributes should return a 0 error code on success \"\"\" _values =", "0.0, 0.0, 0.0, 0.0, 719163, 'Date', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True,", "(_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes", "self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True)", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values)", "self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance,", "PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN", "252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333)", "names of its contributors # may be used to endorse or promote products", "modification, are permitted provided that the following conditions are met: # # 1.", "import sys from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest", "self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost,", "the Validation class. \"\"\" self.DUT = Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation)", "441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class", "models. \"\"\" # -*- coding: utf-8 -*- # # tests.unit.TestValidation.py is part of", "= 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def", "@attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return a tuple of attribute", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL", "used to endorse or promote products derived from this software # without specific", "def setUp(self): \"\"\" Setup the test fixture for the Validation class. \"\"\" self.DUT", "create a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao, None) self.assertEqual(self.DUT._last_id, None) self.assertEqual(self.DUT.dicTasks,", "self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def", "a wrong data type \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0,", "for testing the Validation data controller class. \"\"\" def setUp(self): \"\"\" Sets up", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) =", "passed a wrong data type \"\"\" _values = (0, 0, 'Description', 0, 'Specification',", "or promote products derived from this software # without specific prior written permission.", "unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return a 40 error code when", "permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "setUp(self): \"\"\" Sets up the test fixture for the Validation class. 
\"\"\" self.DUT", "(TestValidation) __init__ should return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id,", "self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time,", "algorithms and models. \"\"\" # -*- coding: utf-8 -*- # # tests.unit.TestValidation.py is", "following disclaimer. # # 2. Redistributions in binary form must reproduce the above", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR", "0.0, 719163, 'Date', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "reproduce the above copyright notice, # this list of conditions and the following", "calculate returns False on successfully calculating tasks costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost", "PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER # OR", "promote products derived from this software # without specific prior written permission. #", "@attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks", "(INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "\"\"\" (TestValidation) __init__ should create a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao,", "of source code must retain the above copyright notice, # this list of", "unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks costs", "self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for testing the Validation data controller class.", "OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND", "LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "Redistributions of source code must retain the above copyright notice, # this list", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\"", "import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest from nose.plugins.attrib import attr", "'') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163)", "calculating tasks times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "719163, 'Date', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) 
@attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns", "or without # modification, are permitted provided that the following conditions are met:", "\"\"\" Setup the test fixture for the Validation class. \"\"\" self.DUT = Model()", "without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE", "OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO", "'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0)", "# # tests.unit.TestValidation.py is part of The RTK Project # # All rights", "notice, # this list of conditions and the following disclaimer. # # 2.", "0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 'Date', 0.0, 0.0, 0.0, 0.0,", "met: # # 1. Redistributions of source code must retain the above copyright", "OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS;", "SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "0, '', 0, '', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0,", "3. Neither the name of the copyright holder nor the names of its", "import Model, Validation __author__ = '<NAME>' __email__ = '<EMAIL>' __organization__ = 'ReliaQual Associates,", "model class. \"\"\" def setUp(self): \"\"\" Setup the test fixture for the Validation", "Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__ should create a Validation data", "copyright notice, # this list of conditions and the following disclaimer in the", "the copyright holder nor the names of its contributors # may be used", "0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0)", "= self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE", "# Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com # #", "of its contributors # may be used to endorse or promote products derived", "# tests.unit.TestValidation.py is part of The RTK Project # # All rights reserved.", "0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase):", "for testing Validation module algorithms and models. \"\"\" # -*- coding: utf-8 -*-", "distribution. # # 3. Neither the name of the copyright holder nor the", "'Copyright 2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for testing the Validation", "the Validation class. 
\"\"\" self.DUT = Model() @attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation)", "Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for testing the Validation data model class. \"\"\"", "self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163,", "unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return a tuple of attribute values", "data type \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0,", "OF THE POSSIBILITY OF SUCH DAMAGE. import sys from os.path import dirname sys.path.insert(0,", "os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest from nose.plugins.attrib import", "this list of conditions and the following disclaimer in the documentation # and/or", "of conditions and the following disclaimer in the documentation # and/or other materials", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED.", "\"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for testing the Validation data model class.", "test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks costs \"\"\" self.DUT.minimum_cost", "self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance,", "on successfully calculating tasks costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost", "unittest from nose.plugins.attrib import attr import dao.DAO as _dao from validation.Validation import Model,", "list of conditions and the following disclaimer in the documentation # and/or other", "FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "\"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333)", "95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation)", "conditions and the following disclaimer. # # 2. 
Redistributions in binary form must", "and use in source and binary forms, with or without # modification, are", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED", "AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "# this list of conditions and the following disclaimer in the documentation #", "0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True)", "_error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should", "(TestValidation) get_attributes should return a tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0,", "= (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 0.0,", "Model, Validation __author__ = '<NAME>' __email__ = '<EMAIL>' __organization__ = 'ReliaQual Associates, LLC'", "OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "test fixture for the Validation class. \"\"\" self.DUT = Model() @attr(all=True, unit=True) def", "a tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '', 0,", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES", "conditions are met: # # 1. Redistributions of source code must retain the", "0, 0.0, 0.0, 0.0, 0.0, 719163, 'Date', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF", "should return a 40 error code when too few items are passed \"\"\"", "Validation data model class. \"\"\" def setUp(self): \"\"\" Setup the test fixture for", "\"\"\" Class for testing the Validation data model class. \"\"\" def setUp(self): \"\"\"", "719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0)", "self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable,", "AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "__organization__ = 'ReliaQual Associates, LLC' __copyright__ = 'Copyright 2015 Andrew \"Weibullguy\" Rowland' class", "TestValidationModel(unittest.TestCase): \"\"\" Class for testing the Validation data model class. \"\"\" def setUp(self):", "copyright notice, # this list of conditions and the following disclaimer. # #", "self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date,", "self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes", "error code when passed a wrong data type \"\"\" _values = (0, 0,", "Setup the test fixture for the Validation class. 
\"\"\" self.DUT = Model() @attr(all=True,", "0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0)", "self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False", "0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "LLC' __copyright__ = 'Copyright 2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for", "dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest from nose.plugins.attrib import attr import dao.DAO as", "Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao, None) self.assertEqual(self.DUT._last_id, None) self.assertEqual(self.DUT.dicTasks, {}) self.assertEqual(self.DUT.dicStatus,", "= 'Copyright 2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for testing the", "(_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes", "self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return a tuple", "Sets up the test fixture for the Validation class. \"\"\" self.DUT = Validation()", "0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0)", "values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '', 0, 0.0, 0.0, 0.0, 0.0,", "0.0, 0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate", "dao.DAO as _dao from validation.Validation import Model, Validation __author__ = '<NAME>' __email__ =", "0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def", "self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\"", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT,", "THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "Redistribution and use in source and binary forms, with or without # modification,", "controller class. \"\"\" def setUp(self): \"\"\" Sets up the test fixture for the", "source and binary forms, with or without # modification, are permitted provided that", "FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE", "ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED", "reserved. # Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com #", "is the test class for testing Validation module algorithms and models. 
\"\"\" #", "\"/rtk\", ) import unittest from nose.plugins.attrib import attr import dao.DAO as _dao from", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE", "Validation module algorithms and models. \"\"\" # -*- coding: utf-8 -*- # #", "0.0, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for testing the Validation data controller", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\"", "<EMAIL>rew.rowland <AT> reliaqual <DOT> com # # Redistribution and use in source and", "# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import sys", "com # # Redistribution and use in source and binary forms, with or", "test fixture for the Validation class. \"\"\" self.DUT = Validation() @attr(all=True, unit=True) def", "Validation class. \"\"\" self.DUT = Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__", "self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return a", "of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '', 0, 0.0, 0.0,", "2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com # # Redistribution and use in", "up the test fixture for the Validation class. \"\"\" self.DUT = Validation() @attr(all=True,", "successfully calculating tasks costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost =", "\"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '')", "from this software # without specific prior written permission. # # THIS SOFTWARE", "TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "def test_create(self): \"\"\" (TestValidation) __init__ should return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model))", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import sys from os.path import dirname", "@attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks", "(TestValidation) set_attributes should return a 10 error code when passed a wrong data", "Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for testing the Validation data model", "\"\"\" self.DUT = Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__ should create", "copyright holder nor the names of its contributors # may be used to", "_error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should", "0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self):", "returns False on successfully calculating tasks costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost =", "'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0,", "0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0)", "\"\"\" def setUp(self): \"\"\" Setup the test fixture for the Validation class. \"\"\"", "# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF #", "0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def", "test_create(self): \"\"\" (TestValidation) __init__ should return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id,", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS", "may be used to endorse or promote products derived from this software #", "software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED", "this list of conditions and the following disclaimer. # # 2. Redistributions in", "(TestValidation) calculate returns False on successfully calculating tasks costs \"\"\" self.DUT.minimum_cost = 252.00", "'Date', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)", "\"\"\" (TestValidation) get_attributes should return a tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0,", "few items are passed \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0,", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS #", "Validation data controller class. \"\"\" def setUp(self): \"\"\" Sets up the test fixture", "0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns", "forms, with or without # modification, are permitted provided that the following conditions", "tests.unit.TestValidation.py is part of The RTK Project # # All rights reserved. 
#", "(0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0,", "above copyright notice, # this list of conditions and the following disclaimer in", "0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True,", "tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '', 0, 0.0,", "Validation class. \"\"\" self.DUT = Model() @attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation) __init__", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation)", "= 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25)", "Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0)", "self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable,", "in source and binary forms, with or without # modification, are permitted provided", "# # 2. Redistributions in binary form must reproduce the above copyright notice,", "OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "should return a 10 error code when passed a wrong data type \"\"\"", "0.0, 0.0, 0.0, 0.0, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "testing Validation module algorithms and models. \"\"\" # -*- coding: utf-8 -*- #", "USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return a tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(),", "def setUp(self): \"\"\" Sets up the test fixture for the Validation class. \"\"\"", "self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable,", "+ \"/rtk\", ) import unittest from nose.plugins.attrib import attr import dao.DAO as _dao", "the following disclaimer in the documentation # and/or other materials provided with the", "for the Validation class. \"\"\" self.DUT = Model() @attr(all=True, unit=True) def test_create(self): \"\"\"", "# # Redistribution and use in source and binary forms, with or without", "0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR", "Model() @attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation) __init__ should return a Validation model", "test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return a 10 error code when passed a", "and models. 
\"\"\" # -*- coding: utf-8 -*- # # tests.unit.TestValidation.py is part", "successfully calculating tasks times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time =", "module algorithms and models. \"\"\" # -*- coding: utf-8 -*- # # tests.unit.TestValidation.py", "other materials provided with the distribution. # # 3. Neither the name of", "self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost,", "self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for testing the Validation data", "provided with the distribution. # # 3. Neither the name of the copyright", "self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate())", "0.0, 0.0, 0.0, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "__author__ = '<NAME>' __email__ = '<EMAIL>' __organization__ = 'ReliaQual Associates, LLC' __copyright__ =", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR", "a 0 error code on success \"\"\" _values = (0, 0, 'Description', 0,", "of The RTK Project # # All rights reserved. # Copyright 2007 -", "_error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "= 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation)", "def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return a 0 error code on success", "0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self):", "prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "should return a tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0,", "coding: utf-8 -*- # # tests.unit.TestValidation.py is part of The RTK Project #", "self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return a 0", "the test class for testing Validation module algorithms and models. \"\"\" # -*-", "def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return a 10 error code when passed", "0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True)", "for testing the Validation data model class. \"\"\" def setUp(self): \"\"\" Setup the", "AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "-O \"\"\" This is the test class for testing Validation module algorithms and", "# 1. 
Redistributions of source code must retain the above copyright notice, #", "sys from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest from", "import dao.DAO as _dao from validation.Validation import Model, Validation __author__ = '<NAME>' __email__", "class TestValidationModel(unittest.TestCase): \"\"\" Class for testing the Validation data model class. \"\"\" def", "self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance,", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import sys from os.path", "times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time,", "class TestValidationController(unittest.TestCase): \"\"\" Class for testing the Validation data controller class. \"\"\" def", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0)", "self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time,", "a 40 error code when too few items are passed \"\"\" _values =", "to endorse or promote products derived from this software # without specific prior", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg)", "\"\"\" (TestValidation) set_attributes should return a 10 error code when passed a wrong", "return a 40 error code when too few items are passed \"\"\" _values", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE", "40 error code when too few items are passed \"\"\" _values = (0,", "tasks times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate())", "derived from this software # without specific prior written permission. # # THIS", "utf-8 -*- # # tests.unit.TestValidation.py is part of The RTK Project # #", "list of conditions and the following disclaimer. # # 2. Redistributions in binary", "the names of its contributors # may be used to endorse or promote", "return a 0 error code on success \"\"\" _values = (0, 0, 'Description',", "a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type,", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO,", "use in source and binary forms, with or without # modification, are permitted", "@attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return a 10 error code", "is part of The RTK Project # # All rights reserved. 
# Copyright", "holder nor the names of its contributors # may be used to endorse", "# -*- coding: utf-8 -*- # # tests.unit.TestValidation.py is part of The RTK", "COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "must reproduce the above copyright notice, # this list of conditions and the", "(0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 0.0, 0.0,", "0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0)", "10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return a 40 error", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "= 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225)", "# this list of conditions and the following disclaimer. # # 2. Redistributions", "'<NAME>' __email__ = '<EMAIL>' __organization__ = 'ReliaQual Associates, LLC' __copyright__ = 'Copyright 2015", "be used to endorse or promote products derived from this software # without", "1. Redistributions of source code must retain the above copyright notice, # this", "THE POSSIBILITY OF SUCH DAMAGE. import sys from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__)))", "disclaimer in the documentation # and/or other materials provided with the distribution. #", "for the Validation class. \"\"\" self.DUT = Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\"", "part of The RTK Project # # All rights reserved. # Copyright 2007", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS", "attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '', 0, 0.0, 0.0, 0.0,", "unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__ should create a Validation data controller \"\"\"", "import attr import dao.DAO as _dao from validation.Validation import Model, Validation __author__ =", "returns False on successfully calculating tasks times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time =", "data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao, None) self.assertEqual(self.DUT._last_id, None) self.assertEqual(self.DUT.dicTasks, {}) self.assertEqual(self.DUT.dicStatus, {})", "EVENT SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "# 2. 
Redistributions in binary form must reproduce the above copyright notice, #", "THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING #", "sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest from nose.plugins.attrib import attr import dao.DAO", "0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0)", "unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks times", "attr import dao.DAO as _dao from validation.Validation import Model, Validation __author__ = '<NAME>'", "95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False on successfully calculating", "# # 3. Neither the name of the copyright holder nor the names", "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED", "= Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__ should create a Validation", "\"\"\" Class for testing the Validation data controller class. \"\"\" def setUp(self): \"\"\"", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER", "# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A #", "in the documentation # and/or other materials provided with the distribution. # #", "HOLDER # OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, #", "= Model() @attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation) __init__ should return a Validation", "= 'ReliaQual Associates, LLC' __copyright__ = 'Copyright 2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase):", "HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "#!/usr/bin/env python -O \"\"\" This is the test class for testing Validation module", "0.0, 0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False on", "(0, 0, '', 0, '', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0,", "type \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0,", "False on successfully calculating tasks costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00", "set_attributes should return a 0 error code on success \"\"\" _values = (0,", "DAMAGE. 
import sys from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import", "0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0,", "the above copyright notice, # this list of conditions and the following disclaimer", "'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 'Date', 0.0, 0.0, 0.0, 0.0, 0.0,", "def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return a tuple of attribute values \"\"\"", "unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return a 0 error code on", "items are passed \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0,", "return a 10 error code when passed a wrong data type \"\"\" _values", "def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks costs \"\"\"", "0, 0.0, 0.0, 0.0, 0.0, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "Project # # All rights reserved. # Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland", "OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "of conditions and the following disclaimer. # # 2. Redistributions in binary form", "\"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '', 0, '', 0, 0.0, 0.0, 0.0, 0.0, 719163,", "products derived from this software # without specific prior written permission. # #", "self.DUT = Model() @attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation) __init__ should return a", "\"\"\" (TestValidation) set_attributes should return a 40 error code when too few items", "TestValidationController(unittest.TestCase): \"\"\" Class for testing the Validation data controller class. \"\"\" def setUp(self):", "must retain the above copyright notice, # this list of conditions and the", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN", "_dao from validation.Validation import Model, Validation __author__ = '<NAME>' __email__ = '<EMAIL>' __organization__", "a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao, None) self.assertEqual(self.DUT._last_id, None) self.assertEqual(self.DUT.dicTasks, {})", "set_attributes should return a 10 error code when passed a wrong data type", "OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "documentation # and/or other materials provided with the distribution. # # 3. Neither", "OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR", "# without specific prior written permission. 
# # THIS SOFTWARE IS PROVIDED BY", "self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return a", "0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0)", "DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True)", "0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True,", "the above copyright notice, # this list of conditions and the following disclaimer.", "BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "2007 - 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com # # Redistribution and", "\"\"\" # -*- coding: utf-8 -*- # # tests.unit.TestValidation.py is part of The", "the distribution. # # 3. Neither the name of the copyright holder nor", "a 10 error code when passed a wrong data type \"\"\" _values =", "@attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return a 0 error code", "and binary forms, with or without # modification, are permitted provided that the", "conditions and the following disclaimer in the documentation # and/or other materials provided", "# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR #", "self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time,", "0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0,", "= 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\"", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE)", "unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return a 10 error code when", "from nose.plugins.attrib import attr import dao.DAO as _dao from validation.Validation import Model, Validation", "class. \"\"\" self.DUT = Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__ should", "test class for testing Validation module algorithms and models. \"\"\" # -*- coding:", "class. \"\"\" def setUp(self): \"\"\" Setup the test fixture for the Validation class.", "OF SUCH DAMAGE. 
import sys from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\",", "= 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost,", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "10 error code when passed a wrong data type \"\"\" _values = (0,", "code on success \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0,", "# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY,", "= (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 'Date',", "'', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0,", "WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "and the following disclaimer. # # 2. Redistributions in binary form must reproduce", "the Validation data model class. \"\"\" def setUp(self): \"\"\" Setup the test fixture", "set_attributes should return a 40 error code when too few items are passed", "THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import", "OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "\"\"\" (TestValidation) __init__ should return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0)", "class. \"\"\" def setUp(self): \"\"\" Sets up the test fixture for the Validation", "0.0) @attr(all=True, unit=True) def test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return a 0 error", "The RTK Project # # All rights reserved. # Copyright 2007 - 2017", "too few items are passed \"\"\" _values = (0, 0, 'Description', 0, 'Specification',", "contributors # may be used to endorse or promote products derived from this", "'', 0, '', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0,", "'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 'Date', 0.0, 0.0, 0.0,", "44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\" (TestValidation) calculate", "wrong data type \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0,", "that the following conditions are met: # # 1. Redistributions of source code", "should return a 0 error code on success \"\"\" _values = (0, 0,", "def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False on successfully calculating tasks times \"\"\"", "with the distribution. # # 3. 
Neither the name of the copyright holder", "self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return a 40", "def test_controller_create(self): \"\"\" (TestValidation) __init__ should create a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT,", "Associates, LLC' __copyright__ = 'Copyright 2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10)", "0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return a 10 error", "self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status,", "40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return a tuple of", "code must retain the above copyright notice, # this list of conditions and", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code,", "CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "the following conditions are met: # # 1. Redistributions of source code must", "NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance,", "on successfully calculating tasks times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8 self.DUT.maximum_time", "__init__ should create a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao, None) self.assertEqual(self.DUT._last_id,", "# All rights reserved. # Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual", "0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0)", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF", "self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost,", "\"\"\" Sets up the test fixture for the Validation class. 
\"\"\" self.DUT =", "IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF", "__init__ should return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0)", "tasks costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence", "import unittest from nose.plugins.attrib import attr import dao.DAO as _dao from validation.Validation import", "self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable,", "\"\"\" This is the test class for testing Validation module algorithms and models.", "SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF", "rights reserved. # Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com", "# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER #", "fixture for the Validation class. \"\"\" self.DUT = Model() @attr(all=True, unit=True) def test_create(self):", "Redistributions in binary form must reproduce the above copyright notice, # this list", "self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0) self.assertEqual(self.DUT.mean_acceptable, 0.0) self.assertEqual(self.DUT.max_acceptable, 0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date,", "0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 0.0, 0.0, 0.0, 0.0, 0.0,", "0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\"", "self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self): \"\"\"", "<DOT> com # # Redistribution and use in source and binary forms, with", "0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for testing the", "# 3. Neither the name of the copyright holder nor the names of", "All rights reserved. # Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT>", ") import unittest from nose.plugins.attrib import attr import dao.DAO as _dao from validation.Validation", "self.DUT = Validation() @attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__ should create a", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import sys from", "calculate returns False on successfully calculating tasks times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time", "nor the names of its contributors # may be used to endorse or", "fixture for the Validation class. 
\"\"\" self.DUT = Validation() @attr(all=True, unit=True) def test_controller_create(self):", "\"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0,", "0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost, 0.0) self.assertEqual(self.DUT.maximum_cost, 0.0) self.assertEqual(self.DUT.mean_cost, 0.0) self.assertEqual(self.DUT.cost_variance, 0.0) @attr(all=True, unit=True)", "class. \"\"\" self.DUT = Model() @attr(all=True, unit=True) def test_create(self): \"\"\" (TestValidation) __init__ should", "the Validation data controller class. \"\"\" def setUp(self): \"\"\" Sets up the test", "the test fixture for the Validation class. \"\"\" self.DUT = Validation() @attr(all=True, unit=True)", "0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY,", "TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", "self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0) self.assertEqual(self.DUT.maximum_time, 0.0) self.assertEqual(self.DUT.mean_time, 0.0) self.assertEqual(self.DUT.time_variance, 0.0) self.assertEqual(self.DUT.minimum_cost, 0.0) self.assertEqual(self.DUT.average_cost,", "error code on success \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0,", "\"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence = 0.95", "IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN", "error code when too few items are passed \"\"\" _values = (0, 0,", "the documentation # and/or other materials provided with the distribution. # # 3.", "@attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return a 40 error code", "<NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com # # Redistribution and use in source", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self):", "notice, # this list of conditions and the following disclaimer in the documentation", "testing the Validation data model class. 
\"\"\" def setUp(self): \"\"\" Setup the test", "IN NO EVENT SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE FOR", "(0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 'Date', 0.0,", "Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0)", "def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return a 40 error code when too", "__copyright__ = 'Copyright 2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for testing", "= self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return", "False on successfully calculating tasks times \"\"\" self.DUT.minimum_time = 25.2 self.DUT.average_time = 36.8", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40)", "0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 0.0, 0.0, 0.0,", "# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com # # Redistribution", "when too few items are passed \"\"\" _values = (0, 0, 'Description', 0,", "'ReliaQual Associates, LLC' __copyright__ = 'Copyright 2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\"", "POSSIBILITY OF SUCH DAMAGE. import sys from os.path import dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) +", "# Redistribution and use in source and binary forms, with or without #", "0, '', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163, 0.0, 0.0, 0.0, 0.0,", "LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "(TestValidation) set_attributes should return a 40 error code when too few items are", "should return a Validation model \"\"\" self.assertTrue(isinstance(self.DUT, Model)) self.assertEqual(self.DUT.revision_id, 0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description,", "self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\" (TestValidation) set_attributes should return a 10", "setUp(self): \"\"\" Setup the test fixture for the Validation class. \"\"\" self.DUT =", "\"\"\" (TestValidation) set_attributes should return a 0 error code on success \"\"\" _values", "endorse or promote products derived from this software # without specific prior written", "are permitted provided that the following conditions are met: # # 1. Redistributions", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS", "code when passed a wrong data type \"\"\" _values = (0, 0, 'Description',", "<AT> reliaqual <DOT> com # # Redistribution and use in source and binary", "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR", "as _dao from validation.Validation import Model, Validation __author__ = '<NAME>' __email__ = '<EMAIL>'", "costs \"\"\" self.DUT.minimum_cost = 252.00 self.DUT.average_cost = 368.00 self.DUT.maximum_cost = 441.00 self.DUT.confidence =", "DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE", "__email__ = '<EMAIL>' __organization__ = 'ReliaQual Associates, LLC' __copyright__ = 'Copyright 2015 Andrew", "= '<EMAIL>' __organization__ = 'ReliaQual Associates, LLC' __copyright__ = 'Copyright 2015 Andrew \"Weibullguy\"", "NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "= (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163, 719163,", "= self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return", "the following disclaimer. # # 2. Redistributions in binary form must reproduce the", "success \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0,", "A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER", "(_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes", "binary forms, with or without # modification, are permitted provided that the following", "= 0.95 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for testing", "reliaqual <DOT> com # # Redistribution and use in source and binary forms,", "@attr(all=True, unit=True) def test_controller_create(self): \"\"\" (TestValidation) __init__ should create a Validation data controller", "TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE", "2. Redistributions in binary form must reproduce the above copyright notice, # this", "ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN", "(TestValidation) calculate returns False on successfully calculating tasks times \"\"\" self.DUT.minimum_time = 25.2", "0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def", "on success \"\"\" _values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0,", "0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self): \"\"\"", "36.8 self.DUT.maximum_time = 44.1 self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_time, 36.08333333) self.assertAlmostEqual(self.DUT.time_variance, 9.9225) @attr(all=True, unit=True) def test_calculate_task_cost(self):", "dirname sys.path.insert(0, dirname(dirname(dirname(__file__))) + \"/rtk\", ) import unittest from nose.plugins.attrib import attr import", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS", "MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT", "should create a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation)) self.assertEqual(self.DUT._dao, None) self.assertEqual(self.DUT._last_id, None)", "# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF", "the name of the copyright holder nor the names of its contributors #", "binary form must reproduce the above copyright notice, # this list of conditions", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #", "get_attributes should return a tuple of attribute values \"\"\" self.assertEqual(self.DUT.get_attributes(), (0, 0, '',", "python -O \"\"\" This is the test class for testing Validation module algorithms", "following disclaimer in the documentation # and/or other materials provided with the distribution.", "'<EMAIL>' __organization__ = 'ReliaQual Associates, LLC' __copyright__ = 'Copyright 2015 Andrew \"Weibullguy\" Rowland'", "from validation.Validation import Model, Validation __author__ = '<NAME>' __email__ = '<EMAIL>' __organization__ =", "form must reproduce the above copyright notice, # this list of conditions and", "0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 0) @attr(all=True, unit=True) def test_set_attributes_wrong_type(self):", "0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False on successfully", "# # All rights reserved. # Copyright 2007 - 2017 <NAME> <EMAIL>rew.rowland <AT>", "0.0, 0.0, 0.0, 95.0)) @attr(all=True, unit=True) def test_calculate_task_time(self): \"\"\" (TestValidation) calculate returns False", "0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 10) @attr(all=True, unit=True) def test_set_attributes_missing_index(self): \"\"\"", "OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR", "95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation)", "self.assertFalse(self.DUT.calculate()) self.assertAlmostEqual(self.DUT.mean_cost, 360.83333333) self.assertAlmostEqual(self.DUT.cost_variance, 992.25) class TestValidationController(unittest.TestCase): \"\"\" Class for testing the Validation", "2015 Andrew \"Weibullguy\" Rowland' class TestValidationModel(unittest.TestCase): \"\"\" Class for testing the Validation data", "INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION)", "materials provided with the distribution. # # 3. Neither the name of the", "this software # without specific prior written permission. 
# # THIS SOFTWARE IS", "test_set_attributes_missing_index(self): \"\"\" (TestValidation) set_attributes should return a 40 error code when too few", "test_controller_create(self): \"\"\" (TestValidation) __init__ should create a Validation data controller \"\"\" self.assertTrue(isinstance(self.DUT, Validation))", "0.0) self.assertEqual(self.DUT.variance_acceptable, 0.0) self.assertEqual(self.DUT.start_date, 719163) self.assertEqual(self.DUT.end_date, 719163) self.assertEqual(self.DUT.status, 0.0) self.assertEqual(self.DUT.minimum_time, 0.0) self.assertEqual(self.DUT.average_time, 0.0)", "self.DUT.set_attributes(_values) self.assertEqual(_error_code, 40) @attr(all=True, unit=True) def test_get_attributes(self): \"\"\" (TestValidation) get_attributes should return a", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS", "0.0, 0.0, 719163, 'Date', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,", "INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "# and/or other materials provided with the distribution. # # 3. Neither the", "# modification, are permitted provided that the following conditions are met: # #", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0) (_error_code, _error_msg) = self.DUT.set_attributes(_values) self.assertEqual(_error_code,", "_values = (0, 0, 'Description', 0, 'Specification', 0, 0.0, 0.0, 0.0, 0.0, 719163,", "0 error code on success \"\"\" _values = (0, 0, 'Description', 0, 'Specification',", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "- 2017 <NAME> <EMAIL>rew.rowland <AT> reliaqual <DOT> com # # Redistribution and use", "ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "when passed a wrong data type \"\"\" _values = (0, 0, 'Description', 0,", "source code must retain the above copyright notice, # this list of conditions", "of the copyright holder nor the names of its contributors # may be", "0) self.assertEqual(self.DUT.validation_id, 0) self.assertEqual(self.DUT.task_description, '') self.assertEqual(self.DUT.task_type, 0) self.assertEqual(self.DUT.task_specification, '') self.assertEqual(self.DUT.measurement_unit, 0) self.assertEqual(self.DUT.min_acceptable, 0.0)", "data controller class. \"\"\" def setUp(self): \"\"\" Sets up the test fixture for", "above copyright notice, # this list of conditions and the following disclaimer. #", "specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "# may be used to endorse or promote products derived from this software", "test_set_attributes(self): \"\"\" (TestValidation) set_attributes should return a 0 error code on success \"\"\"" ]
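# Note on the expected values above: the asserted means and variances are
# consistent with the standard PERT (three-point) estimate.  The sketch below
# only illustrates where those magic numbers come from; it is an assumption
# about what Model.calculate() does, not part of the RTK test suite.
#
#   mean     = (minimum + 4 * average + maximum) / 6
#   variance = ((maximum - minimum) / 6) ** 2
#
#   time: (25.2 + 4 * 36.8 + 44.1) / 6    = 36.08333...
#         ((44.1 - 25.2) / 6) ** 2        = 9.9225
#   cost: (252.0 + 4 * 368.0 + 441.0) / 6 = 360.83333...
#         ((441.0 - 252.0) / 6) ** 2      = 992.25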
# MyLanPangzi/py4e
# Exercise 7: Rewrite the grade program from the previous chapter using a
# function called computegrade that takes a score as its parameter and
# returns a grade as a string.


def computegrade(score):
    if score < 0 or score > 1:
        return "score out of range"
    elif score >= 0.9:
        return "A"
    elif score >= 0.8:
        return "B"
    elif score >= 0.7:
        return "C"
    elif score >= 0.6:
        return "D"
    else:
        return "F"


print(computegrade(float(input('Enter Score(0-1.0): '))))
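# A few spot checks of the grade boundaries; a minimal sketch added for
# illustration, not part of the original exercise.  Note that float() above
# raises ValueError on non-numeric input.
assert computegrade(0.95) == "A"
assert computegrade(0.85) == "B"
assert computegrade(0.75) == "C"
assert computegrade(0.65) == "D"
assert computegrade(0.50) == "F"
assert computegrade(1.50) == "score out of range"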
from sklearn import datasets, svm, metrics
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import numpy as np

# The digits dataset
digits = datasets.load_digits()

# Flatten the image to apply classifier
n_samples = len(digits.images)
data = digits.images.reshape((n_samples, -1))

# Create support vector classifier
classifier = svm.SVC(gamma=0.001)

# Split into train and test subsets (50% each)
X_train, X_test, y_train, y_test = train_test_split(
    data, digits.target, test_size=0.5, shuffle=False)

# Learn the digits on the first half of the digits
classifier.fit(X_train, y_train)

# test on second half of data
n = np.random.randint(int(n_samples/2), n_samples)
plt.imshow(digits.images[n], cmap=plt.cm.gray_r, interpolation='nearest')
print('Predicted: ' + str(classifier.predict(digits.data[n:n+1])[0]))
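# The single random image above is only a qualitative check.  A minimal
# sketch of a quantitative check on the held-out half, using the `metrics`
# module already imported; added for illustration, not part of the original
# demo.
predicted = classifier.predict(X_test)
print('Test accuracy: %.3f' % metrics.accuracy_score(y_test, predicted))
print(metrics.confusion_matrix(y_test, predicted))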
# Select Option by Number
# 0 = Linear, 1 = Quadratic, 2 = Inner Target
# 3 = Moons, 4 = Concentric Circles, 5 = Distinct Clusters
select_option = 5

# generate data
data_options = ['linear', 'quadratic', 'target', 'moons', 'circles', 'blobs']
option = data_options[select_option]
# number of data points
n = 2000
X = np.random.random((n, 2))
mixing = 0.0  # add random mixing element to data
xplot = np.linspace(0, 1, 100)

if option == 'linear':
    y = np.array([False if (X[i, 0]+X[i, 1]) >= (1.0+mixing/2-np.random.rand()*mixing)
                  else True for i in range(n)])
    yplot = 1-xplot
elif option == 'quadratic':
    y = np.array([False if X[i, 0]**2 >= X[i, 1]+(np.random.rand()-0.5)*mixing
                  else True for i in range(n)])
    yplot = xplot**2
elif option == 'target':
    y = np.array([False if (X[i, 0]-0.5)**2+(X[i, 1]-0.5)**2 <= 0.1
                  + (np.random.rand()-0.5)*0.2*mixing
                  else True for i in range(n)])
    j = False
    yplot = np.empty(100)
    for i, x in enumerate(xplot):
        r = 0.1-(x-0.5)**2
        if r <= 0:
            yplot[i] = np.nan
        else:
            # plot both sides of circle
            j = not j
            yplot[i] = (2*j-1)*np.sqrt(r)+0.5
elif option == 'moons':
    X, y = datasets.make_moons(n_samples=n, noise=0.05)
    yplot = xplot*0.0
elif option == 'circles':
    X, y = datasets.make_circles(n_samples=n, noise=0.05, factor=0.5)
    yplot = xplot*0.0
elif option == 'blobs':
    X, y = datasets.make_blobs(n_samples=n, centers=[[-5, 3], [5, -3]],
                               cluster_std=2.0)
    yplot = xplot*0.0

plt.scatter(X[y > 0.5, 0], X[y > 0.5, 1], color='blue', marker='^', label='True')
plt.scatter(X[y < 0.5, 0], X[y < 0.5, 1], color='red', marker='x', label='False')
if option not in ['moons', 'circles', 'blobs']:
    plt.plot(xplot, yplot, 'k.', label='Division')
plt.legend()
plt.savefig(str(select_option)+'.png')

# Split into train and test subsets (50% each)
XA, XB, yA, yB = train_test_split(X, y, test_size=0.5, shuffle=False)


# Plot regression results
def assess(P):
    plt.figure()
    plt.scatter(XB[P == 1, 0], XB[P == 1, 1], marker='^', color='blue',
                label='True')
    plt.scatter(XB[P == 0, 0], XB[P == 0, 1], marker='x', color='red',
                label='False')
    plt.scatter(XB[P != yB, 0], XB[P != yB, 1], marker='s', color='orange',
                alpha=0.5, label='Incorrect')
    if option not in ['moons', 'circles', 'blobs']:
        plt.plot(xplot, yplot, 'k.', label='Division')
    plt.legend()
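# For the 'target' option, the division curve is the circle
# (x - 0.5)**2 + (y - 0.5)**2 = 0.1, so each x has two boundary points
# y = 0.5 +/- sqrt(0.1 - (x - 0.5)**2); the alternating flag j above picks
# one side per sample so a single array traces both halves.  A vectorized
# equivalent, shown only for clarity (assumes the same xplot grid):
#
#   r = np.clip(0.1 - (xplot - 0.5)**2, 0, None)
#   upper, lower = 0.5 + np.sqrt(r), 0.5 - np.sqrt(r)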
# Supervised Classification

# Logistic Regression
from sklearn.linear_model import LogisticRegression
lr = LogisticRegression(solver='lbfgs')
lr.fit(XA, yA)
yP = lr.predict(XB)
assess(yP)

# Naïve Bayes
from sklearn.naive_bayes import GaussianNB
nb = GaussianNB()
nb.fit(XA, yA)
yP = nb.predict(XB)
assess(yP)

# Stochastic Gradient Descent
from sklearn.linear_model import SGDClassifier
sgd = SGDClassifier(loss='modified_huber', shuffle=True, random_state=101)
sgd.fit(XA, yA)
yP = sgd.predict(XB)
assess(yP)

# K-Nearest Neighbors
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(XA, yA)
yP = knn.predict(XB)
assess(yP)

# Decision Tree
from sklearn.tree import DecisionTreeClassifier
dtree = DecisionTreeClassifier(max_depth=10, random_state=101,
                               max_features=None, min_samples_leaf=5)
dtree.fit(XA, yA)
yP = dtree.predict(XB)
assess(yP)

# Random Forest
from sklearn.ensemble import RandomForestClassifier
rfm = RandomForestClassifier(n_estimators=70, oob_score=True, n_jobs=1,
                             random_state=101, max_features=None,
                             min_samples_leaf=3)
rfm.fit(XA, yA)
yP = rfm.predict(XB)
assess(yP)

# Support Vector Classifier
from sklearn.svm import SVC
# Note: this rebinds the name `svm` from the module imported at the top of
# the file to an SVC instance; the module is not needed again afterwards.
svm = SVC(gamma='scale', C=1.0, random_state=101)
svm.fit(XA, yA)
yP = svm.predict(XB)
assess(yP)

# Neural Network
from sklearn.neural_network import MLPClassifier
clf = MLPClassifier(solver='lbfgs', alpha=1e-5, max_iter=200,
                    activation='relu', hidden_layer_sizes=(10, 30, 10),
                    random_state=1, shuffle=True)
clf.fit(XA, yA)
yP = clf.predict(XB)
assess(yP)
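# assess() compares the models visually.  A minimal numeric comparison of
# the supervised models fitted above, using the `metrics` import from the
# top of the file; added for illustration, not part of the original
# walkthrough.
for name, model in [('LogReg', lr), ('NaiveBayes', nb), ('SGD', sgd),
                    ('kNN', knn), ('Tree', dtree), ('Forest', rfm),
                    ('SVC', svm), ('MLP', clf)]:
    acc = metrics.accuracy_score(yB, model.predict(XB))
    print('%-10s accuracy on held-out half: %.3f' % (name, acc))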
fit and predict calls, need to fit and predict on same", "if r<=0: yplot[i] = np.nan else: j = not j # plot both", "np.empty(100) for i,x in enumerate(xplot): r = 0.1-(x-0.5)**2 if r<=0: yplot[i] = np.nan", "X, y = datasets.make_moons(n_samples=n,noise=0.05) yplot = xplot*0.0 elif option=='circles': X, y = datasets.make_circles(n_samples=n,noise=0.05,factor=0.5)", "unsupervised clustering may need to be reversed if len(XB[yP!=yB]) > n/4: yP =", "need to be reversed if len(XB[yP!=yB]) > n/4: yP = 1 - yP", "yP assess(yP) # Gaussian Mixture Model from sklearn.mixture import GaussianMixture gmm = GaussianMixture(n_components=2)", "len(digits.images) data = digits.images.reshape((n_samples, -1)) # Create support vector classifier classifier = svm.SVC(gamma=0.001)", "train_test_split import matplotlib.pyplot as plt import numpy as np # The digits dataset", "# Arbitrary labels with unsupervised clustering may need to be reversed if len(XB[np.round(yP[:,0])!=yB])", "RandomForestClassifier rfm = RandomForestClassifier(n_estimators=70,oob_score=True,n_jobs=1,\\ random_state=101,max_features=None,min_samples_leaf=3) rfm.fit(XA,yA) yP = rfm.predict(XB) assess(yP) # Support Vector", "between fit and predict calls, need to fit and predict on same dataset", "to be reversed if len(XB[np.round(yP[:,0])!=yB]) > n/4: yP = 1 - yP assess(np.round(yP[:,0]))", "dataset # Arbitrary labels with unsupervised clustering may need to be reversed if", "regression results def assess(P): plt.figure() plt.scatter(XB[P==1,0],XB[P==1,1],marker='^',color='blue',label='True') plt.scatter(XB[P==0,0],XB[P==0,1],marker='x',color='red',label='False') plt.scatter(XB[P!=yB,0],XB[P!=yB,1],marker='s',color='orange',alpha=0.5,label='Incorrect') if option not in ['moons','circles','blobs']:", "else True for i in range(n)]) j = False yplot = np.empty(100) for", "datasets.load_digits() # Flatten the image to apply classifier n_samples = len(digits.images) data =", "to be reversed if len(XB[yP!=yB]) > n/4: yP = 1 - yP assess(yP)", "= data_options[select_option] # number of data points n = 2000 X = np.random.random((n,2))", "- yP assess(yP) # Gaussian Mixture Model from sklearn.mixture import GaussianMixture gmm =", "gmm.predict_proba(XB) # produces probabilities # Arbitrary labels with unsupervised clustering may need to", "yplot[i] = (2*j-1)*np.sqrt(r)+0.5 elif option=='moons': X, y = datasets.make_moons(n_samples=n,noise=0.05) yplot = xplot*0.0 elif", "on same dataset # Arbitrary labels with unsupervised clustering may need to be", "= np.array([False if (X[i,0]+X[i,1])>=(1.0+mixing/2-np.random.rand()*mixing) else True for i in range(n)]) yplot = 1-xplot", "# K-Nearest Neighbors from sklearn.neighbors import KNeighborsClassifier knn = KNeighborsClassifier(n_neighbors=5) knn.fit(XA,yA) yP =", "knn = KNeighborsClassifier(n_neighbors=5) knn.fit(XA,yA) yP = knn.predict(XB) assess(yP) # Decision Tree from sklearn.tree", "datasets.make_circles(n_samples=n,noise=0.05,factor=0.5) yplot = xplot*0.0 elif option=='blobs': X, y = datasets.make_blobs(n_samples=n,centers=[[-5,3],[5,-3]],cluster_std=2.0) yplot = xplot*0.0", "plt.figure() plt.scatter(XB[P==1,0],XB[P==1,1],marker='^',color='blue',label='True') plt.scatter(XB[P==0,0],XB[P==0,1],marker='x',color='red',label='False') plt.scatter(XB[P!=yB,0],XB[P!=yB,1],marker='s',color='orange',alpha=0.5,label='Incorrect') if option not in ['moons','circles','blobs']: plt.plot(xplot,yplot,'k.',label='Division') plt.legend() # Supervised", "r<=0: yplot[i] = np.nan else: j = not j # plot both sides", "from 
sklearn.tree import DecisionTreeClassifier dtree = DecisionTreeClassifier(max_depth=10,random_state=101,max_features=None,\\ min_samples_leaf=5) dtree.fit(XA,yA) yP = dtree.predict(XB) assess(yP)", "False yplot = np.empty(100) for i,x in enumerate(xplot): r = 0.1-(x-0.5)**2 if r<=0:", "SGDClassifier(loss='modified_huber', shuffle=True,random_state=101) sgd.fit(XA,yA) yP = sgd.predict(XB) assess(yP) # K-Nearest Neighbors from sklearn.neighbors import", "metrics from sklearn.model_selection import train_test_split import matplotlib.pyplot as plt import numpy as np", "(50% each) X_train, X_test, y_train, y_test = train_test_split( data, digits.target, test_size=0.5, shuffle=False) #", "# K-Means Clustering from sklearn.cluster import KMeans km = KMeans(n_clusters=2) km.fit(XA) yP =", "clf.predict(XB) assess(yP) # Unsupervised Classification # K-Means Clustering from sklearn.cluster import KMeans km", "cmap=plt.cm.gray_r, interpolation='nearest') print('Predicted: ' + str(classifier.predict(digits.data[n:n+1])[0])) # Select Option by Number # 0", "= gmm.predict_proba(XB) # produces probabilities # Arbitrary labels with unsupervised clustering may need", "> n/4: yP = 1 - yP assess(np.round(yP[:,0])) # Spectral Clustering from sklearn.cluster", "= svm.SVC(gamma=0.001) # Split into train and test subsets (50% each) X_train, X_test,", "# generate data data_options = ['linear','quadratic','target','moons','circles','blobs'] option = data_options[select_option] # number of data", "Logistic Regression from sklearn.linear_model import LogisticRegression lr = LogisticRegression(solver='lbfgs') lr.fit(XA,yA) yP = lr.predict(XB)", "+ str(classifier.predict(digits.data[n:n+1])[0])) # Select Option by Number # 0 = Linear, 1 =", "y, test_size=0.5, shuffle=False) # Plot regression results def assess(P): plt.figure() plt.scatter(XB[P==1,0],XB[P==1,1],marker='^',color='blue',label='True') plt.scatter(XB[P==0,0],XB[P==0,1],marker='x',color='red',label='False') plt.scatter(XB[P!=yB,0],XB[P!=yB,1],marker='s',color='orange',alpha=0.5,label='Incorrect')", "Moons, 4 = Concentric Circles, 5 = Distinct Clusters select_option = 5 #", "Classification # K-Means Clustering from sklearn.cluster import KMeans km = KMeans(n_clusters=2) km.fit(XA) yP", "Supervised Classification # Logistic Regression from sklearn.linear_model import LogisticRegression lr = LogisticRegression(solver='lbfgs') lr.fit(XA,yA)", "be reversed if len(XB[yP!=yB]) > n/4: yP = 1 - yP assess(yP) #", "enumerate(xplot): r = 0.1-(x-0.5)**2 if r<=0: yplot[i] = np.nan else: j = not", "No separation between fit and predict calls, need to fit and predict on", "XA, XB, yA, yB = train_test_split(X, y, test_size=0.5, shuffle=False) # Plot regression results", "# 3 = Moons, 4 = Concentric Circles, 5 = Distinct Clusters select_option", "= Linear, 1 = Quadratic, 2 = Inner Target # 3 = Moons,", "# produces probabilities # Arbitrary labels with unsupervised clustering may need to be", "for i in range(n)]) yplot = xplot**2 elif option=='target': y = np.array([False if", "sklearn.cluster import SpectralClustering sc = SpectralClustering(n_clusters=2,eigen_solver='arpack',\\ affinity='nearest_neighbors') yP = sc.fit_predict(XB) # No separation", "numpy as np # The digits dataset digits = datasets.load_digits() # Flatten the", "shuffle=True,random_state=101) sgd.fit(XA,yA) yP = sgd.predict(XB) assess(yP) # K-Nearest Neighbors from sklearn.neighbors import KNeighborsClassifier", "= Moons, 4 = Concentric Circles, 5 = Distinct Clusters select_option = 5", "= 
# Select Option by Number
# 0 = Linear, 1 = Quadratic, 2 = Inner Target
# 3 = Moons, 4 = Concentric Circles, 5 = Distinct Clusters
select_option = 5

# generate data
data_options = ['linear','quadratic','target','moons','circles','blobs']
option = data_options[select_option]
n = 2000  # number of data points
X = np.random.random((n,2))
mixing = 0.0  # add random mixing element to data
xplot = np.linspace(0,1,100)

if option=='linear':
    y = np.array([False if (X[i,0]+X[i,1])>=(1.0+mixing/2-np.random.rand()*mixing)
                  else True for i in range(n)])
    yplot = 1-xplot
elif option=='quadratic':
    y = np.array([False if X[i,0]**2>=X[i,1]+(np.random.rand()-0.5)*mixing
                  else True for i in range(n)])
    yplot = xplot**2
elif option=='target':
    y = np.array([False if (X[i,0]-0.5)**2+(X[i,1]-0.5)**2<=0.1+(np.random.rand()-0.5)*0.2*mixing
                  else True for i in range(n)])
    j = False
    yplot = np.empty(100)
    for i,x in enumerate(xplot):
        r = 0.1-(x-0.5)**2
        if r<=0:
            yplot[i] = np.nan
        else:
            j = not j  # plot both sides of circle
            yplot[i] = (2*j-1)*np.sqrt(r)+0.5
elif option=='moons':
    X, y = datasets.make_moons(n_samples=n, noise=0.05)
    yplot = xplot*0.0
elif option=='circles':
    X, y = datasets.make_circles(n_samples=n, noise=0.05, factor=0.5)
    yplot = xplot*0.0
elif option=='blobs':
    X, y = datasets.make_blobs(n_samples=n, centers=[[-5,3],[5,-3]], cluster_std=2.0)
    yplot = xplot*0.0

plt.scatter(X[y>0.5,0], X[y>0.5,1], color='blue', marker='^', label='True')
plt.scatter(X[y<0.5,0], X[y<0.5,1], color='red', marker='x', label='False')
if option not in ['moons','circles','blobs']:
    plt.plot(xplot, yplot, 'k.', label='Division')
plt.legend()
plt.savefig(str(select_option)+'.png')
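# The label-building list comprehensions above loop in Python; the 'linear'
# branch has a one-line vectorized NumPy equivalent (shown commented out so
# the selected option's labels are untouched; an added note, not in the
# original script):
#   y = (X[:,0] + X[:,1]) < (1.0 + mixing/2 - np.random.rand(n)*mixing)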
"(X[i,0]+X[i,1])>=(1.0+mixing/2-np.random.rand()*mixing) else True for i in range(n)]) yplot = 1-xplot elif option=='quadratic': y", "yplot = xplot*0.0 elif option=='circles': X, y = datasets.make_circles(n_samples=n,noise=0.05,factor=0.5) yplot = xplot*0.0 elif", "n/4: yP = 1 - yP assess(yP) # Gaussian Mixture Model from sklearn.mixture", "digits = datasets.load_digits() # Flatten the image to apply classifier n_samples = len(digits.images)", "clustering may need to be reversed if len(XB[yP!=yB]) > n/4: yP = 1", "= train_test_split( data, digits.target, test_size=0.5, shuffle=False) # Learn the digits on the first", "= datasets.make_circles(n_samples=n,noise=0.05,factor=0.5) yplot = xplot*0.0 elif option=='blobs': X, y = datasets.make_blobs(n_samples=n,centers=[[-5,3],[5,-3]],cluster_std=2.0) yplot =", "elif option=='target': y = np.array([False if (X[i,0]-0.5)**2+(X[i,1]-0.5)**2<=0.1 +(np.random.rand()-0.5)*0.2*mixing else True for i in", "= np.array([False if (X[i,0]-0.5)**2+(X[i,1]-0.5)**2<=0.1 +(np.random.rand()-0.5)*0.2*mixing else True for i in range(n)]) j =", "yplot = xplot**2 elif option=='target': y = np.array([False if (X[i,0]-0.5)**2+(X[i,1]-0.5)**2<=0.1 +(np.random.rand()-0.5)*0.2*mixing else True", "# No separation between fit and predict calls, need to fit and predict", "Neural Network from sklearn.neural_network import MLPClassifier clf = MLPClassifier(solver='lbfgs',alpha=1e-5,max_iter=200,\\ activation='relu',hidden_layer_sizes=(10,30,10),\\ random_state=1, shuffle=True) clf.fit(XA,yA)", "lr.predict(XB) assess(yP) # Naïve Bayes from sklearn.naive_bayes import GaussianNB nb = GaussianNB() nb.fit(XA,yA)", "into train and test subsets (50% each) X_train, X_test, y_train, y_test = train_test_split(", "> n/4: yP = 1 - yP assess(yP) # Gaussian Mixture Model from", "on second half of data n = np.random.randint(int(n_samples/2),n_samples) plt.imshow(digits.images[n], cmap=plt.cm.gray_r, interpolation='nearest') print('Predicted: '", "from sklearn import datasets, svm, metrics from sklearn.model_selection import train_test_split import matplotlib.pyplot as", "option=='blobs': X, y = datasets.make_blobs(n_samples=n,centers=[[-5,3],[5,-3]],cluster_std=2.0) yplot = xplot*0.0 plt.scatter(X[y>0.5,0],X[y>0.5,1],color='blue',marker='^',label='True') plt.scatter(X[y<0.5,0],X[y<0.5,1],color='red',marker='x',label='False') if option not", "SpectralClustering(n_clusters=2,eigen_solver='arpack',\\ affinity='nearest_neighbors') yP = sc.fit_predict(XB) # No separation between fit and predict calls,", "0 = Linear, 1 = Quadratic, 2 = Inner Target # 3 =", "as plt import numpy as np # The digits dataset digits = datasets.load_digits()", "test subsets (50% each) XA, XB, yA, yB = train_test_split(X, y, test_size=0.5, shuffle=False)", "= KMeans(n_clusters=2) km.fit(XA) yP = km.predict(XB) # Arbitrary labels with unsupervised clustering may", "= nb.predict(XB) assess(yP) # Stochastic Gradient Descent from sklearn.linear_model import SGDClassifier sgd =", "nb.predict(XB) assess(yP) # Stochastic Gradient Descent from sklearn.linear_model import SGDClassifier sgd = SGDClassifier(loss='modified_huber',", "assess(yP) # Gaussian Mixture Model from sklearn.mixture import GaussianMixture gmm = GaussianMixture(n_components=2) gmm.fit(XA)", "in range(n)]) j = False yplot = np.empty(100) for i,x in enumerate(xplot): r", "if option not in ['moons','circles','blobs']: plt.plot(xplot,yplot,'k.',label='Division') plt.legend() # Supervised Classification # Logistic Regression", "# Learn the digits on the first half 
of the digits classifier.fit(X_train, y_train)", "may need to be reversed if len(XB[yP!=yB]) > n/4: yP = 1 -", "Gaussian Mixture Model from sklearn.mixture import GaussianMixture gmm = GaussianMixture(n_components=2) gmm.fit(XA) yP =", "1 - yP assess(np.round(yP[:,0])) # Spectral Clustering from sklearn.cluster import SpectralClustering sc =", "sklearn import datasets, svm, metrics from sklearn.model_selection import train_test_split import matplotlib.pyplot as plt", "GaussianMixture gmm = GaussianMixture(n_components=2) gmm.fit(XA) yP = gmm.predict_proba(XB) # produces probabilities # Arbitrary", "km = KMeans(n_clusters=2) km.fit(XA) yP = km.predict(XB) # Arbitrary labels with unsupervised clustering", "yP = knn.predict(XB) assess(yP) # Decision Tree from sklearn.tree import DecisionTreeClassifier dtree =", "Random Forest from sklearn.ensemble import RandomForestClassifier rfm = RandomForestClassifier(n_estimators=70,oob_score=True,n_jobs=1,\\ random_state=101,max_features=None,min_samples_leaf=3) rfm.fit(XA,yA) yP =", "= dtree.predict(XB) assess(yP) # Random Forest from sklearn.ensemble import RandomForestClassifier rfm = RandomForestClassifier(n_estimators=70,oob_score=True,n_jobs=1,\\", "yP = nb.predict(XB) assess(yP) # Stochastic Gradient Descent from sklearn.linear_model import SGDClassifier sgd", "GaussianNB() nb.fit(XA,yA) yP = nb.predict(XB) assess(yP) # Stochastic Gradient Descent from sklearn.linear_model import", "plt.scatter(X[y<0.5,0],X[y<0.5,1],color='red',marker='x',label='False') if option not in ['moons','circles','blobs']: plt.plot(xplot,yplot,'k.',label='Division') plt.legend() plt.savefig(str(select_option)+'.png') # Split into train", "yplot = xplot*0.0 elif option=='blobs': X, y = datasets.make_blobs(n_samples=n,centers=[[-5,3],[5,-3]],cluster_std=2.0) yplot = xplot*0.0 plt.scatter(X[y>0.5,0],X[y>0.5,1],color='blue',marker='^',label='True')", "and test subsets (50% each) XA, XB, yA, yB = train_test_split(X, y, test_size=0.5,", "range(n)]) yplot = 1-xplot elif option=='quadratic': y = np.array([False if X[i,0]**2>=X[i,1]+(np.random.rand()-0.5)\\ *mixing else", "= lr.predict(XB) assess(yP) # Naïve Bayes from sklearn.naive_bayes import GaussianNB nb = GaussianNB()", "classifier.fit(X_train, y_train) n_samples/2 # test on second half of data n = np.random.randint(int(n_samples/2),n_samples)", "= knn.predict(XB) assess(yP) # Decision Tree from sklearn.tree import DecisionTreeClassifier dtree = DecisionTreeClassifier(max_depth=10,random_state=101,max_features=None,\\", "sklearn.cluster import KMeans km = KMeans(n_clusters=2) km.fit(XA) yP = km.predict(XB) # Arbitrary labels", "km.predict(XB) # Arbitrary labels with unsupervised clustering may need to be reversed if", "km.fit(XA) yP = km.predict(XB) # Arbitrary labels with unsupervised clustering may need to", "DecisionTreeClassifier(max_depth=10,random_state=101,max_features=None,\\ min_samples_leaf=5) dtree.fit(XA,yA) yP = dtree.predict(XB) assess(yP) # Random Forest from sklearn.ensemble import", "classifier = svm.SVC(gamma=0.001) # Split into train and test subsets (50% each) X_train,", "# Flatten the image to apply classifier n_samples = len(digits.images) data = digits.images.reshape((n_samples,", "range(n)]) yplot = xplot**2 elif option=='target': y = np.array([False if (X[i,0]-0.5)**2+(X[i,1]-0.5)**2<=0.1 +(np.random.rand()-0.5)*0.2*mixing else", "= KNeighborsClassifier(n_neighbors=5) knn.fit(XA,yA) yP = knn.predict(XB) assess(yP) # Decision Tree from sklearn.tree import", "data n = 
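# assess() only plots; a numeric companion (an added helper, not in the
# original script) gives each model a single accuracy figure, e.g.
# print('accuracy: %.3f' % accuracy(yP)) after any assess(yP) call below.
def accuracy(P):
    # fraction of test points whose predicted label matches yB
    return np.mean(np.asarray(P) == yB)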
# Supervised Classification

# Logistic Regression
from sklearn.linear_model import LogisticRegression
lr = LogisticRegression(solver='lbfgs')
lr.fit(XA, yA)
yP = lr.predict(XB)
assess(yP)

# Naïve Bayes
from sklearn.naive_bayes import GaussianNB
nb = GaussianNB()
nb.fit(XA, yA)
yP = nb.predict(XB)
assess(yP)

# Stochastic Gradient Descent
from sklearn.linear_model import SGDClassifier
sgd = SGDClassifier(loss='modified_huber', shuffle=True, random_state=101)
sgd.fit(XA, yA)
yP = sgd.predict(XB)
assess(yP)

# K-Nearest Neighbors
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(XA, yA)
yP = knn.predict(XB)
assess(yP)

# Decision Tree
from sklearn.tree import DecisionTreeClassifier
dtree = DecisionTreeClassifier(max_depth=10, random_state=101,
                               max_features=None, min_samples_leaf=5)
dtree.fit(XA, yA)
yP = dtree.predict(XB)
assess(yP)

# Random Forest
from sklearn.ensemble import RandomForestClassifier
rfm = RandomForestClassifier(n_estimators=70, oob_score=True, n_jobs=1,
                             random_state=101, max_features=None,
                             min_samples_leaf=3)
rfm.fit(XA, yA)
yP = rfm.predict(XB)
assess(yP)
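# With oob_score=True the fitted forest also carries an out-of-bag estimate
# of generalization accuracy (an added print, not in the original script):
print('Random Forest OOB score: %.3f' % rfm.oob_score_)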
# Support Vector Classifier
# note: this rebinds the name svm, shadowing the module imported at the top
from sklearn.svm import SVC
svm = SVC(gamma='scale', C=1.0, random_state=101)
svm.fit(XA, yA)
yP = svm.predict(XB)
assess(yP)

# Neural Network
from sklearn.neural_network import MLPClassifier
clf = MLPClassifier(solver='lbfgs', alpha=1e-5, max_iter=200,
                    activation='relu', hidden_layer_sizes=(10,30,10),
                    random_state=1, shuffle=True)
clf.fit(XA, yA)
yP = clf.predict(XB)
assess(yP)
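# The eight supervised models above repeat the same fit/predict/assess
# pattern; the comparison collapses into a loop over the fitted models
# (an added sketch, not in the original script):
for name, model in [('Logistic Regression', lr), ('Naive Bayes', nb),
                    ('SGD', sgd), ('k-NN', knn), ('Decision Tree', dtree),
                    ('Random Forest', rfm), ('SVC', svm), ('MLP', clf)]:
    print('%s accuracy: %.3f' % (name, np.mean(model.predict(XB) == yB)))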
# Unsupervised Classification

# K-Means Clustering
from sklearn.cluster import KMeans
km = KMeans(n_clusters=2)
km.fit(XA)
yP = km.predict(XB)
# Arbitrary labels with unsupervised clustering may need to be reversed
if len(XB[yP!=yB]) > n/4:
    yP = 1 - yP
assess(yP)

# Gaussian Mixture Model
from sklearn.mixture import GaussianMixture
gmm = GaussianMixture(n_components=2)
gmm.fit(XA)
yP = gmm.predict_proba(XB)  # produces probabilities
# Arbitrary labels with unsupervised clustering may need to be reversed
if len(XB[np.round(yP[:,0])!=yB]) > n/4:
    yP = 1 - yP
assess(np.round(yP[:,0]))

# Spectral Clustering
from sklearn.cluster import SpectralClustering
sc = SpectralClustering(n_clusters=2, eigen_solver='arpack',
                        affinity='nearest_neighbors')
yP = sc.fit_predict(XB)
# No separation between fit and predict calls: need to fit and predict
# on the same dataset
# Arbitrary labels with unsupervised clustering may need to be reversed
if len(XB[yP!=yB]) > n/4:
    yP = 1 - yP
assess(yP)
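# The same label-reversal guard appears three times above (n/4 is half of
# the n/2-point test set); a reusable helper capturing it (an added sketch,
# not in the original script) aligns arbitrary 0/1 cluster labels with the
# known test labels before plotting:
def align_labels(P, truth):
    # flip the cluster labels when more than half disagree with truth
    P = np.asarray(P)
    return 1 - P if np.mean(P != truth) > 0.5 else P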
[ "Exception as e: # We don't need the result log.error(\"Feil ved precaching av", "coding: utf-8 -*- ################################################################################################# import logging import urllib import requests from resources.lib.util import", "self.xbmc_password = result['result']['value'] except (TypeError, KeyError): pass def cache_texture(self, url): # Cache a", "result['result']['value'] except (TypeError, KeyError): pass def cache_texture(self, url): # Cache a single image", "as e: # We don't need the result log.error(\"Feil ved precaching av fil", "text): text = self.single_urlencode(text) text = self.single_urlencode(text) return text @classmethod def single_urlencode(cls, text):", "= self.single_urlencode(text) return text @classmethod def single_urlencode(cls, text): # urlencode needs a utf-", "calling it at the http endpoint requests.head(url=(action_url), auth=(self.xbmc_username, self.xbmc_password), timeout=1) except Exception as", "# Add image to texture cache by simply calling it at the http", "except Exception as e: # We don't need the result log.error(\"Feil ved precaching", "KeyError): pass def cache_texture(self, url): # Cache a single image url to the", "to the texture cache if url: log.debug(\"Processing: %s\", url) url = self._double_urlencode(url) action_url", "################################################################################################## log = logging.getLogger(\"DINGS.\"+__name__) ################################################################################################## class Artwork(object): xbmc_host = 'localhost' xbmc_port = None", "unicode def _set_webserver_details(self): # Get the Kodi webserver details - used to set", "result['result']['value'] except (TypeError, KeyError): pass web_pass = { \"setting\": \"services.webserverpassword\" } result =", "{ \"setting\": \"services.webserverusername\" } result = get_setting_value.execute(web_user) try: self.xbmc_username = result['result']['value'] except (TypeError,", "if url: log.debug(\"Processing: %s\", url) url = self._double_urlencode(url) action_url = \"http://%s:%s/image/image://%s\" % (self.xbmc_host,", "xbmc_port = None xbmc_username = None xbmc_password = None def __init__(self): if not", "get_setting_value.execute(web_pass) try: self.xbmc_password = result['result']['value'] except (TypeError, KeyError): pass def cache_texture(self, url): #", "(TypeError, KeyError): pass def cache_texture(self, url): # Cache a single image url to", "log = logging.getLogger(\"DINGS.\"+__name__) ################################################################################################## class Artwork(object): xbmc_host = 'localhost' xbmc_port = None xbmc_username", "to texture cache by simply calling it at the http endpoint requests.head(url=(action_url), auth=(self.xbmc_username,", "_set_webserver_details(self): # Get the Kodi webserver details - used to set the texture", "= text[13:] return text.decode('utf-8') #return the result again as unicode def _set_webserver_details(self): #", "= self.single_urlencode(text) text = self.single_urlencode(text) return text @classmethod def single_urlencode(cls, text): # urlencode", "the result log.error(\"Feil ved precaching av fil %s med feilmelding %s\", action_url, e.message)", "get_setting_value = JSONRPC('Settings.GetSettingValue') web_query = { \"setting\": \"services.webserver\" } result = get_setting_value.execute(web_query) try:", "xbmc_webserver_enabled: # Enable the webserver, it is disabled 
set_setting_value = JSONRPC('Settings.SetSettingValue') web_port =", "result['result']['value'] except (TypeError, KeyError): pass web_user = { \"setting\": \"services.webserverusername\" } result =", "################################################################################################## class Artwork(object): xbmc_host = 'localhost' xbmc_port = None xbmc_username = None xbmc_password", "# We don't need the result log.error(\"Feil ved precaching av fil %s med", "requests from resources.lib.util import JSONRPC ################################################################################################## log = logging.getLogger(\"DINGS.\"+__name__) ################################################################################################## class Artwork(object): xbmc_host", "\"setting\": \"services.webserver\" } result = get_setting_value.execute(web_query) try: xbmc_webserver_enabled = result['result']['value'] except (KeyError, TypeError):", "= False if not xbmc_webserver_enabled: # Enable the webserver, it is disabled set_setting_value", "\"services.webserver\", \"value\": True } set_setting_value.execute(web_user) self.xbmc_username = \"kodi\" else: # Webserver already enabled", "except (TypeError, KeyError): pass def cache_texture(self, url): # Cache a single image url", "Artwork(object): xbmc_host = 'localhost' xbmc_port = None xbmc_username = None xbmc_password = None", "= get_setting_value.execute(web_port) try: self.xbmc_port = result['result']['value'] except (TypeError, KeyError): pass web_user = {", "-*- coding: utf-8 -*- ################################################################################################# import logging import urllib import requests from resources.lib.util", "text @classmethod def single_urlencode(cls, text): # urlencode needs a utf- string text =", "text = self.single_urlencode(text) return text @classmethod def single_urlencode(cls, text): # urlencode needs a", "pass def cache_texture(self, url): # Cache a single image url to the texture", "= urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text = text[13:] return text.decode('utf-8') #return the result again as", "8080 } set_setting_value.execute(web_port) self.xbmc_port = 8080 web_user = { \"setting\": \"services.webserver\", \"value\": True", "url): # Cache a single image url to the texture cache if url:", "except (TypeError, KeyError): pass web_pass = { \"setting\": \"services.webserverpassword\" } result = get_setting_value.execute(web_pass)", "def _double_urlencode(self, text): text = self.single_urlencode(text) text = self.single_urlencode(text) return text @classmethod def", "utf- string text = urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text = text[13:] return text.decode('utf-8') #return the", "texture cache if url: log.debug(\"Processing: %s\", url) url = self._double_urlencode(url) action_url = \"http://%s:%s/image/image://%s\"", "to set the texture cache get_setting_value = JSONRPC('Settings.GetSettingValue') web_query = { \"setting\": \"services.webserver\"", "import requests from resources.lib.util import JSONRPC ################################################################################################## log = logging.getLogger(\"DINGS.\"+__name__) ################################################################################################## class Artwork(object):", "url) try: # Add image to texture cache by simply calling it at", "resources.lib.util import JSONRPC 
################################################################################################## log = logging.getLogger(\"DINGS.\"+__name__) ################################################################################################## class Artwork(object): xbmc_host = 'localhost'", "url) url = self._double_urlencode(url) action_url = \"http://%s:%s/image/image://%s\" % (self.xbmc_host, self.xbmc_port, url) try: #", "def __init__(self): if not self.xbmc_port: self._set_webserver_details() def _double_urlencode(self, text): text = self.single_urlencode(text) text", "disabled set_setting_value = JSONRPC('Settings.SetSettingValue') web_port = { \"setting\": \"services.webserverport\", \"value\": 8080 } set_setting_value.execute(web_port)", "= \"http://%s:%s/image/image://%s\" % (self.xbmc_host, self.xbmc_port, url) try: # Add image to texture cache", "\"http://%s:%s/image/image://%s\" % (self.xbmc_host, self.xbmc_port, url) try: # Add image to texture cache by", "except (KeyError, TypeError): xbmc_webserver_enabled = False if not xbmc_webserver_enabled: # Enable the webserver,", "if not self.xbmc_port: self._set_webserver_details() def _double_urlencode(self, text): text = self.single_urlencode(text) text = self.single_urlencode(text)", "texture cache by simply calling it at the http endpoint requests.head(url=(action_url), auth=(self.xbmc_username, self.xbmc_password),", "} result = get_setting_value.execute(web_user) try: self.xbmc_username = result['result']['value'] except (TypeError, KeyError): pass web_pass", "} set_setting_value.execute(web_user) self.xbmc_username = \"kodi\" else: # Webserver already enabled web_port = {", "JSONRPC('Settings.SetSettingValue') web_port = { \"setting\": \"services.webserverport\", \"value\": 8080 } set_setting_value.execute(web_port) self.xbmc_port = 8080", "= result['result']['value'] except (TypeError, KeyError): pass def cache_texture(self, url): # Cache a single", "} result = get_setting_value.execute(web_port) try: self.xbmc_port = result['result']['value'] except (TypeError, KeyError): pass web_user", "pass web_pass = { \"setting\": \"services.webserverpassword\" } result = get_setting_value.execute(web_pass) try: self.xbmc_password =", "# Cache a single image url to the texture cache if url: log.debug(\"Processing:", "= JSONRPC('Settings.GetSettingValue') web_query = { \"setting\": \"services.webserver\" } result = get_setting_value.execute(web_query) try: xbmc_webserver_enabled", "text = urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text = text[13:] return text.decode('utf-8') #return the result again", "KeyError): pass web_pass = { \"setting\": \"services.webserverpassword\" } result = get_setting_value.execute(web_pass) try: self.xbmc_password", "= get_setting_value.execute(web_query) try: xbmc_webserver_enabled = result['result']['value'] except (KeyError, TypeError): xbmc_webserver_enabled = False if", "simply calling it at the http endpoint requests.head(url=(action_url), auth=(self.xbmc_username, self.xbmc_password), timeout=1) except Exception", "it at the http endpoint requests.head(url=(action_url), auth=(self.xbmc_username, self.xbmc_password), timeout=1) except Exception as e:", "@classmethod def single_urlencode(cls, text): # urlencode needs a utf- string text = urllib.urlencode({'blahblahblah':", "as unicode def _set_webserver_details(self): # Get the Kodi webserver details - used to", "= { \"setting\": \"services.webserverusername\" } result = get_setting_value.execute(web_user) try: self.xbmc_username = 
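    # urllib.urlencode above is Python 2 only; under Python 3 the same
    # single-pass encoding could use urllib.parse.quote (an added sketch
    # under that assumption, not part of the original module; note quote()
    # encodes spaces as %20 where urlencode would produce '+'):
    @classmethod
    def single_urlencode_py3(cls, text):
        from urllib.parse import quote
        # safe='' percent-encodes every reserved character, including '/'
        return quote(text, safe='')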
    def _set_webserver_details(self):
        # Get the Kodi webserver details - used to set the texture cache
        get_setting_value = JSONRPC('Settings.GetSettingValue')

        web_query = {"setting": "services.webserver"}
        result = get_setting_value.execute(web_query)
        try:
            xbmc_webserver_enabled = result['result']['value']
        except (KeyError, TypeError):
            xbmc_webserver_enabled = False

        if not xbmc_webserver_enabled:
            # Enable the webserver, it is disabled
            set_setting_value = JSONRPC('Settings.SetSettingValue')
            web_port = {"setting": "services.webserverport", "value": 8080}
            set_setting_value.execute(web_port)
            self.xbmc_port = 8080

            web_user = {"setting": "services.webserver", "value": True}
            set_setting_value.execute(web_user)
            self.xbmc_username = "kodi"
        else:
            # Webserver already enabled
            web_port = {"setting": "services.webserverport"}
            result = get_setting_value.execute(web_port)
            try:
                self.xbmc_port = result['result']['value']
            except (TypeError, KeyError):
                pass

            web_user = {"setting": "services.webserverusername"}
            result = get_setting_value.execute(web_user)
            try:
                self.xbmc_username = result['result']['value']
            except (TypeError, KeyError):
                pass

            web_pass = {"setting": "services.webserverpassword"}
            result = get_setting_value.execute(web_pass)
            try:
                self.xbmc_password = result['result']['value']
            except (TypeError, KeyError):
                pass
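    # The three try/except lookups above repeat one pattern; a compact
    # helper capturing it (an added refactor sketch, not part of the
    # original module):
    def _get_setting(self, name, default=None):
        # Read a single Kodi setting value, returning default on a
        # missing or malformed JSON-RPC reply
        result = JSONRPC('Settings.GetSettingValue').execute({"setting": name})
        try:
            return result['result']['value']
        except (TypeError, KeyError):
            return default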
\"setting\": \"services.webserver\" } result =", "string text = urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text = text[13:] return text.decode('utf-8') #return the result", "None xbmc_password = None def __init__(self): if not self.xbmc_port: self._set_webserver_details() def _double_urlencode(self, text):", "set_setting_value.execute(web_user) self.xbmc_username = \"kodi\" else: # Webserver already enabled web_port = { \"setting\":", "Webserver already enabled web_port = { \"setting\": \"services.webserverport\" } result = get_setting_value.execute(web_port) try:", "needs a utf- string text = urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text = text[13:] return text.decode('utf-8')", "TypeError): xbmc_webserver_enabled = False if not xbmc_webserver_enabled: # Enable the webserver, it is", "We don't need the result log.error(\"Feil ved precaching av fil %s med feilmelding", "import urllib import requests from resources.lib.util import JSONRPC ################################################################################################## log = logging.getLogger(\"DINGS.\"+__name__) ##################################################################################################", "get_setting_value.execute(web_port) try: self.xbmc_port = result['result']['value'] except (TypeError, KeyError): pass web_user = { \"setting\":", "xbmc_password = None def __init__(self): if not self.xbmc_port: self._set_webserver_details() def _double_urlencode(self, text): text", "self.xbmc_username = result['result']['value'] except (TypeError, KeyError): pass web_pass = { \"setting\": \"services.webserverpassword\" }", "again as unicode def _set_webserver_details(self): # Get the Kodi webserver details - used", "single_urlencode(cls, text): # urlencode needs a utf- string text = urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text", "= { \"setting\": \"services.webserver\", \"value\": True } set_setting_value.execute(web_user) self.xbmc_username = \"kodi\" else: #", "self._set_webserver_details() def _double_urlencode(self, text): text = self.single_urlencode(text) text = self.single_urlencode(text) return text @classmethod", "self.xbmc_username = \"kodi\" else: # Webserver already enabled web_port = { \"setting\": \"services.webserverport\"", "= \"kodi\" else: # Webserver already enabled web_port = { \"setting\": \"services.webserverport\" }", "get_setting_value.execute(web_user) try: self.xbmc_username = result['result']['value'] except (TypeError, KeyError): pass web_pass = { \"setting\":", "Kodi webserver details - used to set the texture cache get_setting_value = JSONRPC('Settings.GetSettingValue')", "\"services.webserverport\", \"value\": 8080 } set_setting_value.execute(web_port) self.xbmc_port = 8080 web_user = { \"setting\": \"services.webserver\",", "auth=(self.xbmc_username, self.xbmc_password), timeout=1) except Exception as e: # We don't need the result", "a single image url to the texture cache if url: log.debug(\"Processing: %s\", url)", "single image url to the texture cache if url: log.debug(\"Processing: %s\", url) url", "= None def __init__(self): if not self.xbmc_port: self._set_webserver_details() def _double_urlencode(self, text): text =", "\"services.webserverusername\" } result = get_setting_value.execute(web_user) try: self.xbmc_username = result['result']['value'] except (TypeError, KeyError): pass", "\"value\": True } set_setting_value.execute(web_user) self.xbmc_username = \"kodi\" else: # Webserver already enabled web_port", "= 
get_setting_value.execute(web_user) try: self.xbmc_username = result['result']['value'] except (TypeError, KeyError): pass web_pass = {", "return text @classmethod def single_urlencode(cls, text): # urlencode needs a utf- string text", "webserver details - used to set the texture cache get_setting_value = JSONRPC('Settings.GetSettingValue') web_query", "= result['result']['value'] except (TypeError, KeyError): pass web_user = { \"setting\": \"services.webserverusername\" } result", "not self.xbmc_port: self._set_webserver_details() def _double_urlencode(self, text): text = self.single_urlencode(text) text = self.single_urlencode(text) return", "= get_setting_value.execute(web_pass) try: self.xbmc_password = result['result']['value'] except (TypeError, KeyError): pass def cache_texture(self, url):", "web_port = { \"setting\": \"services.webserverport\", \"value\": 8080 } set_setting_value.execute(web_port) self.xbmc_port = 8080 web_user", "urlencode needs a utf- string text = urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text = text[13:] return", "text.encode('utf-8')}) text = text[13:] return text.decode('utf-8') #return the result again as unicode def", "result = get_setting_value.execute(web_pass) try: self.xbmc_password = result['result']['value'] except (TypeError, KeyError): pass def cache_texture(self,", "utf-8 -*- ################################################################################################# import logging import urllib import requests from resources.lib.util import JSONRPC", "################################################################################################# import logging import urllib import requests from resources.lib.util import JSONRPC ################################################################################################## log", "- used to set the texture cache get_setting_value = JSONRPC('Settings.GetSettingValue') web_query = {", "set_setting_value = JSONRPC('Settings.SetSettingValue') web_port = { \"setting\": \"services.webserverport\", \"value\": 8080 } set_setting_value.execute(web_port) self.xbmc_port", "set the texture cache get_setting_value = JSONRPC('Settings.GetSettingValue') web_query = { \"setting\": \"services.webserver\" }", "} result = get_setting_value.execute(web_pass) try: self.xbmc_password = result['result']['value'] except (TypeError, KeyError): pass def", "requests.head(url=(action_url), auth=(self.xbmc_username, self.xbmc_password), timeout=1) except Exception as e: # We don't need the", "class Artwork(object): xbmc_host = 'localhost' xbmc_port = None xbmc_username = None xbmc_password =", "xbmc_username = None xbmc_password = None def __init__(self): if not self.xbmc_port: self._set_webserver_details() def", "the texture cache get_setting_value = JSONRPC('Settings.GetSettingValue') web_query = { \"setting\": \"services.webserver\" } result", "# urlencode needs a utf- string text = urllib.urlencode({'blahblahblah': text.encode('utf-8')}) text = text[13:]", "{ \"setting\": \"services.webserverport\" } result = get_setting_value.execute(web_port) try: self.xbmc_port = result['result']['value'] except (TypeError,", "try: xbmc_webserver_enabled = result['result']['value'] except (KeyError, TypeError): xbmc_webserver_enabled = False if not xbmc_webserver_enabled:", "get_setting_value.execute(web_query) try: xbmc_webserver_enabled = result['result']['value'] except (KeyError, TypeError): xbmc_webserver_enabled = False if not", "\"setting\": \"services.webserver\", \"value\": True } 
set_setting_value.execute(web_user) self.xbmc_username = \"kodi\" else: # Webserver already", "= { \"setting\": \"services.webserverpassword\" } result = get_setting_value.execute(web_pass) try: self.xbmc_password = result['result']['value'] except", "image to texture cache by simply calling it at the http endpoint requests.head(url=(action_url),", "xbmc_webserver_enabled = False if not xbmc_webserver_enabled: # Enable the webserver, it is disabled", "webserver, it is disabled set_setting_value = JSONRPC('Settings.SetSettingValue') web_port = { \"setting\": \"services.webserverport\", \"value\":", "KeyError): pass web_user = { \"setting\": \"services.webserverusername\" } result = get_setting_value.execute(web_user) try: self.xbmc_username", "(TypeError, KeyError): pass web_user = { \"setting\": \"services.webserverusername\" } result = get_setting_value.execute(web_user) try:", "self.xbmc_port, url) try: # Add image to texture cache by simply calling it", "the Kodi webserver details - used to set the texture cache get_setting_value =" ]
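
# A hedged usage sketch (not part of the original module): batch-precache a
# list of artwork URLs. It assumes the module runs inside Kodi, since the
# JSONRPC helper above needs a live Kodi instance to answer the settings
# queries; the helper name and the example URL are illustrative only.
def precache_all(urls):
    artwork = Artwork()
    for url in urls:
        # Each call issues one HEAD request against Kodi's image endpoint,
        # which is enough to make Kodi pull the image into its texture cache.
        artwork.cache_texture(url)

# Example: precache_all(['http://example.com/fanart.jpg'])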
def makeMultiFrameSimFile(fname, numFrames):
    '''Read in multiple files with the name fname + number + .dat.
    Save all those frames to a single file with one header.
    '''
    import numpy as np
    import struct

    newFile = open(fname + 'allFrames.dat', 'wb')

    #Read in the first file and use this one to write the header.
    #Read in old file
    fIn = open(fname + str(0) + '.dat', 'rb')
    freqstep = float( np.fromfile(fIn, np.double, 1) )
    points = int( np.fromfile(fIn, np.int32, 1) )
    lines = int( np.fromfile(fIn, np.int32, 1) )
    tempReal = np.fromfile(fIn, np.double, points*lines)
    tempImag = np.fromfile(fIn, np.double, points*lines)
    fIn.close()

    #now write the header in the new file
    header = struct.pack('diii', freqstep, points, lines, int(numFrames))
    newFile.write(header)

    #also write the real then the imaginary part of the file into
    #the file with the frequency index increasing the fastest
    for l in range(lines):
        for p in range(points):
            newFile.write(struct.pack('d', float(tempReal[p + l*points])))
    for l in range(lines):
        for p in range(points):
            newFile.write(struct.pack('d', float(tempImag[p + l*points])))

    for f in range(1, numFrames):
        #Read in old file
        fIn = open(fname + str(f) + '.dat', 'rb')
        freqstep = float( np.fromfile(fIn, np.double, 1) )
        points = int( np.fromfile(fIn, np.int32, 1) )
        lines = int( np.fromfile(fIn, np.int32, 1) )
        tempReal = np.fromfile(fIn, np.double, points*lines)
        tempImag = np.fromfile(fIn, np.double, points*lines)
        fIn.close()

        #write the real then the imaginary part of the file into
        #the file with the frequency index increasing the fastest
        for l in range(lines):
            for p in range(points):
                newFile.write(struct.pack('d', float(tempReal[p + l*points])))
        for l in range(lines):
            for p in range(points):
                newFile.write(struct.pack('d', float(tempImag[p + l*points])))

    newFile.close()
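
# For reference, a hedged sketch of reading the combined file back. The 'diii'
# header layout and the frequency-index-fastest ordering mirror the writer
# above; the function name readMultiFrameSimFile itself is an illustrative
# assumption, not part of the original code.
def readMultiFrameSimFile(fname):
    '''Read fname + allFrames.dat and return (freqstep, list of complex frames).'''
    import numpy as np
    import struct

    fIn = open(fname + 'allFrames.dat', 'rb')
    headerSize = struct.calcsize('diii')
    freqstep, points, lines, numFrames = struct.unpack('diii', fIn.read(headerSize))
    frames = []
    for f in range(numFrames):
        # the writer emits all real values, then all imaginary values, with
        # the point (frequency) index increasing fastest within each line
        real = np.fromfile(fIn, np.double, points*lines).reshape(lines, points)
        imag = np.fromfile(fIn, np.double, points*lines).reshape(lines, points)
        frames.append(real + 1j*imag)
    fIn.close()
    return freqstep, frames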
#!/usr/bin/env python
# Copyright (C) 2004 British Broadcasting Corporation and Kamaelia Contributors(1)
#     All Rights Reserved.
#
# You may only modify and redistribute this under the terms of any of the
# following licenses(2): Mozilla Public License, V1.1, GNU General
# Public License, V2.0, GNU Lesser General Public License, V2.1
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
#     http://kamaelia.sourceforge.net/AUTHORS - please extend this file,
#     not this notice.
# (2) Reproduced in the COPYING file, and at:
#     http://kamaelia.sourceforge.net/COPYING
# Under section 3.5 of the MPL, we are using this text since we deem the MPL
# notice inappropriate for this file. As per MPL/GPL/LGPL removal of this
# notice is prohibited.
#
# Please contact us via: <EMAIL>
# to discuss alternative licensing.
# -------------------------------------------------------------------------
"""\
=========================================
Discrete time particle physics simulation
=========================================

A discrete time simulator of a system of bonded and unbonded particles, of
multiple types.

The actual physics calculations are deferred to the particles themselves. You
can have as many, or few, spatial dimensions as you like.


Example Usage
-------------

Create 3 particles, two of which are bonded and move noticeably closer after 5
cycles of simulation::

    >>> laws = SimpleLaws(bondLength=5)
    >>> sim = ParticleSystem(laws)
    >>> sim.add( Particle(position=(10,10)) )
    >>> sim.add( Particle(position=(10,20)) )
    >>> sim.add( Particle(position=(30,40)) )
    >>> sim.particles[0].makeBond(sim.particles, 1)   # bond 1st and 2nd particles
    >>> for p in sim.particles: print p.getLoc()
    ...
    (10, 10)
    (10, 20)
    (30, 40)
    >>> sim.run(cycles=5)
    >>> for p in sim.particles: print p.getLoc()
    ...
    [10.0, 13.940067328]
    [10.0, 16.059932671999999]
    [30, 40]
    >>>


How does it work?
-----------------

Set up ParticleSystem by instantiating, specifying the laws to act between
particles and an (optional) set of initial particles.

Particles should be derived from the Particle base class (or have equivalent
functionality).

Particles can be added or removed from the system by reference, or removed by
their ID.

ParticleSystem will work for particles in space with any number of dimensions -
so long as all particles use the same! Bonds between particles are up to the
particles to manage for themselves.

The simulation runs in cycles when the run(...) method is called. Each cycle
advances the 'tick' count by 1. The tick count starts at zero, unless otherwise
specified during initialization.

The following attributes store the particles registered in ParticleSystem:

- particles    -- simple list
- particleDict -- dictionary, indexed by particle.ID

ParticleSystem uses a SpatialIndexer object to speed up calculations.
SpatialIndexer reduces the search space when determining what particles lie
within a given region (radius of a point).

If your code changes the position of a particle, the simulator must be
informed, so it can update its spatial indexing data, by calling updateLoc(...)

The actual interactions between particles are calculated by the particles
themselves, *not* by ParticleSystem.

ParticleSystem calls the doInteractions(...) methods of all particles so they
can influence each other. It then calls the update(...) methods of all
particles so they can all update their positions and velocities ready for the
next cycle.

This is a two stage process so that, in a given cycle, all particles see each
other at the same positions, irrespective of which particle's doInteractions(...)
method is called first. Particles should not apply their velocities to update
their position until their update(...) method is called.
"""

from SpatialIndexer import SpatialIndexer


class ParticleSystem(object):
    """\
    ParticleSystem(laws[,initialParticles][,initialTick]) -> new ParticleSystem object

    Discrete time simulator for a system of particles.

    Keyword arguments:

    - initialParticles  -- list of particles (default=[])
    - initialTick       -- start value of the time 'tick' count (default=0)
    """

    def __init__(self, laws, initialParticles = [], initialTick = 0):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        self.indexer = SpatialIndexer(laws.maxInteractRadius)
        self.laws         = laws
        self.particles    = []
        self.tick         = initialTick
        self.particleDict = {}
        self.add(*initialParticles)

    def add(self, *newParticles):
        """Add the specified particle(s) into the system"""
        self.particles.extend(newParticles)
        for p in newParticles:
            self.particleDict[p.ID] = p
        self.indexer.updateLoc(*newParticles)

    def remove(self, *oldParticles):
        """\
        Remove the specified particle(s) from the system.

        Note that this method does not destroy bonds from other particles to
        these ones.
        """
        for particle in oldParticles:
            self.particles.remove(particle)
            del self.particleDict[particle.ID]
        self.indexer.remove(*oldParticles)

    def removeByID(self, *ids):
        """\
        Remove particle(s) as specified by id(s) from the system.

        Note that this method does not destroy bonds from other particles to
        these ones.
        """
        particles = [self.particleDict[id] for id in ids]
        self.remove( *particles )

    def updateLoc(self, *particles):
        """\
        Notify this physics system that the specified particle(s) have changed
        position.

        Must be called if you change a particle's position, before calling
        run().
        """
        self.indexer.updateLoc(*particles)

    def withinRadius(self, centre, radius, filter=(lambda particle:True)):
        """\
        withinRadius(centre,radius[,filter]) -> list of (particle,distSquared)

        Returns a list of zero or more (particle,distSquared) tuples. The
        particles listed are those within the specified radius of the specified
        centre point, and that passed the (optional) filter function:

        filter(particle) -> True if the particle is to be included in the list
        """
        return self.indexer.withinRadius(centre, radius, filter)

    def run(self, cycles = 1):
        """Run the simulation for a given number of cycles (default=1)"""
        # optimisation to speed up access to these functions:
        _indexer = self.indexer
        _laws    = self.laws
        while cycles > 0:
            cycles -= 1
            self.tick += 1
            _tick = self.tick
            for p in self.particles:
                p.doInteractions(_indexer, _laws, _tick)
            for p in self.particles:
                p.update(_laws)
            _indexer.updateAll()
The", "and at: # http://kamaelia.sourceforge.net/COPYING # Under section 3.5 of the MPL, we are", "the system\"\"\" self.particles.extend(newParticles) for p in newParticles: self.particleDict[p.ID] = p self.indexer.updateLoc(*newParticles) def remove(self,", "(2) Reproduced in the COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING # Under section", "as you like. Example Usage ------------- Create 3 particles, two of which are", "The following attributes store the particles registered in ParticleSystem: - particles -- simple", "p.getLoc() ... (10, 10) (10, 20) (30, 40) >>> sim.run(cycles=5) >>> for p", "prohibited. # # Please contact us via: <EMAIL> # to discuss alternative licensing.", "list of particles (default=[]) - initialTick -- start value of the time 'tick'", "[] self.tick = initialTick self.particleDict = {} self.add(*initialParticles) def add(self, *newParticles): \"\"\"Add the", "manage for themselves. The simulation runs in cycles when the run(...) method is", "between particles are calculated by the particles themselves, *not* by ParticleSystem. ParticleSystem calls", "-- simple list - particleDict -- dictionary, indexed by particle.ID ParticleSystem uses a", "particles so they can influence each other. It then calls the update(...) methods", "laws to act between particles and an (optional) set of initial particles. Particles", "is called first. Particles should not apply their velocities to update their position", "in ids] self.remove( *particles ) def updateLoc(self, *particles): \"\"\"\\ Notify this physics system", "1. The tick count starts at zero, unless otherwise specified during initialization. The", "- particles -- simple list - particleDict -- dictionary, indexed by particle.ID ParticleSystem", "space with any number of dimensions - so long as all particles use", "laws = SimpleLaws(bondLength=5) >>> sim = ParticleSystem(laws) >>> sim.add( Particle(position=(10,10)) ) >>> sim.add(", "removeByID(self, *ids): \"\"\"\\ Remove particle(s) as specified by id(s) from the system. Note", "-= 1 self.tick += 1 _tick = self.tick for p in self.particles: p.doInteractions(_indexer,", "Particles should be derived from the Particle base class (or have equivalent functionality).", "object to speed up calculations. SpatialIndexer reduce the search space when determining what", "up calculations. SpatialIndexer reduce the search space when determining what particles lie within", "ParticleSystem object Discrete time simulator for a system of particles. Keyword arguments: -", "p.getLoc() ... [10.0, 13.940067328] [10.0, 16.059932671999999] [30, 40] >>> How does it work?", "and unbonded particles, of multiple types. The actual physics calculations are deferred to", "use the same! Bonds between particles are up to the particles to manage", "You may only modify and redistribute this under the terms of any of", "time 'tick' count (default=0) \"\"\" def __init__(self, laws, initialParticles = [], initialTick =", "for p in sim.particles: print p.getLoc() ... [10.0, 13.940067328] [10.0, 16.059932671999999] [30, 40]", "search space when determining what particles lie within a given region (radius of", "particle:True)): \"\"\"\\ withinRadius(centre,radius[,filter]) -> list of (particle,distSquared) Returns a list of zero or", "does it work? ----------------- Set up ParticleSystem by instantiating, specifying the laws to", "of the time 'tick' count (default=0) \"\"\" def __init__(self, laws, initialParticles = [],", "have changed position. 
Must be called if you change a particle's position, before", "position. Must be called if you change a particle's position, before calling run().", "physics simulation ========================================= A discrete time simulator of a system of bonded and", "= [], initialTick = 0): \"\"\"x.__init__(...) initializes x; see x.__class__.__doc__ for signature\"\"\" self.indexer", "closer after 5 cycles of simulation:: >>> laws = SimpleLaws(bondLength=5) >>> sim =", "arguments: - initialParticles -- list of particles (default=[]) - initialTick -- start value", "self.tick += 1 _tick = self.tick for p in self.particles: p.doInteractions(_indexer, _laws, _tick)", "destroy bonds from other particles to these ones. \"\"\" particles = [self.particleDict[id] for", "filter) def run(self, cycles = 1): \"\"\"Run the simulation for a given number", "during initialization. The following attributes store the particles registered in ParticleSystem: - particles", "particles (default=[]) - initialTick -- start value of the time 'tick' count (default=0)", "'tick' count by 1. The tick count starts at zero, unless otherwise specified", "be included in the list \"\"\" return self.indexer.withinRadius(centre, radius, filter) def run(self, cycles", "# # (1) Kamaelia Contributors are listed in the AUTHORS file and at", "# # Please contact us via: <EMAIL> # to discuss alternative licensing. #", "particle(s) as specified by id(s) from the system. Note that this method does", "by instantiating, specifying the laws to act between particles and an (optional) set", "[], initialTick = 0): \"\"\"x.__init__(...) initializes x; see x.__class__.__doc__ for signature\"\"\" self.indexer =", "simulation for a given number of cycles (default=1)\"\"\" # optimisation to speed up", "1 _tick = self.tick for p in self.particles: p.doInteractions(_indexer, _laws, _tick) for p", "uses a SpatialIndexer object to speed up calculations. SpatialIndexer reduce the search space", "of all particles so they can influence each other. It then calls the", "for themselves. The simulation runs in cycles when the run(...) method is called.", "count starts at zero, unless otherwise specified during initialization. The following attributes store", "their ID. ParticleSystem will work for particles in space with any number of", "for particle in oldParticles: self.particles.remove(particle) del self.particleDict[particle.ID] self.indexer.remove(*oldParticles) def removeByID(self, *ids): \"\"\"\\ Remove", "in the list \"\"\" return self.indexer.withinRadius(centre, radius, filter) def run(self, cycles = 1):", "to be included in the list \"\"\" return self.indexer.withinRadius(centre, radius, filter) def run(self,", "calculated by the particles themselves, *not* by ParticleSystem. ParticleSystem calls the doInteractions(...) methods", "Discrete time particle physics simulation ========================================= A discrete time simulator of a system", "Particle(position=(10,10)) ) >>> sim.add( Particle(position=(10,20)) ) >>> sim.add( Particle(position=(30,40)) ) >>> sim.particles[0].makeBond(sim.particles, 1)", "you like. Example Usage ------------- Create 3 particles, two of which are bonded", "can update its spatial indexing data, by calling updateLoc(...) The actual interactions between", ">>> sim.particles[0].makeBond(sim.particles, 1) # bond 1st and 2nd particles >>> for p in", "and velocities ready for the next cycle. This is a two stage process", "id(s) from the system. 
Note that this method does not destroy bonds from", "sim.particles: print p.getLoc() ... (10, 10) (10, 20) (30, 40) >>> sim.run(cycles=5) >>>", "Returns a list of zero or more (particle,distSquared) tuples. The particles listed are", "when determining what particles lie within a given region (radius of a point).", "particles see each other at the same positions, irrespective of which particle's doInteractions(...)", "<EMAIL> # to discuss alternative licensing. # ------------------------------------------------------------------------- \"\"\"\\ ========================================= Discrete time particle", "(default=1)\"\"\" # optimisation to speed up access to these functions: _indexer = self.indexer", "be added or removed from the system by reference, or removed by their", "following attributes store the particles registered in ParticleSystem: - particles -- simple list", "License, V1.1, GNU General # Public License, V2.0, GNU Lesser General Public License,", "particles. Keyword arguments: - initialParticles -- list of particles (default=[]) - initialTick --", "system of bonded and unbonded particles, of multiple types. The actual physics calculations", "a given region (radius of a point). If your code changes the position", "of the # following licenses(2): Mozilla Public License, V1.1, GNU General # Public", "all particles use the same! Bonds between particles are up to the particles", "self.particleDict[particle.ID] self.indexer.remove(*oldParticles) def removeByID(self, *ids): \"\"\"\\ Remove particle(s) as specified by id(s) from", "particles are calculated by the particles themselves, *not* by ParticleSystem. ParticleSystem calls the", "General Public License, V2.1 # # (1) Kamaelia Contributors are listed in the", "to manage for themselves. The simulation runs in cycles when the run(...) method", "listed in the AUTHORS file and at # http://kamaelia.sourceforge.net/AUTHORS - please extend this", "p in newParticles: self.particleDict[p.ID] = p self.indexer.updateLoc(*newParticles) def remove(self, *oldParticles): \"\"\"\\ Remove the", "self.indexer.updateLoc(*newParticles) def remove(self, *oldParticles): \"\"\"\\ Remove the specified particle(s) from the system. Note", "\"\"\" for particle in oldParticles: self.particles.remove(particle) del self.particleDict[particle.ID] self.indexer.remove(*oldParticles) def removeByID(self, *ids): \"\"\"\\", "V1.1, GNU General # Public License, V2.0, GNU Lesser General Public License, V2.1", "inappropriate for this file. As per MPL/GPL/LGPL removal of this # notice is", "file, # not this notice. # (2) Reproduced in the COPYING file, and", "the 'tick' count by 1. The tick count starts at zero, unless otherwise", "like. Example Usage ------------- Create 3 particles, two of which are bonded and", "\"\"\"Run the simulation for a given number of cycles (default=1)\"\"\" # optimisation to", "= ParticleSystem(laws) >>> sim.add( Particle(position=(10,10)) ) >>> sim.add( Particle(position=(10,20)) ) >>> sim.add( Particle(position=(30,40))", "the terms of any of the # following licenses(2): Mozilla Public License, V1.1,", "see x.__class__.__doc__ for signature\"\"\" self.indexer = SpatialIndexer(laws.maxInteractRadius) self.laws = laws self.particles = []", "between particles are up to the particles to manage for themselves. The simulation", "AUTHORS file and at # http://kamaelia.sourceforge.net/AUTHORS - please extend this file, # not", "The tick count starts at zero, unless otherwise specified during initialization. 
The following", "40] >>> How does it work? ----------------- Set up ParticleSystem by instantiating, specifying", "with any number of dimensions - so long as all particles use the", "at the same positions, irrespective of which particle's doInteractions(...) method is called first.", "work? ----------------- Set up ParticleSystem by instantiating, specifying the laws to act between", "to the particles themselves. You can have as many, or few, spatial dimensions", "from other particles to these ones. \"\"\" for particle in oldParticles: self.particles.remove(particle) del", "- initialParticles -- list of particles (default=[]) - initialTick -- start value of", "for p in sim.particles: print p.getLoc() ... (10, 10) (10, 20) (30, 40)", "20) (30, 40) >>> sim.run(cycles=5) >>> for p in sim.particles: print p.getLoc() ...", "the system. Note that this method does not destroy bonds from other particles", "the list \"\"\" return self.indexer.withinRadius(centre, radius, filter) def run(self, cycles = 1): \"\"\"Run", "which are bonded and move noticeably closer after 5 cycles of simulation:: >>>", "a point). If your code changes the position of a particle, the simulator", ">>> sim = ParticleSystem(laws) >>> sim.add( Particle(position=(10,10)) ) >>> sim.add( Particle(position=(10,20)) ) >>>", "only modify and redistribute this under the terms of any of the #", "have equivalent functionality). Particles can be added or removed from the system by", "that this method does not destroy bonds from other particles to these ones.", "tuples. The particles listed are those within the specified radius of the specified", "- particleDict -- dictionary, indexed by particle.ID ParticleSystem uses a SpatialIndexer object to", "more (particle,distSquared) tuples. The particles listed are those within the specified radius of", "SpatialIndexer class ParticleSystem(object): \"\"\"\\ ParticleSystem(laws[,initialParticles][,initialTick]) -> new ParticleSystem object Discrete time simulator for", "self.add(*initialParticles) def add(self, *newParticles): \"\"\"Add the specified particle(s) into the system\"\"\" self.particles.extend(newParticles) for", "should be derived from the Particle base class (or have equivalent functionality). Particles", "bonds from other particles to these ones. \"\"\" for particle in oldParticles: self.particles.remove(particle)", "the particles to manage for themselves. The simulation runs in cycles when the", "system that the specified particle(s) have changed position. Must be called if you", "deem the MPL # notice inappropriate for this file. As per MPL/GPL/LGPL removal", "Public License, V2.1 # # (1) Kamaelia Contributors are listed in the AUTHORS", "an (optional) set of initial particles. Particles should be derived from the Particle", "filter function: filter(particle) -> True if the particle is to be included in", "informed, so it can update its spatial indexing data, by calling updateLoc(...) The", "the time 'tick' count (default=0) \"\"\" def __init__(self, laws, initialParticles = [], initialTick", "1): \"\"\"Run the simulation for a given number of cycles (default=1)\"\"\" # optimisation", "Notify this physics system that the specified particle(s) have changed position. Must be", "(or have equivalent functionality). Particles can be added or removed from the system", "each other at the same positions, irrespective of which particle's doInteractions(...) method is", "file. As per MPL/GPL/LGPL removal of this # notice is prohibited. 
# #", "cycles -= 1 self.tick += 1 _tick = self.tick for p in self.particles:", "object Discrete time simulator for a system of particles. Keyword arguments: - initialParticles", "Particle base class (or have equivalent functionality). Particles can be added or removed", "otherwise specified during initialization. The following attributes store the particles registered in ParticleSystem:", "removal of this # notice is prohibited. # # Please contact us via:", "themselves. The simulation runs in cycles when the run(...) method is called. Each", "these ones. \"\"\" particles = [self.particleDict[id] for id in ids] self.remove( *particles )", "by particle.ID ParticleSystem uses a SpatialIndexer object to speed up calculations. SpatialIndexer reduce", "_tick = self.tick for p in self.particles: p.doInteractions(_indexer, _laws, _tick) for p in", ">>> for p in sim.particles: print p.getLoc() ... [10.0, 13.940067328] [10.0, 16.059932671999999] [30,", "GNU Lesser General Public License, V2.1 # # (1) Kamaelia Contributors are listed", "velocities to update their position until their update(...) method is called. \"\"\" from", "{} self.add(*initialParticles) def add(self, *newParticles): \"\"\"Add the specified particle(s) into the system\"\"\" self.particles.extend(newParticles)", "self.tick = initialTick self.particleDict = {} self.add(*initialParticles) def add(self, *newParticles): \"\"\"Add the specified", "- please extend this file, # not this notice. # (2) Reproduced in", "all update their positions and velocities ready for the next cycle. This is", "self.indexer.remove(*oldParticles) def removeByID(self, *ids): \"\"\"\\ Remove particle(s) as specified by id(s) from the", "SpatialIndexer reduce the search space when determining what particles lie within a given", "the MPL # notice inappropriate for this file. As per MPL/GPL/LGPL removal of", "particleDict -- dictionary, indexed by particle.ID ParticleSystem uses a SpatialIndexer object to speed", "move noticeably closer after 5 cycles of simulation:: >>> laws = SimpleLaws(bondLength=5) >>>", "their position until their update(...) method is called. \"\"\" from SpatialIndexer import SpatialIndexer", "specified by id(s) from the system. Note that this method does not destroy", "this physics system that the specified particle(s) have changed position. Must be called", "particles >>> for p in sim.particles: print p.getLoc() ... (10, 10) (10, 20)", "or removed by their ID. ParticleSystem will work for particles in space with", "\"\"\"\\ Notify this physics system that the specified particle(s) have changed position. Must", "SpatialIndexer import SpatialIndexer class ParticleSystem(object): \"\"\"\\ ParticleSystem(laws[,initialParticles][,initialTick]) -> new ParticleSystem object Discrete time", "Must be called if you change a particle's position, before calling run(). \"\"\"", "given number of cycles (default=1)\"\"\" # optimisation to speed up access to these", "same! Bonds between particles are up to the particles to manage for themselves.", "section 3.5 of the MPL, we are using this text since we deem", "their update(...) method is called. \"\"\" from SpatialIndexer import SpatialIndexer class ParticleSystem(object): \"\"\"\\", "calls the doInteractions(...) methods of all particles so they can influence each other.", "that the specified particle(s) have changed position. Must be called if you change", "starts at zero, unless otherwise specified during initialization. The following attributes store the", "cycles when the run(...) 
method is called. Each cycle advances the 'tick' count", "*particles ) def updateLoc(self, *particles): \"\"\"\\ Notify this physics system that the specified", "the particles registered in ParticleSystem: - particles -- simple list - particleDict --", "many, or few, spatial dimensions as you like. Example Usage ------------- Create 3", "that passed the (optional) filter function: filter(particle) -> True if the particle is", "# to discuss alternative licensing. # ------------------------------------------------------------------------- \"\"\"\\ ========================================= Discrete time particle physics", "notice is prohibited. # # Please contact us via: <EMAIL> # to discuss", "system by reference, or removed by their ID. ParticleSystem will work for particles", "self.particleDict = {} self.add(*initialParticles) def add(self, *newParticles): \"\"\"Add the specified particle(s) into the", "particles so they can all update their positions and velocities ready for the", "data, by calling updateLoc(...) The actual interactions between particles are calculated by the", "function: filter(particle) -> True if the particle is to be included in the", "2004 British Broadcasting Corporation and Kamaelia Contributors(1) # All Rights Reserved. # #", "in the COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING # Under section 3.5 of", "in oldParticles: self.particles.remove(particle) del self.particleDict[particle.ID] self.indexer.remove(*oldParticles) def removeByID(self, *ids): \"\"\"\\ Remove particle(s) as", "terms of any of the # following licenses(2): Mozilla Public License, V1.1, GNU", "= initialTick self.particleDict = {} self.add(*initialParticles) def add(self, *newParticles): \"\"\"Add the specified particle(s)", "del self.particleDict[particle.ID] self.indexer.remove(*oldParticles) def removeByID(self, *ids): \"\"\"\\ Remove particle(s) as specified by id(s)", "+= 1 _tick = self.tick for p in self.particles: p.doInteractions(_indexer, _laws, _tick) for", "be derived from the Particle base class (or have equivalent functionality). Particles can", "not destroy bonds from other particles to these ones. \"\"\" particles = [self.particleDict[id]", "doInteractions(...) method is called first. Particles should not apply their velocities to update", "given region (radius of a point). If your code changes the position of", "... (10, 10) (10, 20) (30, 40) >>> sim.run(cycles=5) >>> for p in", "add(self, *newParticles): \"\"\"Add the specified particle(s) into the system\"\"\" self.particles.extend(newParticles) for p in", "of which are bonded and move noticeably closer after 5 cycles of simulation::", "by 1. The tick count starts at zero, unless otherwise specified during initialization.", "particles -- simple list - particleDict -- dictionary, indexed by particle.ID ParticleSystem uses", "self.indexer = SpatialIndexer(laws.maxInteractRadius) self.laws = laws self.particles = [] self.tick = initialTick self.particleDict", "influence each other. It then calls the update(...) methods of all particles so", "notice inappropriate for this file. As per MPL/GPL/LGPL removal of this # notice", "themselves. 
You can have as many, or few, spatial dimensions as you like.", "changes the position of a particle, the simulator must be informed, so it", "self.indexer.updateLoc(*particles) def withinRadius(self, centre, radius, filter=(lambda particle:True)): \"\"\"\\ withinRadius(centre,radius[,filter]) -> list of (particle,distSquared)", "COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING # Under section 3.5 of the MPL,", "SimpleLaws(bondLength=5) >>> sim = ParticleSystem(laws) >>> sim.add( Particle(position=(10,10)) ) >>> sim.add( Particle(position=(10,20)) )", "using this text since we deem the MPL # notice inappropriate for this", "ParticleSystem(object): \"\"\"\\ ParticleSystem(laws[,initialParticles][,initialTick]) -> new ParticleSystem object Discrete time simulator for a system", "x.__class__.__doc__ for signature\"\"\" self.indexer = SpatialIndexer(laws.maxInteractRadius) self.laws = laws self.particles = [] self.tick", "their velocities to update their position until their update(...) method is called. \"\"\"", "= [] self.tick = initialTick self.particleDict = {} self.add(*initialParticles) def add(self, *newParticles): \"\"\"Add", "[10.0, 13.940067328] [10.0, 16.059932671999999] [30, 40] >>> How does it work? ----------------- Set", "by ParticleSystem. ParticleSystem calls the doInteractions(...) methods of all particles so they can", "the update(...) methods of all particles so they can all update their positions", "= [self.particleDict[id] for id in ids] self.remove( *particles ) def updateLoc(self, *particles): \"\"\"\\", "-> list of (particle,distSquared) Returns a list of zero or more (particle,distSquared) tuples.", "1 self.tick += 1 _tick = self.tick for p in self.particles: p.doInteractions(_indexer, _laws,", "the specified particle(s) have changed position. Must be called if you change a", "Contributors(1) # All Rights Reserved. # # You may only modify and redistribute", "# http://kamaelia.sourceforge.net/AUTHORS - please extend this file, # not this notice. # (2)", "for the next cycle. This is a two stage process so that, in", "initialTick -- start value of the time 'tick' count (default=0) \"\"\" def __init__(self,", "position of a particle, the simulator must be informed, so it can update", "British Broadcasting Corporation and Kamaelia Contributors(1) # All Rights Reserved. # # You", "Corporation and Kamaelia Contributors(1) # All Rights Reserved. # # You may only", "a particle, the simulator must be informed, so it can update its spatial", "particles. Particles should be derived from the Particle base class (or have equivalent", "removed from the system by reference, or removed by their ID. ParticleSystem will", "Please contact us via: <EMAIL> # to discuss alternative licensing. # ------------------------------------------------------------------------- \"\"\"\\", "the particles themselves. You can have as many, or few, spatial dimensions as", "have as many, or few, spatial dimensions as you like. Example Usage -------------", "in a given cycle, all particles see each other at the same positions,", "self.tick for p in self.particles: p.doInteractions(_indexer, _laws, _tick) for p in self.particles: p.update(_laws)", "other particles to these ones. \"\"\" particles = [self.particleDict[id] for id in ids]", "until their update(...) method is called. \"\"\" from SpatialIndexer import SpatialIndexer class ParticleSystem(object):", "modify and redistribute this under the terms of any of the # following", "not this notice. 
# (2) Reproduced in the COPYING file, and at: #", "You can have as many, or few, spatial dimensions as you like. Example", "we are using this text since we deem the MPL # notice inappropriate", "ids] self.remove( *particles ) def updateLoc(self, *particles): \"\"\"\\ Notify this physics system that", "a given cycle, all particles see each other at the same positions, irrespective", "positions and velocities ready for the next cycle. This is a two stage", "contact us via: <EMAIL> # to discuss alternative licensing. # ------------------------------------------------------------------------- \"\"\"\\ =========================================", "def withinRadius(self, centre, radius, filter=(lambda particle:True)): \"\"\"\\ withinRadius(centre,radius[,filter]) -> list of (particle,distSquared) Returns", "\"\"\" self.indexer.updateLoc(*particles) def withinRadius(self, centre, radius, filter=(lambda particle:True)): \"\"\"\\ withinRadius(centre,radius[,filter]) -> list of", "so they can all update their positions and velocities ready for the next", "by reference, or removed by their ID. ParticleSystem will work for particles in", "# following licenses(2): Mozilla Public License, V1.1, GNU General # Public License, V2.0,", "count by 1. The tick count starts at zero, unless otherwise specified during", "can have as many, or few, spatial dimensions as you like. Example Usage", "16.059932671999999] [30, 40] >>> How does it work? ----------------- Set up ParticleSystem by", "into the system\"\"\" self.particles.extend(newParticles) for p in newParticles: self.particleDict[p.ID] = p self.indexer.updateLoc(*newParticles) def", "or few, spatial dimensions as you like. Example Usage ------------- Create 3 particles,", "laws, initialParticles = [], initialTick = 0): \"\"\"x.__init__(...) initializes x; see x.__class__.__doc__ for", "may only modify and redistribute this under the terms of any of the", "specified during initialization. The following attributes store the particles registered in ParticleSystem: -", "of a point). If your code changes the position of a particle, the", "of particles. Keyword arguments: - initialParticles -- list of particles (default=[]) - initialTick", "speed up calculations. SpatialIndexer reduce the search space when determining what particles lie", "-> new ParticleSystem object Discrete time simulator for a system of particles. Keyword", "this notice. # (2) Reproduced in the COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING", "within the specified radius of the specified centre point, and that passed the", "ID. ParticleSystem will work for particles in space with any number of dimensions", "spatial indexing data, by calling updateLoc(...) The actual interactions between particles are calculated", "method is called. Each cycle advances the 'tick' count by 1. The tick", "particle is to be included in the list \"\"\" return self.indexer.withinRadius(centre, radius, filter)", "are calculated by the particles themselves, *not* by ParticleSystem. ParticleSystem calls the doInteractions(...)", "ParticleSystem by instantiating, specifying the laws to act between particles and an (optional)", "(optional) filter function: filter(particle) -> True if the particle is to be included", "per MPL/GPL/LGPL removal of this # notice is prohibited. # # Please contact", "called first. Particles should not apply their velocities to update their position until", "should not apply their velocities to update their position until their update(...) 
method", "time simulator of a system of bonded and unbonded particles, of multiple types.", "long as all particles use the same! Bonds between particles are up to", "_indexer = self.indexer _laws = self.laws while cycles > 0: cycles -= 1", "simulation ========================================= A discrete time simulator of a system of bonded and unbonded", "specified centre point, and that passed the (optional) filter function: filter(particle) -> True", "simulation runs in cycles when the run(...) method is called. Each cycle advances", "True if the particle is to be included in the list \"\"\" return", "in newParticles: self.particleDict[p.ID] = p self.indexer.updateLoc(*newParticles) def remove(self, *oldParticles): \"\"\"\\ Remove the specified", ") def updateLoc(self, *particles): \"\"\"\\ Notify this physics system that the specified particle(s)", "the same! Bonds between particles are up to the particles to manage for", "physics system that the specified particle(s) have changed position. Must be called if", "instantiating, specifying the laws to act between particles and an (optional) set of", "are using this text since we deem the MPL # notice inappropriate for", "is a two stage process so that, in a given cycle, all particles", "filter(particle) -> True if the particle is to be included in the list", "from the Particle base class (or have equivalent functionality). Particles can be added", "what particles lie within a given region (radius of a point). If your", "*not* by ParticleSystem. ParticleSystem calls the doInteractions(...) methods of all particles so they", "listed are those within the specified radius of the specified centre point, and", "from other particles to these ones. \"\"\" particles = [self.particleDict[id] for id in", "oldParticles: self.particles.remove(particle) del self.particleDict[particle.ID] self.indexer.remove(*oldParticles) def removeByID(self, *ids): \"\"\"\\ Remove particle(s) as specified", "The actual physics calculations are deferred to the particles themselves. You can have", "# All Rights Reserved. # # You may only modify and redistribute this", "in sim.particles: print p.getLoc() ... (10, 10) (10, 20) (30, 40) >>> sim.run(cycles=5)", "a system of particles. Keyword arguments: - initialParticles -- list of particles (default=[])", "of bonded and unbonded particles, of multiple types. The actual physics calculations are", "can influence each other. It then calls the update(...) methods of all particles", "in the AUTHORS file and at # http://kamaelia.sourceforge.net/AUTHORS - please extend this file,", "given cycle, all particles see each other at the same positions, irrespective of", "simulator of a system of bonded and unbonded particles, of multiple types. The", "particle physics simulation ========================================= A discrete time simulator of a system of bonded", "count (default=0) \"\"\" def __init__(self, laws, initialParticles = [], initialTick = 0): \"\"\"x.__init__(...)", "(10, 20) (30, 40) >>> sim.run(cycles=5) >>> for p in sim.particles: print p.getLoc()", "particle(s) have changed position. Must be called if you change a particle's position,", "particle's doInteractions(...) method is called first. Particles should not apply their velocities to", "for id in ids] self.remove( *particles ) def updateLoc(self, *particles): \"\"\"\\ Notify this", "# # You may only modify and redistribute this under the terms of", "= self.laws while cycles > 0: cycles -= 1 self.tick += 1 _tick", "to these ones. 
\"\"\" for particle in oldParticles: self.particles.remove(particle) del self.particleDict[particle.ID] self.indexer.remove(*oldParticles) def", "access to these functions: _indexer = self.indexer _laws = self.laws while cycles >", "are up to the particles to manage for themselves. The simulation runs in", "dimensions - so long as all particles use the same! Bonds between particles", "Discrete time simulator for a system of particles. Keyword arguments: - initialParticles --", "\"\"\"\\ ========================================= Discrete time particle physics simulation ========================================= A discrete time simulator of", "velocities ready for the next cycle. This is a two stage process so", "dimensions as you like. Example Usage ------------- Create 3 particles, two of which", "each other. It then calls the update(...) methods of all particles so they", "withinRadius(self, centre, radius, filter=(lambda particle:True)): \"\"\"\\ withinRadius(centre,radius[,filter]) -> list of (particle,distSquared) Returns a", "advances the 'tick' count by 1. The tick count starts at zero, unless", "the specified particle(s) into the system\"\"\" self.particles.extend(newParticles) for p in newParticles: self.particleDict[p.ID] =", "\"\"\" from SpatialIndexer import SpatialIndexer class ParticleSystem(object): \"\"\"\\ ParticleSystem(laws[,initialParticles][,initialTick]) -> new ParticleSystem object", "zero or more (particle,distSquared) tuples. The particles listed are those within the specified", "discuss alternative licensing. # ------------------------------------------------------------------------- \"\"\"\\ ========================================= Discrete time particle physics simulation =========================================", "a SpatialIndexer object to speed up calculations. SpatialIndexer reduce the search space when", "p in sim.particles: print p.getLoc() ... [10.0, 13.940067328] [10.0, 16.059932671999999] [30, 40] >>>", "following licenses(2): Mozilla Public License, V1.1, GNU General # Public License, V2.0, GNU", "in cycles when the run(...) method is called. Each cycle advances the 'tick'", "http://kamaelia.sourceforge.net/AUTHORS - please extend this file, # not this notice. # (2) Reproduced", "def removeByID(self, *ids): \"\"\"\\ Remove particle(s) as specified by id(s) from the system.", "\"\"\" def __init__(self, laws, initialParticles = [], initialTick = 0): \"\"\"x.__init__(...) initializes x;", "added or removed from the system by reference, or removed by their ID.", "initialTick = 0): \"\"\"x.__init__(...) initializes x; see x.__class__.__doc__ for signature\"\"\" self.indexer = SpatialIndexer(laws.maxInteractRadius)", "by the particles themselves, *not* by ParticleSystem. ParticleSystem calls the doInteractions(...) methods of", "========================================= A discrete time simulator of a system of bonded and unbonded particles,", "> 0: cycles -= 1 self.tick += 1 _tick = self.tick for p", "are listed in the AUTHORS file and at # http://kamaelia.sourceforge.net/AUTHORS - please extend", "then calls the update(...) methods of all particles so they can all update", "Public License, V2.0, GNU Lesser General Public License, V2.1 # # (1) Kamaelia", "noticeably closer after 5 cycles of simulation:: >>> laws = SimpleLaws(bondLength=5) >>> sim", "after 5 cycles of simulation:: >>> laws = SimpleLaws(bondLength=5) >>> sim = ParticleSystem(laws)", "in sim.particles: print p.getLoc() ... 
[10.0, 13.940067328] [10.0, 16.059932671999999] [30, 40] >>> How", "derived from the Particle base class (or have equivalent functionality). Particles can be", "a two stage process so that, in a given cycle, all particles see", "__init__(self, laws, initialParticles = [], initialTick = 0): \"\"\"x.__init__(...) initializes x; see x.__class__.__doc__", "is called. Each cycle advances the 'tick' count by 1. The tick count", "update their position until their update(...) method is called. \"\"\" from SpatialIndexer import", "does not destroy bonds from other particles to these ones. \"\"\" particles =", "-- start value of the time 'tick' count (default=0) \"\"\" def __init__(self, laws,", "cycles = 1): \"\"\"Run the simulation for a given number of cycles (default=1)\"\"\"", "as many, or few, spatial dimensions as you like. Example Usage ------------- Create", "any of the # following licenses(2): Mozilla Public License, V1.1, GNU General #", "# not this notice. # (2) Reproduced in the COPYING file, and at:", "equivalent functionality). Particles can be added or removed from the system by reference,", "a system of bonded and unbonded particles, of multiple types. The actual physics", "a particle's position, before calling run(). \"\"\" self.indexer.updateLoc(*particles) def withinRadius(self, centre, radius, filter=(lambda", "method is called. \"\"\" from SpatialIndexer import SpatialIndexer class ParticleSystem(object): \"\"\"\\ ParticleSystem(laws[,initialParticles][,initialTick]) ->", "from the system. Note that this method does not destroy bonds from other", "sim = ParticleSystem(laws) >>> sim.add( Particle(position=(10,10)) ) >>> sim.add( Particle(position=(10,20)) ) >>> sim.add(", "----------------- Set up ParticleSystem by instantiating, specifying the laws to act between particles", "to these ones. \"\"\" particles = [self.particleDict[id] for id in ids] self.remove( *particles", "at zero, unless otherwise specified during initialization. The following attributes store the particles", "specified particle(s) from the system. Note that this method does not destroy bonds", "extend this file, # not this notice. # (2) Reproduced in the COPYING", "physics calculations are deferred to the particles themselves. You can have as many,", "Rights Reserved. # # You may only modify and redistribute this under the", "Reproduced in the COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING # Under section 3.5", "the COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING # Under section 3.5 of the", "MPL, we are using this text since we deem the MPL # notice", "class (or have equivalent functionality). Particles can be added or removed from the", "will work for particles in space with any number of dimensions - so", "store the particles registered in ParticleSystem: - particles -- simple list - particleDict", "other particles to these ones. \"\"\" for particle in oldParticles: self.particles.remove(particle) del self.particleDict[particle.ID]", "we deem the MPL # notice inappropriate for this file. 
As per MPL/GPL/LGPL", "particles, two of which are bonded and move noticeably closer after 5 cycles", "run(self, cycles = 1): \"\"\"Run the simulation for a given number of cycles", "that, in a given cycle, all particles see each other at the same", "Create 3 particles, two of which are bonded and move noticeably closer after", "cycles > 0: cycles -= 1 self.tick += 1 _tick = self.tick for", "# Public License, V2.0, GNU Lesser General Public License, V2.1 # # (1)", "text since we deem the MPL # notice inappropriate for this file. As", "Usage ------------- Create 3 particles, two of which are bonded and move noticeably", "*ids): \"\"\"\\ Remove particle(s) as specified by id(s) from the system. Note that", "zero, unless otherwise specified during initialization. The following attributes store the particles registered", "Mozilla Public License, V1.1, GNU General # Public License, V2.0, GNU Lesser General", "ParticleSystem(laws[,initialParticles][,initialTick]) -> new ParticleSystem object Discrete time simulator for a system of particles." ]
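# The two-stage cycle documented above is the whole contract that
# ParticleSystem imposes on its particles. Below is a minimal sketch of a
# conforming particle; "FreeParticle" and "NullLaws" are hypothetical
# illustration names, not part of Kamaelia (the real Particle base class and
# SimpleLaws do considerably more, e.g. bonds and forces).

class NullLaws(object):
    # Stand-in laws object: ParticleSystem itself only reads
    # maxInteractRadius (to configure its SpatialIndexer); everything else
    # about 'laws' is interpreted by the particles.
    maxInteractRadius = 10.0

class FreeParticle(object):
    # A bond-free particle drifting at constant velocity.
    _nextID = 0

    def __init__(self, position, velocity):
        self.pos      = list(position)    # any number of dimensions...
        self.velocity = list(velocity)    # ...as long as these two match
        self.ID = FreeParticle._nextID    # particleDict is indexed by ID
        FreeParticle._nextID += 1

    def getLoc(self):
        return self.pos

    def doInteractions(self, indexer, laws, tick):
        # Stage 1: inspect neighbours (e.g. via indexer.withinRadius) and
        # accumulate changes into self.velocity only. Positions must NOT
        # move here, so every particle sees the same snapshot this cycle.
        pass

    def update(self, laws):
        # Stage 2: now, and only now, apply the velocity to the position.
        self.pos = [p + v for p, v in zip(self.pos, self.velocity)]

# Usage sketch:
#     sim = ParticleSystem(NullLaws())
#     sim.add(FreeParticle((0.0, 0.0), (1.0, 0.5)))
#     sim.run(cycles=3)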
[ "6))) >>> r = ls._set_p2(Point((3, -1))) >>> r == Point((3.0, -1.0)) True \"\"\"", "libpysal.cg.Point The point where the ray originates. second_p : The second point specifying", "The ``_p2`` attribute. Examples -------- >>> ls = LineSegment(Point((1, 2)), Point((5, 6))) >>>", "The point's location (number :math:`x`-tuple, :math:`x` > 1). Examples -------- >>> p =", "3)) \"\"\" def __init__(self, loc): self.__loc = tuple(map(float, loc)) @classmethod def __from_geo_interface__(cls, geo):", "= LineSegment(self._p2, self._p1) return line_seg @property def bounding_box(self): \"\"\"Returns the minimum bounding box", "None return cls(parts, holes) else: verts = [[Point(pt) for pt in part] for", "ccw. ``-1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is cw. ``-1``", "@property def width(self) -> Union[int, float]: \"\"\"Returns the width of the Rectangle. Examples", "collinear and ``self.p1`` is in the middle. ``1`` if the points are collinear", "msg += \"the first and last vertices are not the same.\" # raise", "Geometry(object): \"\"\"A base class to help implement ``is_geometry`` and make geometric types extendable.", "segment. Examples -------- >>> ls = LineSegment(Point((1, 2)), Point((5, 6))) >>> ls.bounding_box.left 1.0", "ValueError(msg) self.vertices = tuple(vertices) self._perimeter = None self._bounding_box = None self._area = None", "representation of a linear ring. Linear rings must be closed, the first and", "True \"\"\" ccw1 = self.sw_ccw(other.p2) ccw2 = self.sw_ccw(other.p1) ccw3 = other.sw_ccw(self.p1) ccw4 =", "True for ring in self._hole_rings: if ring.contains_point(point): contains = False searching = False", "p.centroid (5.0353535353535355, 5.0353535353535355) \"\"\" CP = [ring.centroid for ring in self._part_rings] AP =", "ls = LineSegment(Point((5, 0)), Point((10, 0))) >>> ls1 = LineSegment(Point((5, 0)), Point((10, 1)))", "self._part_rings = list(map(Ring, vertices)) self._vertices = [clockwise(part) for part in vertices] else: self._part_rings", "The number of vertices including holes. perimeter : float The geometric length of", "= __area * 0.5 if __area < 0: __area = -area return __area", "A list of points with the vertices of the ring. len : int", "10)), Point((0, 10))], ... [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))] ...", "str(self._p1) + \", \" + str(self._p2) + \")\" # return \"LINESTRING ({} {},", "center[0]) self.right = center[0] + scale * (self.right - center[0]) self.lower = center[1]", "self._reset_props() @classmethod def __from_geo_interface__(cls, geo: dict): \"\"\"While PySAL does not differentiate polygons and", "extendable. \"\"\" def __init__(self): pass class Point(Geometry): \"\"\"Geometric class for point objects. Parameters", "at this time.\" % geo_type) return obj class Geometry(object): \"\"\"A base class to", "def y(self, x: Union[int, float]) -> float: \"\"\"Returns the :math:`y`-value of the line", "part in vertices] else: self._vertices = [vertices] self._reset_props() @classmethod def __from_geo_interface__(cls, geo: dict):", "max([v[1] for v in vertices]), ) return self._bounding_box @property def len(self) -> int:", "Point((0, 1))]) \"\"\" def __init__(self, vertices, holes=None): self._part_rings = [] self._hole_rings = []", "< dy2 * dx1: is_ccw = 1 elif dy1 * dx2 > dy2", "attributes (implemented as ``properties``) then recompute their values if they have been reset", "the desired dimension. Examples -------- >>> p = Point((5.5, 4.3)) >>> p[0] ==", "box of the ring. 
area : float The area enclosed by the ring.", "The point where the ray originates. second_p : The second point specifying the", "def len(self) -> int: return len(self) @staticmethod def dist(v1, v2) -> Union[int, float]:", "area enclosed by the polygon. centroid : tuple The 'center of gravity', i.e.", "float]: return math.hypot(v1[0] - v2[0], v1[1] - v2[1]) @property def perimeter(self) -> Union[int,", "recompute their values if they have been reset since the last call to", "b : {int, float} The :math:`y`-intercept of the line. ``b`` is also an", "{}, {} {})\".format( # self._p1[0], self._p1[1], self._p2[0], self._p2[1] # ) def __eq__(self, other)", "return part[::-1] vl = list(vertices) if isinstance(vl[0], list): self._part_rings = list(map(Ring, vertices)) self._vertices", "sum([pt[0] * area for pt, area in zip(CP + CH, A)]) / sum(A)", "dimension. Parameters ---------- *args : tuple A singleton tuple of :math:`(i)` with :math:`i`", "---------- y : {int, float} The :math:`y`-value at which to compute :math:`x`. Raises", "float} The slope of the line. ``m`` is also an attribute. b :", "[-ring.area for ring in self._hole_rings] A = AP + AH cx = sum([pt[0]", "-1.0)) True \"\"\" self._p2 = p2 self._reset_props() return self._p2 p2 = property(_get_p2, _set_p2)", "in self._vertices]) return self._len @property def arclen(self) -> Union[int, float]: \"\"\"Returns the geometric", "def perimeter(self) -> Union[int, float]: \"\"\"Returns the perimeter of the polygon. Examples --------", "__geo_interface__(self) -> dict: if len(self.parts) == 1: return {\"type\": \"LineString\", \"coordinates\": self.vertices} else:", "float The geometric length of the chain. Examples -------- >>> c = Chain([Point((0,", "geo[\"coordinates\"]] return cls(verts[0:1], verts[1:]) @property def __geo_interface__(self) -> dict: \"\"\"Return ``__geo_interface__`` information lookup.\"\"\"", "is based on `this <http://www.engr.colostate.edu/~dga/dga/papers/point_in_polygon.pdf>`_. Parameters ---------- point : libpysal.cg.Point The point to", "first and last point must be the same. Open rings will be closed.", ">>> Point((0, 1)) > Point((1, 1)) False \"\"\" return (self.__loc) > (other.__loc) def", "= (pt[0] - self._p1[0], pt[1] - self._p1[1]) return v1[0] * v2[1] - v1[1]", "whether a point is clockwise of the segment (``True``) or not (``False``). Exclusive.", "is None: self._len = sum([part_perimeter(part) for part in self._vertices]) return self._len @property def", "vertices do not form a closed ring, \" # msg += \"the first", "the polygon.\"\"\" self._perimeter = None self._bounding_box = None self._bbox = None self._area =", "centroid of the ring. Returns ------- self._centroid : libpysal.cg.Point The ring's centroid. Notes", "which must support the ``__geo_interface__``. Parameters ---------- obj : {libpysal.cg.{Point, LineSegment, Line, Ray,", "\"\"\"Returns the area of the polygon. Examples -------- >>> p = Polygon([Point((0, 0)),", "attributes which are functions of other attributes. The ``getter``s for these attributes (implemented", "+ \")\" # return \"LINESTRING ({} {}, {} {})\".format( # self._p1[0], self._p1[1], self._p2[0],", "{int, float} The slope of the line. ``m`` is also an attribute. b", "_area = sum_area(self._vertices) - sum_area(self._holes) return _area @property def centroid(self) -> tuple: \"\"\"Returns", "to test for containment. 
Returns ------- contains : bool ``True`` if the polygon", "True >>> l2 == l1 True \"\"\" eq = False if not isinstance(other,", "self._area = -A return self._area @property def centroid(self): \"\"\"Returns the centroid of the", "0)), Point((5, 0))) >>> ls.is_cw(Point((2, 2))) False >>> ls.is_cw(Point((2, -2))) True \"\"\" v1", "[ ... [Point((0, 0)), Point((1, 0)), Point((1, 1))], ... [Point((10, 10)), Point((11, 10)),", "list: \"\"\"Returns the holes of the polygon in clockwise order. Examples -------- >>>", "\"Supplied vertices do not form a closed ring, \" # msg += \"the", "Chain, \"multilinestring\": Chain, \"polygon\": Polygon, \"multipolygon\": Polygon, } # moving this to top", "self.p1 p1 = self.p2 p2 = pt dx1 = p1[0] - p0[0] dy1", "PySAL does not differentiate polygons and multipolygons GEOS, Shapely, and geoJSON do. In", "= Polygon( ... [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))], ... [Point((1,", "the ring will be increased significantly. \"\"\" for ring in self._part_rings: ring.build_quad_tree_structure() for", "l1 True \"\"\" eq = False if not isinstance(other, self.__class__): pass else: if", "the index of the desired dimension. Examples -------- >>> p = Point((5.5, 4.3))", "... [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))] ... ) >>> p.contains_point((3.0,", "Point((1, 0)), Point((1, 1)), Point((0, 1))]) \"\"\" def __init__(self, vertices, holes=None): self._part_rings =", "self[:] x1, y2, X1, Y1 = other[:] return Rectangle( min(self.left, other.left), min(self.lower, other.lower),", "the area of the ring. Examples -------- >>> r = Ring( ... [", "(self.right - center[0]) self.lower = center[1] + scale * (self.lower - center[1]) self.upper", "bounding box of the segment. len : float The length of the segment.", "obj class Geometry(object): \"\"\"A base class to help implement ``is_geometry`` and make geometric", ":math:`x`-intercept of the line. ``x`` is also an attribute. Examples -------- >>> ls", "'center of mass'. Examples -------- >>> r = Ring( ... [ ... Point((0,", "\"multilinestring\": verts = [list(map(Point, part)) for part in geo[\"coordinates\"]] else: raise TypeError(\"%r is", "CH, A)]) / sum(A) return cx, cy def build_quad_tree_structure(self): \"\"\"Build the quad tree", "Raises ------ TypeError Raised when ``obj`` is not a supported shape. NotImplementedError Raised", "Notes ----- The centroid returned by this method is the geometric centroid. Also", "Examples -------- >>> p = Point((1, 3)) \"\"\" def __init__(self, loc): self.__loc =", "line segment. Returns ------- self._p1 : libpysal.cg.Point The ``_p1`` attribute. Examples -------- >>>", "- mx b = self._p1[1] - m * self._p1[0] self._line = Line(m, b)", "\"type\": \"MultiPolygon\", \"coordinates\": [[part] for part in self.parts], } if self._holes[0]: geo[\"coordinates\"][0] +=", "\"\"\"Geometric representation of polygon objects. Returns a polygon created from the objects specified.", "False: dx = self._p1[0] - self._p2[0] dy = self._p1[1] - self._p2[1] if dx", "of vertices including holes. perimeter : float The geometric length of the perimeter", "1)), ... Point((0, 1)), ... Point((0, 0)) ... ] ... ) >>> str(r.centroid)", "\"to implement truth value testing and the built-in operation ``bool()``\" ``-- http://docs.python.org/reference/datamodel.html Examples", "than another object. 
Parameters ---------- other : libpysal.cg.Point An object to test equality", "= len(self.vertices) xs = [self.vertices[i][0] - point[0] for i in range(rn)] ys =", "\"\"\"Returns the vertices of the chain in clockwise order. Examples -------- >>> c", "\"coordinates\": [[part] for part in self.parts], } if self._holes[0]: geo[\"coordinates\"][0] += self._holes return", "bblower or bbupper: pass else: rn = len(self.vertices) xs = [self.vertices[i][0] - point[0]", "turn from ``self.p1`` to ``self.p2`` to ``pt`` is cw. ``-1`` if the points", "origin, second_p): self.o = origin self.p = second_p class Chain(Geometry): \"\"\"Geometric representation of", "QuadTreeStructureSingleRing(self) def contains_point(self, point): \"\"\"Point containment using winding number. The implementation is based", "if the points are collinear and ``self.p2`` is in the middle. ``0`` if", "tuple A singleton tuple of :math:`(i)` with :math:`i` as the index of the", "``p1`` attribute of the line segment. Parameters ---------- p1 : libpysal.cg.Point A point.", "return len(self.__loc) def __repr__(self) -> str: \"\"\"Returns the string representation of the ``Point``.", "\"type\"): raise TypeError(\"%r does not appear to be a shape object.\" % (obj))", "and holes != []: if isinstance(holes[0], list): self._hole_rings = list(map(Ring, holes)) self._holes =", ">>> c.len 4.0 \"\"\" def dist(v1: tuple, v2: tuple) -> Union[int, float]: return", "0)), Point((1, 0)), Point((1, 1)), Point((2, 1))]) >>> verts = c.vertices >>> len(verts)", "2.0 \"\"\" return abs(self.signed_area) @property def signed_area(self) -> Union[int, float]: if self._area is", "not appear to be a shape object.\" % (obj)) geo_type = geo[\"type\"].lower() #", "i in range(rn)] ys = [self.vertices[i][1] - point[1] for i in range(rn)] w", "``p1`` attribute of the line segment. Returns ------- self._p1 : libpysal.cg.Point The ``_p1``", "Point(Geometry): \"\"\"Geometric class for point objects. Parameters ---------- loc : tuple The point's", "v2[1]) def part_perimeter(part) -> Union[int, float]: return sum([dist(part[i], part[i + 1]) for i", "origin : libpysal.cg.Point The point where the ray originates. second_p : The second", "sub-polygons to be considered as holes. Default is ``None``. Attributes ---------- vertices :", "r = Rectangle(0, 0, 4, 4) >>> r.area 16.0 \"\"\" return (self.right -", "for v in part] for part in self._holes] @property def parts(self) -> list:", "+ shift[1] def set_scale(self, scale): \"\"\"Rescales the rectangle around its center. Parameters ----------", "Rectangle( min([v[0] for v in vertices]), min([v[1] for v in vertices]), max([v[0] for", "box of the polygon. Examples -------- >>> p = Polygon([Point((0, 0)), Point((2, 0)),", "= Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))]) >>> p.area 1.0 >>>", "``m`` is also an attribute. b : {int, float} The :math:`y`-intercept of the", "greater than another object. 
Parameters
----------
other : libpysal.cg.Point
    An object to test equality against.
Parameters
----------
other : libpysal.cg.Point
    An object to test equality against.
Returns
-------
Examples
--------
>>> Point((0, 1)) <= Point((0, 1))
True
>>> Point((0, 1)) <= Point((1, 1))
True
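A short sketch of the ordering semantics, assuming the ``Point`` class from this module: comparisons delegate to the underlying coordinate tuples, so ordering is lexicographic on :math:`(x, y)`.

from libpysal.cg import Point

assert Point((0, 1)) <= Point((0, 1))
assert Point((0, 1)) < Point((1, 0))                # x is compared first
assert min(Point((2, 0)), Point((0, 5))) == Point((0, 5))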
Attributes
----------
vertices : list
    A list of points with the vertices of the polygon in clockwise order.
Examples
--------
>>> p = Point((1, 3))
>>> str(p)
'(1.0, 3.0)'
Parameters
----------
loc : tuple
    The point's location (number :math:`x`-tuple, :math:`x` > 1).
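A minimal sketch, assuming the ``Point`` class from this module: coordinates are coerced to floats on construction (the class stores ``tuple(map(float, loc))``), so integer input reads back as floats.

from libpysal.cg import Point

p = Point((1, 3))
assert p[0] == 1.0 and p[1] == 3.0  # ints coerced to floats
assert len(p) == 2                  # dimensionality of the location tuple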
Parameters
----------
other : libpysal.cg.LineSegment
    Another line segment to check against.
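A standalone sketch of the orientation test that ``intersect`` relies on: two segments cross when the endpoints of each straddle the line through the other, i.e. the ccw signs differ. The helper names here are illustrative, not part of the library API.

def _ccw(a, b, c):
    # > 0 counterclockwise, < 0 clockwise, == 0 collinear
    return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0])

def segments_intersect(p1, p2, q1, q2):
    # <= 0 (rather than < 0) lets endpoints lying on the other segment count
    return (_ccw(p1, p2, q1) * _ccw(p1, p2, q2) <= 0
            and _ccw(q1, q2, p1) * _ccw(q1, q2, p2) <= 0)

assert segments_intersect((5, 0), (10, 0), (7, -1), (7, 2))
assert not segments_intersect((0, 0), (1, 0), (0, 1), (1, 1))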
[Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))],
... [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
)
>>> p.area
99.0
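A hedged sketch of the hole-handling order that the doctest above exercises, mirroring the control flow of ``Polygon.contains_point``; ``contains_with_holes`` and ``in_ring`` are illustrative names, not library API.

def contains_with_holes(point, exterior_rings, hole_rings, in_ring):
    # ``in_ring`` is any single-ring containment test, e.g. winding number.
    if any(in_ring(point, hole) for hole in hole_rings):
        return False  # a point inside a hole lies outside the polygon
    return any(in_ring(point, ring) for ring in exterior_rings)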
Examples
--------
>>> l = Line(1, 0)
>>> l.y(1)
1.0
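A small sketch of line evaluation, assuming the ``Line`` and ``VerticalLine`` classes defined in this module (``VerticalLine`` is not listed in ``__all__``, hence the direct import): :math:`y = mx + b` for an ordinary line, while a vertical line has constant :math:`x` and no :math:`y`-value for a given :math:`x`.

import math
from libpysal.cg.shapes import Line, VerticalLine

l = Line(1, 0)
assert l.y(1) == 1.0          # y = 1 * 1 + 0
assert l.x(1) == 1.0          # x = (y - b) / m

v = VerticalLine(0)
assert v.x(0.25) == 0.0       # x is the constant intercept
assert math.isnan(v.y(0.25))  # y is undefined on a vertical line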
right : float
    Maximum x-value of the rectangle.
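A minimal usage sketch, assuming the ``Rectangle`` class from this module: width, height, and area derive directly from the four bounds.

from libpysal.cg import Rectangle

r = Rectangle(0, 0, 4, 3)  # left, lower, right, upper
assert r.width == 4.0      # right - left
assert r.height == 3.0     # upper - lower
assert r.area == 12.0      # width * height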
Parameters
----------
[Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
Parameters
----------
area : float
    The area enclosed by the polygon.
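The ``area`` property builds on a signed-area (shoelace) sum over the vertex ring; a standalone sketch under that assumption, with an illustrative helper name:

def shoelace_area(vertices):
    # vertices: a ring of (x, y) pairs; closure is handled by wraparound
    a = 0.0
    n = len(vertices)
    for i in range(n):
        x0, y0 = vertices[i]
        x1, y1 = vertices[(i + 1) % n]
        a += (x0 + x1) * (y0 - y1)
    return abs(a * 0.5)

assert shoelace_area([(0, 0), (2, 0), (2, 1), (0, 1)]) == 2.0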
Parameters
----------
x : {int, float}
    The :math:`x`-value at which to compute :math:`y`.
class LineSegment(Geometry):
    """Geometric representation of line segment objects.

    Parameters
    ----------
    start_pt : libpysal.cg.Point
        The point where the segment begins.
    end_pt : libpysal.cg.Point
        The point where the segment ends.

    Attributes
    ----------
    p1 : libpysal.cg.Point
        The starting point of the line segment.
    p2 : libpysal.cg.Point
        The ending point of the line segment.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the segment.
    len : float
        The length of the segment.
    line : libpysal.cg.Line
        The line on which the segment lies.

    Examples
    --------
    >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
    >>> ls.p1
    (1.0, 2.0)
    """

    def __init__(self, start_pt, end_pt):
        self._p1 = start_pt
        self._p2 = end_pt
        self._reset_props()

    def __str__(self):
        return "LineSegment(" + str(self._p1) + ", " + str(self._p2) + ")"
        # return "LINESTRING ({} {}, {} {})".format(
        #     self._p1[0], self._p1[1], self._p2[0], self._p2[1]
        # )

    def __eq__(self, other) -> bool:
        """Returns ``True`` if ``self`` and ``other`` are the same line segment.

        Examples
        --------
        >>> l1 = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> l2 = LineSegment(Point((5, 6)), Point((1, 2)))
        >>> l1 == l2
        True
        >>> l2 == l1
        True
        """
        eq = False
        if not isinstance(other, self.__class__):
            pass
        else:
            if other.p1 == self._p1 and other.p2 == self._p2:
                eq = True
            elif other.p2 == self._p1 and other.p1 == self._p2:
                eq = True
        return eq

    def intersect(self, other) -> bool:
        """Test whether segment intersects with other segment (``True``) or
        not (``False``). Handles endpoints of segments being on other segment.

        Parameters
        ----------
        other : libpysal.cg.LineSegment
            Another line segment to check against.

        Examples
        --------
        >>> ls = LineSegment(Point((5, 0)), Point((10, 0)))
        >>> ls1 = LineSegment(Point((5, 0)), Point((10, 1)))
        >>> ls.intersect(ls1)
        True
        >>> ls2 = LineSegment(Point((5, 1)), Point((10, 1)))
        >>> ls.intersect(ls2)
        False
        >>> ls2 = LineSegment(Point((7, -1)), Point((7, 2)))
        >>> ls.intersect(ls2)
        True
        """
        ccw1 = self.sw_ccw(other.p2)
        ccw2 = self.sw_ccw(other.p1)
        ccw3 = other.sw_ccw(self.p1)
        ccw4 = other.sw_ccw(self.p2)
        intersects = ccw1 * ccw2 <= 0 and ccw3 * ccw4 <= 0
        return intersects

    def _reset_props(self):
        """**HELPER METHOD. DO NOT CALL.** Resets attributes which are
        functions of other attributes. The getters for these attributes
        (implemented as properties) then recompute their values if they
        have been reset since the last call to the getter.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls._reset_props()
        """
        self._bounding_box = None
        self._len = None
        self._line = False  # ``False`` marks "not yet computed"

    def _get_p1(self):
        """**HELPER METHOD. DO NOT CALL.** Returns the ``p1`` attribute
        of the line segment.

        Returns
        -------
        self._p1 : libpysal.cg.Point
            The ``_p1`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._get_p1()
        >>> r == Point((1, 2))
        True
        """
        return self._p1

    def _set_p1(self, p1):
        """**HELPER METHOD. DO NOT CALL.** Sets the ``p1`` attribute
        of the line segment.

        Parameters
        ----------
        p1 : libpysal.cg.Point
            A point.

        Returns
        -------
        self._p1 : libpysal.cg.Point
            The reset ``p1`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p1(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        """
        self._p1 = p1
        self._reset_props()
        return self._p1

    p1 = property(_get_p1, _set_p1)

    def _get_p2(self):
        """**HELPER METHOD. DO NOT CALL.** Returns the ``p2`` attribute
        of the line segment.

        Returns
        -------
        self._p2 : libpysal.cg.Point
            The ``_p2`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._get_p2()
        >>> r == Point((5, 6))
        True
        """
        return self._p2

    def _set_p2(self, p2):
        """**HELPER METHOD. DO NOT CALL.** Sets the ``p2`` attribute
        of the line segment.

        Parameters
        ----------
        p2 : libpysal.cg.Point
            A point.

        Returns
        -------
        self._p2 : libpysal.cg.Point
            The reset ``p2`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p2(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        """
        self._p2 = p2
        self._reset_props()
        return self._p2

    p2 = property(_get_p2, _set_p2)

    def is_ccw(self, pt) -> bool:
        """Returns whether a point is counterclockwise of the segment
        (``True``) or not (``False``). Exclusive.

        Parameters
        ----------
        pt : libpysal.cg.Point
            A point lying to one side of the segment.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_ccw(Point((2, 2)))
        True
        >>> ls.is_ccw(Point((2, -2)))
        False
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] > 0

    def is_cw(self, pt) -> bool:
        """Returns whether a point is clockwise of the segment (``True``)
        or not (``False``). Exclusive.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_cw(Point((2, 2)))
        False
        >>> ls.is_cw(Point((2, -2)))
        True
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] < 0

    def sw_ccw(self, pt):
        """Sedgewick test for ``pt`` being ccw of segment.

        Returns
        -------
        is_ccw : bool
            ``1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is ccw.
            ``-1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is cw.
            ``-1`` if the points are collinear and ``self.p1`` is in the middle.
            ``1`` if the points are collinear and ``self.p2`` is in the middle.
            ``0`` if the points are collinear and ``pt`` is in the middle.
        """
        p0 = self.p1
        p1 = self.p2
        p2 = pt

        dx1 = p1[0] - p0[0]
        dy1 = p1[1] - p0[1]
        dx2 = p2[0] - p0[0]
        dy2 = p2[1] - p0[1]

        if dy1 * dx2 < dy2 * dx1:
            is_ccw = 1
        elif dy1 * dx2 > dy2 * dx1:
            is_ccw = -1
        elif dx1 * dx2 < 0 or dy1 * dy2 < 0:
            is_ccw = -1
        elif dx1 * dx1 + dy1 * dy1 >= dx2 * dx2 + dy2 * dy2:
            is_ccw = 0
        else:
            is_ccw = 1
        return is_ccw

    def get_swap(self):
        """Returns a ``LineSegment`` object which has its endpoints swapped.

        Returns
        -------
        line_seg : libpysal.cg.LineSegment
            The ``LineSegment`` object which has its endpoints swapped.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> swap = ls.get_swap()
        >>> swap.p1[0]
        5.0
        >>> swap.p1[1]
        6.0
        >>> swap.p2[0]
        1.0
        >>> swap.p2[1]
        2.0
        """
        line_seg = LineSegment(self._p2, self._p1)
        return line_seg

    @property
    def bounding_box(self):
        """Returns the minimum bounding box of a ``LineSegment`` object.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the line segment.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls.bounding_box.left
        1.0
        >>> ls.bounding_box.lower
        2.0
        >>> ls.bounding_box.right
        5.0
        >>> ls.bounding_box.upper
        6.0
        """
        # If LineSegment attributes p1, p2 changed, recompute
        if self._bounding_box is None:
            self._bounding_box = Rectangle(
                min([self._p1[0], self._p2[0]]),
                min([self._p1[1], self._p2[1]]),
                max([self._p1[0], self._p2[0]]),
                max([self._p1[1], self._p2[1]]),
            )
        return Rectangle(
            self._bounding_box.left,
            self._bounding_box.lower,
            self._bounding_box.right,
            self._bounding_box.upper,
        )

    @property
    def len(self) -> float:
        """Returns the length of a ``LineSegment`` object.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((5, 2)))
        >>> ls.len
        3.0
        """
        # If LineSegment attributes p1, p2 changed, recompute
        if self._len is None:
            self._len = math.hypot(self._p1[0] - self._p2[0], self._p1[1] - self._p2[1])
        return self._len

    @property
    def line(self):
        """Returns a ``Line`` object of the line on which the segment lies.

        Returns
        -------
        self._line : libpysal.cg.Line
            The ``Line`` object of the line on which the segment lies.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((3, 3)))
        >>> l = ls.line
        >>> l.m
        1.0
        >>> l.b
        0.0
        """
        if self._line is False:
            dx = self._p1[0] - self._p2[0]
            dy = self._p1[1] - self._p2[1]
            if dx == 0 and dy == 0:
                self._line = None  # degenerate segment has no line
            elif dx == 0:
                self._line = VerticalLine(self._p1[0])
            else:
                m = dy / float(dx)
                # y - mx
                b = self._p1[1] - m * self._p1[0]
                self._line = Line(m, b)
        return self._line
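# --- Illustrative sketch (ours, not upstream): ``intersect`` combines four
# Sedgewick orientation tests (``sw_ccw``); two segments cross exactly when
# each straddles the line of the other, which the sign products encode.
def _demo_segment_intersection():
    a = LineSegment(Point((0, 0)), Point((4, 4)))
    b = LineSegment(Point((0, 4)), Point((4, 0)))
    assert a.intersect(b)  # crossing diagonals straddle each other
    c = LineSegment(Point((5, 5)), Point((6, 6)))
    assert not a.intersect(c)  # collinear but disjoint: no intersection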
class VerticalLine(Geometry):
    """Geometric representation of vertical line objects.

    Parameters
    ----------
    x : {int, float}
        The :math:`x`-intercept of the line. ``x`` is also an attribute.

    Examples
    --------
    >>> ls = VerticalLine(0)
    >>> ls.m
    inf
    >>> ls.b
    nan
    """

    def __init__(self, x):
        self._x = float(x)
        self.m = float("inf")
        self.b = float("nan")

    def x(self, y) -> float:
        """Returns the :math:`x`-value of the line at a particular :math:`y`-value.

        Parameters
        ----------
        y : {int, float}
            The :math:`y`-value at which to compute :math:`x`.

        Examples
        --------
        >>> l = VerticalLine(0)
        >>> l.x(0.25)
        0.0
        """
        return self._x

    def y(self, x) -> float:
        """Returns the :math:`y`-value of the line at a particular :math:`x`-value.

        Parameters
        ----------
        x : {int, float}
            The :math:`x`-value at which to compute :math:`y`.

        Examples
        --------
        >>> l = VerticalLine(1)
        >>> l.y(1)
        nan
        """
        return float("nan")


class Line(Geometry):
    """Geometric representation of line objects.

    Parameters
    ----------
    m : {int, float}
        The slope of the line. ``m`` is also an attribute.
    b : {int, float}
        The :math:`y`-intercept of the line. ``b`` is also an attribute.

    Raises
    ------
    ArithmeticError
        Raised when infinity is passed in as the slope.

    Examples
    --------
    >>> ls = Line(1, 0)
    >>> ls.m
    1.0
    >>> ls.b
    0.0
    """

    def __init__(self, m, b):
        if m == float("inf"):
            raise ArithmeticError("Slope cannot be infinite.")
        self.m = float(m)
        self.b = float(b)

    def x(self, y: Union[int, float]) -> float:
        """Returns the :math:`x`-value of the line at a particular :math:`y`-value.

        Parameters
        ----------
        y : {int, float}
            The :math:`y`-value at which to compute :math:`x`.

        Raises
        ------
        ArithmeticError
            Raised when ``0.`` is passed in as the slope.

        Examples
        --------
        >>> l = Line(0.5, 0)
        >>> l.x(0.25)
        0.5
        """
        if self.m == 0:
            raise ArithmeticError("Cannot solve for 'x' when slope is zero.")
        return (y - self.b) / self.m

    def y(self, x: Union[int, float]) -> float:
        """Returns the :math:`y`-value of the line at a particular :math:`x`-value.

        Parameters
        ----------
        x : {int, float}
            The :math:`x`-value at which to compute :math:`y`.

        Examples
        --------
        >>> l = Line(1, 0)
        >>> l.y(1)
        1.0
        """
        if self.m == 0:
            return self.b
        return self.m * x + self.b


class Ray:
    """Geometric representation of ray objects.

    Parameters
    ----------
    origin : libpysal.cg.Point
        The point where the ray originates.
    second_p :
        The second point specifying the ray (not ``origin``.)

    Attributes
    ----------
    o : libpysal.cg.Point
        The origin (point where ray originates). See ``origin``.
    p : libpysal.cg.Point
        The second point on the ray (not the point where the ray
        originates). See ``second_p``.

    Examples
    --------
    >>> l = Ray(Point((0, 0)), Point((1, 0)))
    >>> str(l.o)
    '(0.0, 0.0)'
    >>> str(l.p)
    '(1.0, 0.0)'
    """

    def __init__(self, origin, second_p):
        self.o = origin
        self.p = second_p
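# --- Illustrative sketch (ours, not upstream): ``Line`` solves y = m*x + b
# in both directions, while ``VerticalLine`` pins x and reports NaN for y.
def _demo_line_solving():
    l = Line(2, 1)  # y = 2x + 1
    assert l.y(3) == 7.0  # forward evaluation
    assert l.x(7.0) == 3.0  # inverse evaluation
    v = VerticalLine(5)
    assert v.x(123) == 5.0  # x is constant on a vertical line
    assert math.isnan(v.y(0))  # y is undefined along a vertical line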
class Chain(Geometry):
    """Geometric representation of a chain, also known as a polyline.

    Parameters
    ----------
    vertices : list
        A point list or list of point lists.

    Attributes
    ----------
    vertices : list
        The list of points of the vertices of the chain in order.
    len : float
        The geometric length of the chain.

    Examples
    --------
    >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
    """

    def __init__(self, vertices: list):
        if isinstance(vertices[0], list):
            self._vertices = [part for part in vertices]
        else:
            self._vertices = [vertices]
        self._reset_props()

    @classmethod
    def __from_geo_interface__(cls, geo: dict):
        if geo["type"].lower() == "linestring":
            verts = [Point(pt) for pt in geo["coordinates"]]
        elif geo["type"].lower() == "multilinestring":
            verts = [list(map(Point, part)) for part in geo["coordinates"]]
        else:
            raise TypeError("%r is not a Chain." % geo)
        return cls(verts)

    @property
    def __geo_interface__(self) -> dict:
        if len(self.parts) == 1:
            return {"type": "LineString", "coordinates": self.vertices}
        else:
            return {"type": "MultiLineString", "coordinates": self.parts}

    def _reset_props(self):
        """**HELPER METHOD. DO NOT CALL.** Resets attributes which are
        functions of other attributes. The getters for these attributes
        (implemented as properties) then recompute their values if they
        have been reset since the last call to the getter.
        """
        self._len = None
        self._arclen = None
        self._bounding_box = None

    @property
    def vertices(self) -> list:
        """Returns the vertices of the chain in clockwise order.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> verts = c.vertices
        >>> len(verts)
        4
        """
        return sum([part for part in self._vertices], [])

    @property
    def parts(self) -> list:
        """Returns the parts (lists of ``libpysal.cg.Point`` objects)
        of the chain.

        Examples
        --------
        >>> c = Chain(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],
        ...         [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))]
        ...     ]
        ... )
        >>> len(c.parts)
        2
        """
        return [[v for v in part] for part in self._vertices]

    @property
    def bounding_box(self):
        """Returns the bounding box of the chain.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the chain.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> c.bounding_box.left
        0.0
        >>> c.bounding_box.lower
        0.0
        >>> c.bounding_box.right
        2.0
        >>> c.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            self._bounding_box = Rectangle(min(x), min(y), max(x), max(y))
        return self._bounding_box

    @property
    def len(self) -> Union[int, float]:
        """Returns the geometric length of the chain.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> c.len
        3.0
        >>> c = Chain(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],
        ...         [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))]
        ...     ]
        ... )
        >>> c.len
        4.0
        """

        def dist(v1: tuple, v2: tuple) -> Union[int, float]:
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(p: list) -> Union[int, float]:
            return sum([dist(p[i], p[i + 1]) for i in range(len(p) - 1)])

        if self._len is None:
            self._len = sum([part_perimeter(part) for part in self._vertices])
        return self._len

    @property
    def arclen(self) -> Union[int, float]:
        """Returns the geometric length of the chain
        computed using arcdistance (meters).
        """

        def part_perimeter(p: list) -> Union[int, float]:
            return sum([arcdist(p[i], p[i + 1]) * 1000.0 for i in range(len(p) - 1)])

        if self._arclen is None:
            self._arclen = sum([part_perimeter(part) for part in self._vertices])
        return self._arclen

    @property
    def segments(self) -> list:
        """Returns the segments that compose the chain."""
        return [
            [LineSegment(a, b) for (a, b) in zip(part[:-1], part[1:])]
            for part in self._vertices
        ]
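# --- Illustrative sketch (ours, not upstream): a single-part ``Chain``
# exposes its Euclidean length and the ``LineSegment`` objects it is built
# from, one list of segments per part.
def _demo_chain_segments():
    c = Chain([Point((0, 0)), Point((3, 0)), Point((3, 4))])
    assert c.len == 7.0  # 3.0 + 4.0
    segs = c.segments[0]  # one part -> one list of segments
    assert len(segs) == 2 and segs[0].len == 3.0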
class Ring(Geometry):
    """Geometric representation of a linear ring. Linear rings must be
    closed, the first and last point must be the same. Open rings will be
    closed. This class exists primarily as a geometric primitive to form
    complex polygons with multiple rings and holes. The ordering of the
    vertices is ignored and will not be altered.

    Parameters
    ----------
    vertices : list
        A list of vertices.

    Attributes
    ----------
    vertices : list
        A list of points with the vertices of the ring.
    len : int
        The number of vertices.
    perimeter : float
        The geometric length of the perimeter of the ring.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the ring.
    area : float
        The area enclosed by the ring.
    centroid : {tuple, libpysal.cg.Point}
        The centroid of the ring defined by the 'center of gravity'
        or 'center of mass'.
    """

    def __init__(self, vertices):
        if vertices[0] != vertices[-1]:
            vertices = vertices[:] + vertices[0:1]
            # msg = "Supplied vertices do not form a closed ring, "
            # msg += "the first and last vertices are not the same."
            # raise ValueError(msg)

        self.vertices = tuple(vertices)
        self._perimeter = None
        self._bounding_box = None
        self._area = None
        self._centroid = None
        self._quad_tree_structure = None

    def __len__(self) -> int:
        return len(self.vertices)

    @property
    def len(self) -> int:
        return len(self)

    @staticmethod
    def dist(v1, v2) -> Union[int, float]:
        return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

    @property
    def perimeter(self) -> Union[int, float]:
        if self._perimeter is None:
            dist = self.dist
            v = self.vertices
            self._perimeter = sum(
                [dist(v[i], v[i + 1]) for i in range(-1, len(self) - 1)]
            )
        return self._perimeter

    @property
    def bounding_box(self):
        """Returns the bounding box of the ring.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the ring.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> r.bounding_box.left
        0.0
        >>> r.bounding_box.lower
        0.0
        >>> r.bounding_box.right
        2.0
        >>> r.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            self._bounding_box = Rectangle(min(x), min(y), max(x), max(y))
        return self._bounding_box

    @property
    def area(self) -> Union[int, float]:
        """Returns the area of the ring.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> r.area
        2.0
        """
        return abs(self.signed_area)

    @property
    def signed_area(self) -> Union[int, float]:
        vertices = self.vertices
        x = [v[0] for v in vertices]
        y = [v[1] for v in vertices]
        N = len(self)
        A = 0.0
        for i in range(N - 1):
            A += (x[i] + x[i + 1]) * (y[i] - y[i + 1])
        A = A * 0.5
        self._area = -A
        return self._area

    @property
    def centroid(self):
        """Returns the centroid of the ring.

        Returns
        -------
        self._centroid : libpysal.cg.Point
            The centroid of the ring defined by the 'center of gravity'
            or 'center of mass'.

        Notes
        -----
        The centroid returned by this method is the geometric centroid.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> str(r.centroid)
        '(1.0, 0.5)'
        """
        if self._centroid is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            A = self.signed_area
            N = len(self)
            cx = 0
            cy = 0
            for i in range(N - 1):
                f = x[i] * y[i + 1] - x[i + 1] * y[i]
                cx += (x[i] + x[i + 1]) * f
                cy += (y[i] + y[i + 1]) * f
            cx = 1.0 / (6 * A) * cx
            cy = 1.0 / (6 * A) * cy
            self._centroid = Point((cx, cy))
        return self._centroid

    def build_quad_tree_structure(self):
        """Build the quad tree structure for the ring. Once the structure
        is built, speed for testing if a point is inside the ring will be
        increased significantly.
        """
        self._quad_tree_structure = QuadTreeStructureSingleRing(self)

    def contains_point(self, point):
        """Point containment using winding number. The implementation is based on
        `this <http://www.engr.colostate.edu/~dga/dga/papers/point_in_polygon.pdf>`_.

        Parameters
        ----------
        point : libpysal.cg.Point
            The point to test for containment.

        Returns
        -------
        point_contained : bool
            ``True`` if ``point`` is contained within the ring,
            otherwise ``False``.
        """
        point_contained = False
        if self._quad_tree_structure is None:
            x, y = point
            # bbox checks
            bbleft = x < self.bounding_box.left
            bbright = x > self.bounding_box.right
            bblower = y < self.bounding_box.lower
            bbupper = y > self.bounding_box.upper
            if bbleft or bbright or bblower or bbupper:
                pass
            else:
                rn = len(self.vertices)
                xs = [self.vertices[i][0] - point[0] for i in range(rn)]
                ys = [self.vertices[i][1] - point[1] for i in range(rn)]
                w = 0
                for i in range(len(self.vertices) - 1):
                    yi = ys[i]
                    yj = ys[i + 1]
                    xi = xs[i]
                    xj = xs[i + 1]
                    if yi * yj < 0:
                        r = xi + yi * (xj - xi) / (yi - yj)
                        if r > 0:
                            if yi < 0:
                                w += 1
                            else:
                                w -= 1
                    elif yi == 0 and xi > 0:
                        if yj > 0:
                            w += 0.5
                        else:
                            w -= 0.5
                    elif yj == 0 and xj > 0:
                        if yi < 0:
                            w += 0.5
                        else:
                            w -= 0.5
                if w == 0:
                    pass
                else:
                    point_contained = True
        else:
            point_contained = self._quad_tree_structure.contains_point(point)
        return point_contained
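# --- Illustrative sketch (ours, not upstream): ``Ring.contains_point`` first
# rejects points outside the bounding box, then accumulates a winding number
# around the query point; any nonzero total means containment.
def _demo_ring_winding_number():
    r = Ring(
        [Point((0, 0)), Point((4, 0)), Point((4, 4)), Point((0, 4)), Point((0, 0))]
    )
    assert r.contains_point((2, 2))  # interior: winding number is nonzero
    assert not r.contains_point((5, 2))  # rejected by the bounding box check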
class Polygon(Geometry):
    """Geometric representation of polygon objects.
    Returns a polygon created from the objects specified.

    Parameters
    ----------
    vertices : list
        A list of vertices or a list of lists of vertices.
    holes : list
        A list of sub-polygons to be considered as holes.
        Default is ``None``.

    Attributes
    ----------
    vertices : list
        A list of points with the vertices of the polygon in clockwise order.
    len : int
        The number of vertices including holes.
    perimeter : float
        The geometric length of the perimeter of the polygon.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the polygon.
    bbox : list
        A list representation of the bounding box in the form
        ``[left, lower, right, upper]``.
    area : float
        The area enclosed by the polygon.
    centroid : tuple
        The 'center of gravity', i.e. the mean point of the polygon.

    Examples
    --------
    >>> p1 = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
    """

    def __init__(self, vertices, holes=None):
        self._part_rings = []
        self._hole_rings = []

        def clockwise(part: list) -> list:
            if standalone.is_clockwise(part):
                return part[:]
            else:
                return part[::-1]

        vl = list(vertices)
        if isinstance(vl[0], list):
            self._part_rings = list(map(Ring, vertices))
            self._vertices = [clockwise(part) for part in vertices]
        else:
            self._part_rings = [Ring(vertices)]
            self._vertices = [clockwise(vertices)]
        if holes is not None and holes != []:
            if isinstance(holes[0], list):
                self._hole_rings = list(map(Ring, holes))
                self._holes = [clockwise(hole) for hole in holes]
            else:
                self._hole_rings = [Ring(holes)]
                self._holes = [clockwise(holes)]
        else:
            self._holes = [[]]
        self._reset_props()
        self.is_quad_tree_structure_built = False

    @classmethod
    def __from_geo_interface__(cls, geo: dict):
        """While PySAL does not differentiate polygons and multipolygons,
        GEOS, etc, do. In GEOS, etc, polygons may only have a single exterior
        ring, all other parts are holes. MultiPolygons are simply a list of
        polygons.
        """
        geo_type = geo["type"].lower()
        if geo_type == "multipolygon":
            parts = []
            holes = []
            for polygon in geo["coordinates"]:
                verts = [[Point(pt) for pt in part] for part in polygon]
                parts += verts[0:1]
                holes += verts[1:]
            if not holes:
                holes = None
            return cls(parts, holes)
        else:
            verts = [[Point(pt) for pt in part] for part in geo["coordinates"]]
            return cls(verts[0:1], verts[1:])

    @property
    def __geo_interface__(self) -> dict:
        """Return ``__geo_interface__`` information lookup."""
        if len(self.parts) > 1:
            geo = {
                "type": "MultiPolygon",
                "coordinates": [[part] for part in self.parts],
            }
            if self._holes[0]:
                geo["coordinates"][0] += self._holes
            return geo
        if self._holes[0]:
            return {"type": "Polygon", "coordinates": self._vertices + self._holes}
        else:
            return {"type": "Polygon", "coordinates": self._vertices}

    def _reset_props(self):
        """Resets the geometric properties of the polygon."""
        self._perimeter = None
        self._bounding_box = None
        self._bbox = None
        self._area = None
        self._centroid = None
        self._len = None

    def __len__(self) -> int:
        return self.len

    @property
    def len(self) -> int:
        """Returns the number of vertices in the polygon.

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> p1.len
        4
        >>> len(p1)
        4
        """
        if self._len is None:
            self._len = len(self.vertices)
        return self._len

    @property
    def vertices(self) -> list:
        """Returns the vertices of the polygon in clockwise order.

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> len(p1.vertices)
        4
        """
        return sum([part for part in self._vertices], []) + sum(
            [part for part in self._holes], []
        )

    @property
    def holes(self) -> list:
        """Returns the holes of the polygon in clockwise order.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))]
        ... )
        >>> len(p.holes)
        1
        """
        return [[v for v in part] for part in self._holes]

    @property
    def parts(self) -> list:
        """Returns the parts of the polygon in clockwise order.

        Examples
        --------
        >>> p = Polygon(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],
        ...         [Point((10, 10)), Point((11, 10)), Point((11, 11))]
        ...     ]
        ... )
        >>> len(p.parts)
        2
        """
        return [[v for v in part] for part in self._vertices]

    @property
    def perimeter(self) -> Union[int, float]:
        """Returns the perimeter of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.perimeter
        4.0
        """

        def dist(v1, v2) -> Union[int, float]:
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(part) -> Union[int, float]:
            return sum([dist(part[i], part[i + 1]) for i in range(-1, len(part) - 1)])

        sum_perim = lambda part_type: sum([part_perimeter(part) for part in part_type])
        if self._perimeter is None:
            self._perimeter = sum_perim(self._vertices) + sum_perim(self._holes)
        return self._perimeter

    @property
    def bbox(self):
        """Returns the bounding box of the polygon as a list.

        Returns
        -------
        self._bbox : list
            The bounding box of the polygon as a list.

        See Also
        --------
        libpysal.cg.bounding_box
        """
        if self._bbox is None:
            self._bbox = [
                self.bounding_box.left,
                self.bounding_box.lower,
                self.bounding_box.right,
                self.bounding_box.upper,
            ]
        return self._bbox

    @property
    def bounding_box(self):
        """Returns the bounding box of the polygon.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> p.bounding_box.left
        0.0
        >>> p.bounding_box.lower
        0.0
        >>> p.bounding_box.right
        2.0
        >>> p.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]),
                min([v[1] for v in vertices]),
                max([v[0] for v in vertices]),
                max([v[1] for v in vertices]),
            )
        return self._bounding_box

    @property
    def area(self) -> float:
        """Returns the area of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.area
        1.0
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))]
        ... )
        >>> p.area
        99.0
        """

        def part_area(pv: list) -> float:
            __area = 0
            for i in range(-1, len(pv) - 1):
                __area += (pv[i][0] + pv[i + 1][0]) * (pv[i][1] - pv[i + 1][1])
            __area = __area * 0.5
            if __area < 0:
                __area = -__area
            return __area

        sum_area = lambda part_type: sum([part_area(part) for part in part_type])
        _area = sum_area(self._vertices) - sum_area(self._holes)
        return _area

    @property
    def centroid(self) -> tuple:
        """Returns the centroid of the polygon.

        Notes
        -----
        The centroid returned by this method is the geometric centroid and
        respects multipart polygons with holes. Also known as the 'center of
        gravity' or 'center of mass'.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))]
        ... )
        >>> p.centroid
        (5.0353535353535355, 5.0353535353535355)
        """
        CP = [ring.centroid for ring in self._part_rings]
        AP = [ring.area for ring in self._part_rings]
        CH = [ring.centroid for ring in self._hole_rings]
        AH = [-ring.area for ring in self._hole_rings]

        A = AP + AH
        cx = sum([pt[0] * area for pt, area in zip(CP + CH, A)]) / sum(A)
        cy = sum([pt[1] * area for pt, area in zip(CP + CH, A)]) / sum(A)

        return cx, cy

    def build_quad_tree_structure(self):
        """Build the quad tree structure for this polygon. Once the structure
        is built, speed for testing if a point is inside the ring will be
        increased significantly.
        """
        for ring in self._part_rings:
            ring.build_quad_tree_structure()
        for ring in self._hole_rings:
            ring.build_quad_tree_structure()
        self.is_quad_tree_structure_built = True

    def contains_point(self, point):
        """Test if a polygon contains a point.

        Parameters
        ----------
        point : libpysal.cg.Point
            A point to test for containment.

        Returns
        -------
        contains : bool
            ``True`` if the polygon contains ``point``, otherwise ``False``.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((4, 0)), Point((4, 5)), Point((2, 3)), Point((0, 5))]
        ... )
        >>> p.contains_point((3, 3))
        True
        >>> p.contains_point((0, 6))
        False
        >>> p.contains_point((2, 2.9))
        True
        >>> p.contains_point((4, 5))
        False
        >>> p.contains_point((4, 0))
        False

        Handles holes.

        >>> p = Polygon(
        ...     [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))],
        ...     [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
        ... )
        >>> p.contains_point((3.0, 3.0))
        False
        >>> p.contains_point((1.0, 1.0))
        True

        Notes
        -----
        Points falling exactly on polygon edges may yield unpredictable results.
        """
        searching = True
        for ring in self._hole_rings:
            if ring.contains_point(point):
                contains = False
                searching = False
                break
        if searching:
            for ring in self._part_rings:
                if ring.contains_point(point):
                    contains = True
                    searching = False
                    break
            if searching:
                contains = False
        return contains
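# --- Illustrative sketch (ours, not upstream): ``Polygon.contains_point``
# checks hole rings first, so a point inside a hole is reported as outside
# the polygon even though it is inside the exterior ring.
def _demo_polygon_hole_containment():
    shell = [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))]
    hole = [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
    p = Polygon(shell, hole)
    assert not p.contains_point((3, 3))  # inside the hole -> not contained
    assert p.contains_point((1, 1))  # in the shell, outside the hole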
``1`` if the points are collinear and ``self.p2`` is in the middle.
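# A minimal sketch (hypothetical helper, not the module's sw_ccw) of the
# underlying cross-product turn test: the sign of the z-component of
# (p1 - p0) x (p2 - p0). The collinear tie-breaking described above is the
# extra logic sw_ccw layers on top of this sign.
def turn(p0, p1, p2):
    cross = (p1[0] - p0[0]) * (p2[1] - p0[1]) - (p1[1] - p0[1]) * (p2[0] - p0[0])
    return 1 if cross > 0 else (-1 if cross < 0 else 0)

assert turn((0, 0), (5, 0), (2, 2)) == 1    # ccw, cf. is_ccw
assert turn((0, 0), (5, 0), (2, -2)) == -1  # cw, cf. is_cw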
[Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))] ... ) >>> p.centroid (5.0353535353535355, 5.0353535353535355)
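# A minimal sketch (hypothetical helpers, not the module's own code) of the
# area-weighted centroid this example illustrates: part centroids weighted
# by +area, hole centroids by -area. Rings are closed lists of (x, y)
# tuples with first == last.
def ring_area_centroid(ring):
    a = cx = cy = 0.0
    for (x0, y0), (x1, y1) in zip(ring[:-1], ring[1:]):
        f = x0 * y1 - x1 * y0  # shoelace cross term
        a += f
        cx += (x0 + x1) * f
        cy += (y0 + y1) * f
    a *= 0.5
    return abs(a), (cx / (6 * a), cy / (6 * a))

def centroid_with_holes(exterior, holes):
    weighted = [ring_area_centroid(exterior)]
    weighted += [(-a, c) for a, c in (ring_area_centroid(h) for h in holes)]
    total = sum(a for a, _ in weighted)
    return (sum(a * c[0] for a, c in weighted) / total,
            sum(a * c[1] for a, c in weighted) / total)

centroid_with_holes(
    [(0, 0), (10, 0), (10, 10), (0, 10), (0, 0)],
    [[(1, 1), (1, 2), (2, 2), (2, 1), (1, 1)]],
)  # (5.0353535353535355, 5.0353535353535355), matching the doctest above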
Returns ------- point_contained : bool ``True`` if ``point`` is contained within the polygon, otherwise ``False``.
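# A minimal even-odd (ray crossing) sketch of point-in-polygon, only to
# illustrate the idea; the module itself uses a winding-number test and,
# once built, a quad tree. vertices: closed ring of (x, y), first == last.
def even_odd_contains(vertices, point):
    x, y = point
    inside = False
    for (x0, y0), (x1, y1) in zip(vertices[:-1], vertices[1:]):
        if (y0 > y) != (y1 > y):  # edge straddles the horizontal ray at y
            if x < x0 + (y - y0) * (x1 - x0) / (y1 - y0):
                inside = not inside
    return inside

assert even_odd_contains([(0, 0), (10, 0), (10, 10), (0, 10), (0, 0)], (3, 3))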
Examples -------- >>> c = Chain( ... [ ... [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))], ... [Point((10, 10)), Point((11, 10)), Point((11, 11))] ... ] ... ) >>> len(c.parts) 2
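# Sketch of the consecutive-pair idiom this module uses to turn a part's
# vertex list into segments (cf. the segments property's zip(part[:-1], part[1:])).
part = [(0, 0), (1, 0), (1, 1)]
edges = list(zip(part[:-1], part[1:]))
# [((0, 0), (1, 0)), ((1, 0), (1, 1))] -- one edge per consecutive pair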
Attributes ---------- vertices : list The list of vertices of the polygon in clockwise order.
Examples -------- >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))]) >>> p1.len 4 >>> len(p1) 4
Examples -------- >>> r = Rectangle(0, 0, 4, 4) >>> r.height 4.0
Examples -------- >>> r = Rectangle(0, 0, 4, 4) >>> r.set_scale(2) >>> r.left -2.0
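# A minimal sketch of the set_scale() arithmetic shown above (hypothetical
# helper): each edge is moved relative to the fixed center.
def scaled(left, lower, right, upper, scale):
    cx, cy = (left + right) / 2, (lower + upper) / 2
    return (cx + scale * (left - cx), cy + scale * (lower - cy),
            cx + scale * (right - cx), cy + scale * (upper - cy))

assert scaled(0, 0, 4, 4, 2) == (-2.0, -2.0, 6.0, 6.0)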
Examples -------- >>> ls = LineSegment(Point((1, 2)), Point((5, 6))) >>> ls._reset_props()
lower : float Minimum y-value of the rectangle.", "r[:] [-4.0, 3.0, 10.0, 17.0] \"\"\" l = [self.left, self.lower, self.right, self.upper] return", "The bounding box of the line segment. Examples -------- >>> ls = LineSegment(Point((1,", "---------- scale : int, float The ratio of the new scale to the", "Point((0, 0)) ... ] ... ) >>> str(r.centroid) '(1.0, 0.5)' \"\"\" if self._centroid", "list: \"\"\"Returns the vertices of the polygon in clockwise order. Examples -------- >>>", "ls = LineSegment(Point((1, 2)), Point((5, 6))) >>> ls._reset_props() \"\"\" self._bounding_box = None self._len", "the quad tree structure for this polygon. Once the structure is built, speed", "= 0 cy = 0 for i in range(N - 1): f =", "_reset_props(self): \"\"\"Resets the geometric properties of the polygon.\"\"\" self._perimeter = None self._bounding_box =", "of verticle line objects. Parameters ---------- x : {int, float} The :math:`x`-intercept of", "the geometric properties of the polygon.\"\"\" self._perimeter = None self._bounding_box = None self._bbox", "... ) >>> p.contains_point((3,3)) 1 >>> p.contains_point((0,6)) 0 >>> p.contains_point((2,2.9)) 1 >>> p.contains_point((4,5))", "ls.b 0.0 \"\"\" def __init__(self, m, b): if m == float(\"inf\"): raise ArithmeticError(\"Slope", "self._line == False: dx = self._p1[0] - self._p2[0] dy = self._p1[1] - self._p2[1]", "= start_pt self._p2 = end_pt self._reset_props() def __str__(self): return \"LineSegment(\" + str(self._p1) +", "the perimeter of the ring. bounding_box : libpysal.cg.Rectangle The bounding box of the", "... [ ... [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))], ... [Point((2,", "geo[\"coordinates\"]] else: raise TypeError(\"%r is not a Chain.\" % geo) return cls(verts) @property", "if self.m == 0: return self.b return self.m * x + self.b class", "slope is zero.\") return (y - self.b) / self.m def y(self, x: Union[int,", "\"\"\"Tests if the point is equal to another object. Parameters ---------- other :", "is passed in as the slope. Examples -------- >>> l = Line(0.5, 0)", "self._hole_rings = [] def clockwise(part: list) -> list: if standalone.is_clockwise(part): return part[:] else:", "object. Examples -------- >>> ls = LineSegment(Point((2, 2)), Point((5, 2))) >>> ls.len 3.0", "= xi + yi * (xj - xi) / (yi - yj) if", "def parts(self) -> list: \"\"\"Returns the parts (lists of ``libpysal.cg.Point`` objects) of the", "attribute of the line segment. Returns ------- self._p2 : libpysal.cg.Point The ``_p2`` attribute.", "ArithmeticError(\"Rectangle must have positive area.\") self.left = float(left) self.lower = float(lower) self.right =", "0 for i in range(-1, len(pv) - 1): __area += (pv[i][0] + pv[i", "v2[0] < 0 def sw_ccw(self, pt): \"\"\"Sedgewick test for ``pt`` being ccw of", "equality against. Examples -------- >>> Point((0, 1)) < Point((0, 1)) False >>> Point((0,", "test equality against. Examples -------- >>> Point((0, 1)) <= Point((0, 1)) True >>>", "(6 * A) * cx cy = 1.0 / (6 * A) *", "True \"\"\" v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1]) v2 = (pt[0]", "contains_point(self, point): \"\"\"Test if a polygon contains a point. Parameters ---------- point :", "or mass'. _quad_tree_structure : libpysal.cg.QuadTreeStructureSingleRing The quad tree structure for the ring. 
This structure helps test if a point is inside the ring.
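# Hypothetical usage sketch (names as in this module): build the quad tree
# once, then run many containment queries against the same ring.
r = Ring([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0, 0))])
r.build_quad_tree_structure()  # one-time cost; later queries are faster
r.contains_point((1, 0.5))     # True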
Parameters ---------- loc : tuple The point's location.
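# Basic usage drawn from this module's doctests: a Point stores its
# coordinates as a tuple that is indexable and sliceable.
p = Point((5.5, 4.3))
p[0] == 5.5          # True
p[:2] == (5.5, 4.3)  # True
len(p)               # 2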
Examples -------- >>> l = VerticalLine(0) >>> l.x(0.25) 0.0
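# Sketch of VerticalLine's behavior per this module: the slope is
# float("inf"), x is the constant intercept, and y is undefined (nan).
vl = VerticalLine(1)
vl.m          # inf
vl.x(123.0)   # 1.0 -- the same x for every y
vl.y(1)       # nan, as in these doctests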
Parameters ---------- p1 : libpysal.cg.Point A point. Returns ------- self._p1 : libpysal.cg.Point The reset ``p1`` attribute.
Examples -------- >>> r = Rectangle(0, 0, 4, 4) >>> r.set_centroid(Point((4, 4))) >>> r.left 2.0 >>> r.right 6.0 >>> r.lower 2.0 >>> r.upper 6.0
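# A minimal sketch of the set_centroid() shift shown above (hypothetical
# helper): translate every edge by (new_center - current_center).
def recentered(left, lower, right, upper, new_center):
    dx = new_center[0] - (left + right) / 2
    dy = new_center[1] - (lower + upper) / 2
    return (left + dx, lower + dy, right + dx, upper + dy)

assert recentered(0, 0, 4, 4, (4, 4)) == (2.0, 2.0, 6.0, 6.0)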
``-1`` if the points are collinear and ``self.p1`` is in the middle.
DO NOT CALL.** Returns the ``p2`` attribute of the line segment.
Returns ------- point_contained : bool ``True``", "ls.is_cw(Point((2, -2))) True \"\"\" v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1]) v2", "self._len @property def arclen(self) -> Union[int, float]: \"\"\"Returns the geometric length of the", "\"linestring\": verts = [Point(pt) for pt in geo[\"coordinates\"]] elif geo[\"type\"].lower() == \"multilinestring\": verts", "1)), ... Point((0, 0)) ... ] ... ) >>> r.area 2.0 \"\"\" return", "sum_perim(self._vertices) + sum_perim(self._holes) return self._perimeter @property def bbox(self): \"\"\"Returns the bounding box of", "geo[\"type\"].lower() # if geo_type.startswith('multi'): # raise NotImplementedError, \"%s are not supported at this", "if isinstance(vl[0], list): self._part_rings = list(map(Ring, vertices)) self._vertices = [clockwise(part) for part in", "if bbleft or bbright or bblower or bbupper: pass else: rn = len(self.vertices)", "p.contains_point((1.0, 1.0)) True Notes ----- Points falling exactly on polygon edges may yield", "__area sum_area = lambda part_type: sum([part_area(part) for part in part_type]) _area = sum_area(self._vertices)", "for v in part] for part in self._vertices] @property def perimeter(self) -> Union[int,", "height of the Rectangle. Examples -------- >>> r = Rectangle(0, 0, 4, 4)", "``self.p1`` to ``self.p2`` to ``pt`` is cw. ``-1`` if the points are collinear", "ls.bounding_box.right 5.0 >>> ls.bounding_box.upper 6.0 \"\"\" # If LineSegment attributes p1, p2 changed,", "1)]) if self._len is None: self._len = sum([part_perimeter(part) for part in self._vertices]) return", "in vertices]), max([v[1] for v in vertices]), ) return self._bounding_box @property def len(self)", "of mass'. Examples -------- >>> r = Ring( ... [ ... Point((0, 0)),", ">>> p.contains_point((4,5)) 0 >>> p.contains_point((4,0)) 0 Handles holes. >>> p = Polygon( ...", "of the perimeter of the ring. bounding_box : libpysal.cg.Rectangle The bounding box of", "0)) ... ] ... ) >>> r.bounding_box.left 0.0 >>> r.bounding_box.lower 0.0 >>> r.bounding_box.right", "ArithmeticError(\"Cannot solve for 'x' when slope is zero.\") return (y - self.b) /", "self.p2 p2 = pt dx1 = p1[0] - p0[0] dy1 = p1[1] -", "def __eq__(self, other) -> bool: \"\"\"Tests if the point is equal to another", "left or upper < lower: raise ArithmeticError(\"Rectangle must have positive area.\") self.left =", "in range(len(self.vertices) - 1): yi = ys[i] yj = ys[i + 1] xi", ">>> ls = LineSegment(Point((2, 2)), Point((5, 2))) >>> ls.len 3.0 \"\"\" # If", "bounding box of the polygon as a list. Returns ------- self._bbox : list", "self._p1 : libpysal.cg.Point The ``_p1`` attribute. Examples -------- >>> ls = LineSegment(Point((1, 2)),", "p2[0] - p0[0] dy2 = p2[1] - p0[1] if dy1 * dx2 <", "= 0.0 for i in range(N - 1): A += (x[i] + x[i", "rings must be closed, the first and last point must be the same.", "self.left = self.left + shift[0] self.right = self.right + shift[0] self.lower = self.lower", "return geo if self._holes[0]: return {\"type\": \"Polygon\", \"coordinates\": self._vertices + self._holes} else: return", "for i in range(rn)] w = 0 for i in range(len(self.vertices) - 1):", "point is not equal to another object. Parameters ---------- other : libpysal.cg.Point An", "True def contains_point(self, point): \"\"\"Test if a polygon contains a point. Parameters ----------", "def bbox(self): \"\"\"Returns the bounding box of the polygon as a list. Returns", "-------- >>> c = Chain( ... [ ... [Point((0, 0)), Point((1, 0)), Point((1,", "number of vertices. 
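# --------------------------------------------------------------------------
# Editorial usage sketch, not part of the original module: the comparison,
# hashing, and indexing behavior of ``Point`` above all defer to the wrapped
# coordinate tuple. The helper name ``_demo_point_semantics`` is hypothetical
# and exists only for illustration; call it by hand to verify the claims.
# --------------------------------------------------------------------------
def _demo_point_semantics():
    p, q = Point((0, 1)), Point((0, 1))
    assert p == q                      # equality compares coordinate tuples
    assert hash(p) == hash(q)          # equal points hash equally ...
    assert len({p, q}) == 1            # ... so sets and dicts deduplicate them
    assert p < Point((1, 1))           # ordering is lexicographic on coordinates
    assert p[0] == 0.0 and p[:] == (0.0, 1.0)  # indexing defers to the tuple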
class LineSegment(Geometry):
    """Geometric representation of line segment objects.

    Parameters
    ----------
    start_pt : libpysal.cg.Point
        The point where the segment begins.
    end_pt : libpysal.cg.Point
        The point where the segment ends.

    Attributes
    ----------
    p1 : libpysal.cg.Point
        The starting point of the line segment.
    p2 : libpysal.cg.Point
        The ending point of the line segment.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the segment.
    len : float
        The length of the segment.
    line : libpysal.cg.Line
        The line on which the segment lies.

    Examples
    --------
    >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
    """

    def __init__(self, start_pt, end_pt):
        self._p1 = start_pt
        self._p2 = end_pt
        self._reset_props()

    def __eq__(self, other) -> bool:
        """Returns ``True`` if ``self`` and ``other`` are the same line segment.

        Examples
        --------
        >>> l1 = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> l2 = LineSegment(Point((5, 6)), Point((1, 2)))
        >>> l1 == l2
        True
        >>> l2 == l1
        True
        """
        eq = False
        if not isinstance(other, self.__class__):
            pass
        else:
            if other.p1 == self._p1 and other.p2 == self._p2:
                eq = True
            elif other.p2 == self._p1 and other.p1 == self._p2:
                eq = True
        return eq

    def intersect(self, other) -> bool:
        """Test whether segment intersects with other segment (``True``) or
        not (``False``). Handles endpoints of segments being on other segment.

        Parameters
        ----------
        other : libpysal.cg.LineSegment
            Another line segment to check against.

        Examples
        --------
        >>> ls = LineSegment(Point((5, 0)), Point((10, 0)))
        >>> ls1 = LineSegment(Point((5, 0)), Point((10, 1)))
        >>> ls.intersect(ls1)
        True
        >>> ls2 = LineSegment(Point((5, 1)), Point((10, 1)))
        >>> ls.intersect(ls2)
        False
        >>> ls2 = LineSegment(Point((7, -1)), Point((7, 2)))
        >>> ls.intersect(ls2)
        True
        """
        ccw1 = self.sw_ccw(other.p2)
        ccw2 = self.sw_ccw(other.p1)
        ccw3 = other.sw_ccw(self.p1)
        ccw4 = other.sw_ccw(self.p2)
        intersects = ccw1 * ccw2 <= 0 and ccw3 * ccw4 <= 0
        return intersects

    def _reset_props(self):
        """**HELPER METHOD. DO NOT CALL.** Resets attributes which are
        functions of other attributes. The getters for these attributes
        (implemented as properties) then recompute their values if they
        have been reset since the last call to the getter.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls._reset_props()
        """
        self._bounding_box = None
        self._len = None
        self._line = False

    def _get_p1(self):
        """**HELPER METHOD. DO NOT CALL.**
        Returns the ``p1`` attribute of the line segment.

        Returns
        -------
        self._p1 : libpysal.cg.Point
            The ``_p1`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._get_p1()
        >>> r == Point((1, 2))
        True
        """
        return self._p1

    def _set_p1(self, p1):
        """**HELPER METHOD. DO NOT CALL.**
        Sets the ``p1`` attribute of the line segment.

        Parameters
        ----------
        p1 : libpysal.cg.Point
            A point.

        Returns
        -------
        self._p1 : libpysal.cg.Point
            The reset ``p1`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p1(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        """
        self._p1 = p1
        self._reset_props()
        return self._p1

    p1 = property(_get_p1, _set_p1)

    def _get_p2(self):
        """**HELPER METHOD. DO NOT CALL.**
        Returns the ``p2`` attribute of the line segment.

        Returns
        -------
        self._p2 : libpysal.cg.Point
            The ``_p2`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._get_p2()
        >>> r == Point((5, 6))
        True
        """
        return self._p2

    def _set_p2(self, p2):
        """**HELPER METHOD. DO NOT CALL.**
        Sets the ``p2`` attribute of the line segment.

        Parameters
        ----------
        p2 : libpysal.cg.Point
            A point.

        Returns
        -------
        self._p2 : libpysal.cg.Point
            The reset ``p2`` attribute.
        """
        self._p2 = p2
        self._reset_props()
        return self._p2

    p2 = property(_get_p2, _set_p2)

    def is_ccw(self, pt) -> bool:
        """Returns whether a point is counterclockwise of the segment
        (``True``) or not (``False``). Exclusive.

        Parameters
        ----------
        pt : libpysal.cg.Point
            A point lying ccw or cw of a segment.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_ccw(Point((2, 2)))
        True
        >>> ls.is_ccw(Point((2, -2)))
        False
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] > 0

    def is_cw(self, pt) -> bool:
        """Returns whether a point is clockwise of the segment
        (``True``) or not (``False``). Exclusive.

        Parameters
        ----------
        pt : libpysal.cg.Point
            A point lying ccw or cw of a segment.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_cw(Point((2, 2)))
        False
        >>> ls.is_cw(Point((2, -2)))
        True
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] < 0

    def sw_ccw(self, pt):
        """Sedgewick test for ``pt`` being ccw of segment.

        Returns
        -------
        ``1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is ccw.
        ``-1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is cw.
        ``-1`` if the points are collinear and ``self.p1`` is in the middle.
        ``1`` if the points are collinear and ``self.p2`` is in the middle.
        ``0`` if the points are collinear and ``pt`` is in the middle.
        """
        p0 = self.p1
        p1 = self.p2
        p2 = pt
        dx1 = p1[0] - p0[0]
        dy1 = p1[1] - p0[1]
        dx2 = p2[0] - p0[0]
        dy2 = p2[1] - p0[1]
        if dy1 * dx2 < dy2 * dx1:
            is_ccw = 1
        elif dy1 * dx2 > dy2 * dx1:
            is_ccw = -1
        elif dx1 * dx2 < 0 or dy1 * dy2 < 0:
            is_ccw = -1
        elif dx1 * dx1 + dy1 * dy1 >= dx2 * dx2 + dy2 * dy2:
            is_ccw = 0
        else:
            is_ccw = 1
        return is_ccw

    def get_swap(self):
        """Returns a ``LineSegment`` object which has its endpoints swapped.

        Returns
        -------
        line_seg : libpysal.cg.LineSegment
            The ``LineSegment`` object which has its endpoints swapped.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> swap = ls.get_swap()
        >>> swap.p1[0]
        5.0
        >>> swap.p1[1]
        6.0
        >>> swap.p2[0]
        1.0
        >>> swap.p2[1]
        2.0
        """
        line_seg = LineSegment(self._p2, self._p1)
        return line_seg

    @property
    def bounding_box(self):
        """Returns the minimum bounding box of a ``LineSegment`` object.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the line segment.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls.bounding_box.left
        1.0
        >>> ls.bounding_box.lower
        2.0
        >>> ls.bounding_box.right
        5.0
        >>> ls.bounding_box.upper
        6.0
        """
        # If LineSegment attributes p1, p2 changed, recompute
        if self._bounding_box is None:
            self._bounding_box = Rectangle(
                min([self._p1[0], self._p2[0]]),
                min([self._p1[1], self._p2[1]]),
                max([self._p1[0], self._p2[0]]),
                max([self._p1[1], self._p2[1]]),
            )
        return Rectangle(
            self._bounding_box.left,
            self._bounding_box.lower,
            self._bounding_box.right,
            self._bounding_box.upper,
        )

    @property
    def len(self) -> float:
        """Returns the length of a ``LineSegment`` object.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((5, 2)))
        >>> ls.len
        3.0
        """
        # If LineSegment attributes p1, p2 changed, recompute
        if self._len is None:
            self._len = math.hypot(
                self._p1[0] - self._p2[0], self._p1[1] - self._p2[1]
            )
        return self._len

    @property
    def line(self):
        """Returns a ``Line`` object of the line on which the segment lies.

        Returns
        -------
        self._line : libpysal.cg.Line
            The ``Line`` object of the line on which the segment lies.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((3, 3)))
        >>> l = ls.line
        """
        if self._line is False:
            dx = self._p1[0] - self._p2[0]
            dy = self._p1[1] - self._p2[1]
            if dx == 0 and dy == 0:
                self._line = None
            elif dx == 0:
                self._line = VerticalLine(self._p1[0])
            else:
                m = dy / float(dx)
                b = self._p1[1] - m * self._p1[0]  # y - mx
                self._line = Line(m, b)
        return self._line
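# Editorial usage sketch (hypothetical helper, illustration only): the sign of
# the cross product drives ``is_ccw``/``is_cw``, and ``intersect`` reduces to
# four ``sw_ccw`` orientation tests -- two segments cross exactly when each
# one straddles the line through the other.
def _demo_segment_intersection():
    ls = LineSegment(Point((0, 0)), Point((10, 0)))
    assert ls.is_ccw(Point((5, 5)))            # above the segment
    assert ls.is_cw(Point((5, -5)))            # below the segment
    crossing = LineSegment(Point((5, -5)), Point((5, 5)))
    parallel = LineSegment(Point((0, 1)), Point((10, 1)))
    assert ls.intersect(crossing)              # endpoints straddle each other
    assert not ls.intersect(parallel)          # same side -> no intersection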
\"\"\" def __init__(self): pass class Point(Geometry): \"\"\"Geometric class for point objects.", "and other.p1 == self._p2: eq = True return eq def intersect(self, other) ->", "been reset since the last call to the ``getter``. \"\"\" self._len = None", "= -area return __area sum_area = lambda part_type: sum([part_area(part) for part in part_type])", "2.0 >>> c.bounding_box.upper 1.0 \"\"\" if self._bounding_box is None: vertices = self.vertices self._bounding_box", "self.upper + shift[1] def set_scale(self, scale): \"\"\"Rescales the rectangle around its center. Parameters", "within the polygon, otherwise ``False``. \"\"\" point_contained = False if self._quad_tree_structure is None:", "dx == 0 and dy == 0: self._line = None elif dx ==", "rectangle objects. Attributes ---------- left : float Minimum x-value of the rectangle. lower", ">>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))]) >>> verts", ": libpysal.cg.LineSegment Another line segment to check against. Examples -------- >>> ls =", "self.upper) / 2) self.left = center[0] + scale * (self.left - center[0]) self.right", "Union[int, float]: \"\"\"Returns the perimeter of the polygon. Examples -------- >>> p =", "polygon contains ``point`` otherwise ``False``. Examples -------- >>> p = Polygon( ... [Point((0,0)),", ">>> r.right #maxx 10.0 >>> r.upper #maxy 17.0 \"\"\" def __init__(self, left, lower,", "(self.__loc) == (other.__loc) except AttributeError: return False def __ne__(self, other) -> bool: \"\"\"Tests", ": float The area enclosed by the polygon. centroid : tuple The 'center", "class to help implement ``is_geometry`` and make geometric types extendable. \"\"\" def __init__(self):", "1))]) >>> c.len 3.0 >>> c = Chain( ... [ ... [Point((0, 0)),", ">>> Point((0, 1)) == Point((1, 1)) False \"\"\" try: return (self.__loc) == (other.__loc)", ") >>> str(r.centroid) '(1.0, 0.5)' \"\"\" if self._centroid is None: vertices = self.vertices", "float]: \"\"\"Returns the area of the ring. Examples -------- >>> r = Ring(", "Ray: \"\"\"Geometric representation of ray objects. Parameters ---------- origin : libpysal.cg.Point The point", "------- contains : bool ``True`` if the polygon contains ``point`` otherwise ``False``. Examples", "the ring. Returns ------- self._centroid : libpysal.cg.Point The ring's centroid. Notes ----- The", "\"polygon\": Polygon, \"multipolygon\": Polygon, } # moving this to top breaks unit tests", "polygons with holes. Also known as the 'center of gravity' or 'center of", "_set_p2) def is_ccw(self, pt) -> bool: \"\"\"Returns whether a point is counterclockwise of", "polygon created from the objects specified. Parameters ---------- vertices : list A list", "__getitem__(self, key): \"\"\" Examples -------- >>> r = Rectangle(-4, 3, 10, 17) >>>", "False >>> Point((0, 1)) < Point((1, 1)) True \"\"\" return (self.__loc) < (other.__loc)", "1)) > Point((1, 1)) False \"\"\" return (self.__loc) > (other.__loc) def __ge__(self, other)", "__hash__(self) -> int: \"\"\"Returns the hash of the point's location. Examples -------- >>>", "def area(self) -> Union[int, float]: \"\"\"Returns the area of the Rectangle. Examples --------", "the ring. Returns ------- self._bounding_box : libpysal.cg.Rectangle The bounding box of the ring.", "the centroid of the ring. Returns ------- self._centroid : libpysal.cg.Point The ring's centroid.", "self._bounding_box : libpysal.cg.Rectangle The bounding box of the polygon. 
Examples -------- >>> p", "p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))]) >>> p1.len 4", "as the index to end the slice (excluded). Examples -------- >>> p =", "vertices is ignored and will not be altered. Parameters ---------- vertices : list", "[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))], ... [Point((1, 2)), Point((2, 2)),", "return (self.__loc) != (other.__loc) except AttributeError: return True def __gt__(self, other) -> bool:", "of gravity' or 'center or mass'. _quad_tree_structure : libpysal.cg.QuadTreeStructureSingleRing The quad tree structure", "in part] for part in geo[\"coordinates\"]] return cls(verts[0:1], verts[1:]) @property def __geo_interface__(self) ->", "which the segment lies. Examples -------- >>> ls = LineSegment(Point((2, 2)), Point((3, 3)))", "= [self.vertices[i][0] - point[0] for i in range(rn)] ys = [self.vertices[i][1] - point[1]", "0, 4, 4) >>> r.set_scale(2) >>> r.left -2.0 >>> r.right 6.0 >>> r.lower", "the chain. Examples -------- >>> c = Chain( ... [ ... [Point((0, 0)),", "the line. ``b`` is also an attribute. Raises ------ ArithmeticError Raised when infinity", "self._bbox @property def bounding_box(self): \"\"\"Returns the bounding box of the polygon. Returns -------", "- v2[1]) def part_perimeter(part) -> Union[int, float]: return sum([dist(part[i], part[i + 1]) for", "ends. Attributes ---------- p1 : libpysal.cg.Point The starting point of the line segment.", "when infinity is passed in as the slope. Examples -------- >>> ls =", "loc)) @classmethod def __from_geo_interface__(cls, geo): return cls(geo[\"coordinates\"]) @property def __geo_interface__(self): return {\"type\": \"Point\",", "= LineSegment(Point((5, 1)), Point((10, 1))) >>> ls.intersect(ls2) False >>> ls2 = LineSegment(Point((7, -1)),", "< 0 def sw_ccw(self, pt): \"\"\"Sedgewick test for ``pt`` being ccw of segment.", "# self._p1[0], self._p1[1], self._p2[0], self._p2[1] # ) def __eq__(self, other) -> bool: \"\"\"Returns", "'center of gravity', i.e. the mean point of the polygon. Examples -------- >>>", "length of the perimeter of the polygon. bounding_box : libpysal.cg.Rectangle The bounding box", "have been reset since the last call to the ``getter``. \"\"\" self._len =", "Line, Ray, Chain, Polygon} A new geometric representation of the object. \"\"\" if", "(``False``). Handles endpoints of segments being on other segment. Parameters ---------- other :", "self.lower = float(lower) self.right = float(right) self.upper = float(upper) def __bool__(self): \"\"\"Rectangles will", "last vertices are not the same.\" # raise ValueError(msg) self.vertices = tuple(vertices) self._perimeter", "* dx2 > dy2 * dx1: is_ccw = -1 elif dx1 * dx2", "r.height 4.0 \"\"\" return self.upper - self.lower _geoJSON_type_to_Pysal_type = { \"point\": Point, \"linestring\":", "6.0 \"\"\" center = ((self.left + self.right) / 2, (self.lower + self.upper) /", "swap.p1[0] 5.0 >>> swap.p1[1] 6.0 >>> swap.p2[0] 1.0 >>> swap.p2[1] 2.0 \"\"\" line_seg", "__init__(self, left, lower, right, upper): if right < left or upper < lower:", ": libpysal.cg.Point The reset ``p1`` attribute. Examples -------- >>> ls = LineSegment(Point((1, 2)),", "\"\"\"**HELPER METHOD. DO NOT CALL.** Sets the ``p2`` attribute of the line segment.", "... 
) >>> c.len 4.0 \"\"\" def dist(v1: tuple, v2: tuple) -> Union[int,", "self.bounding_box.upper if bbleft or bbright or bblower or bbupper: pass else: rn =", "ls1 = LineSegment(Point((5, 0)), Point((10, 1))) >>> ls.intersect(ls1) True >>> ls2 = LineSegment(Point((5,", "in self._vertices] @property def bounding_box(self): \"\"\"Returns the bounding box of the chain. Returns", "ccw2 <= 0 and ccw3 * ccw4 <= 0 return intersects def _reset_props(self):", "\"\"\" try: return (self.__loc) == (other.__loc) except AttributeError: return False def __ne__(self, other)", "0: r = xi + yi * (xj - xi) / (yi -", "linear ring. Linear rings must be closed, the first and last point must", "of the desired dimension. Examples -------- >>> p = Point((5.5, 4.3)) >>> p[0]", "else: return {\"type\": \"MultiLineString\", \"coordinates\": self.parts} def _reset_props(self): \"\"\"**HELPER METHOD. DO NOT CALL.**", "r = Rectangle(0, 0, 4, 4) >>> r.set_centroid(Point((4, 4))) >>> r.left 2.0 >>>", "[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))], ... [Point((1, 1)), Point((1, 2)),", "Point((5, 6))) >>> swap = ls.get_swap() >>> swap.p1[0] 5.0 >>> swap.p1[1] 6.0 >>>", "self.vertices x = [v[0] for v in vertices] y = [v[1] for v", "\"\"\"Resets the geometric properties of the polygon.\"\"\" self._perimeter = None self._bounding_box = None", "p1 = self.p2 p2 = pt dx1 = p1[0] - p0[0] dy1 =", "whether segment intersects with other segment (``True``) or not (``False``). Handles endpoints of", "= False if not isinstance(other, self.__class__): pass else: if other.p1 == self._p1 and", "1): __area += (pv[i][0] + pv[i + 1][0]) * (pv[i][1] - pv[i +", "minimum bounding box of a ``LineSegment`` object. Returns ------- self._bounding_box : libpysal.cg.Rectangle The", "tuple) -> Union[int, float]: return math.hypot(v1[0] - v2[0], v1[1] - v2[1]) def part_perimeter(p:", "float Minimum y-value of the rectangle. right : float Maximum x-value of the", "ray objects. Parameters ---------- origin : libpysal.cg.Point The point where the ray originates.", "and xj > 0: if yi < 0: w += 0.5 else: w", "= other[:] return Rectangle( min(self.left, other.left), min(self.lower, other.lower), max(self.right, other.right), max(self.upper, other.upper), )", "= second_p class Chain(Geometry): \"\"\"Geometric representation of a chain, also known as a", ": bool ``True`` if ``point`` is contained within the polygon, otherwise ``False``. \"\"\"", "w += 0.5 else: w -= 0.5 if w == 0: pass else:", ">>> ls = LineSegment(Point((1, 2)), Point((5, 6))) >>> r = ls._get_p1() >>> r", "self._holes = [[]] self._reset_props() @classmethod def __from_geo_interface__(cls, geo: dict): \"\"\"While PySAL does not", "Point((0, 0)) ... ] ... ) >>> r.bounding_box.left 0.0 >>> r.bounding_box.lower 0.0 >>>", "1.0 \"\"\" if self._bounding_box is None: vertices = self.vertices self._bounding_box = Rectangle( min([v[0]", "range(N - 1): A += (x[i] + x[i + 1]) * (y[i] -", "list: \"\"\"Returns the segments that compose the chain.\"\"\" return [ [LineSegment(a, b) for", "self._bounding_box = None self._len = None self._line = False def _get_p1(self): \"\"\"**HELPER METHOD.", "] class Ring(Geometry): \"\"\"Geometric representation of a linear ring. 
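# Editorial usage sketch (hypothetical helper, illustration only): ``Chain.len``
# sums Euclidean part perimeters, while ``segments`` pairs consecutive vertices
# within each part.
def _demo_chain_length():
    c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
    assert c.len == 3.0                        # three unit-length segments
    assert len(c.segments[0]) == 3             # one part, three LineSegments
    assert c.bounding_box[:] == [0.0, 0.0, 2.0, 1.0]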
class Ring(Geometry):
    """Geometric representation of a linear ring. Linear rings must be
    closed, the first and last point must be the same. Open rings will be
    closed. This class exists primarily as a geometric primitive to form
    complex polygons with multiple rings and holes. The ordering of the
    vertices is ignored and will not be altered.

    Parameters
    ----------
    vertices : list
        A list of vertices.

    Attributes
    ----------
    vertices : list
        A list of points with the vertices of the ring.
    len : int
        The number of vertices.
    perimeter : float
        The geometric length of the perimeter of the ring.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the ring.
    area : float
        The area enclosed by the ring.
    centroid : {tuple, libpysal.cg.Point}
        The centroid of the ring defined by the 'center of gravity'
        or 'center of mass'.
    _quad_tree_structure : libpysal.cg.QuadTreeStructureSingleRing
        The quad tree structure for the ring. This structure helps
        test if a point is inside the ring.
    """

    def __init__(self, vertices):
        if vertices[0] != vertices[-1]:
            vertices = vertices[:] + vertices[0:1]
            # msg = "Supplied vertices do not form a closed ring, "
            # msg += "the first and last vertices are not the same."
            # raise ValueError(msg)
        self.vertices = tuple(vertices)
        self._perimeter = None
        self._bounding_box = None
        self._area = None
        self._centroid = None
        self._quad_tree_structure = None

    def __len__(self) -> int:
        return len(self.vertices)

    @property
    def len(self) -> int:
        return len(self)

    @staticmethod
    def dist(v1, v2) -> Union[int, float]:
        return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

    @property
    def perimeter(self) -> Union[int, float]:
        """Returns the geometric length of the perimeter of the ring."""
        if self._perimeter is None:
            dist = self.dist
            v = self.vertices
            self._perimeter = sum(
                [dist(v[i], v[i + 1]) for i in range(-1, len(self) - 1)]
            )
        return self._perimeter

    @property
    def bounding_box(self):
        """Returns the bounding box of the ring.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the ring.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> r.bounding_box.left
        0.0
        >>> r.bounding_box.lower
        0.0
        >>> r.bounding_box.right
        2.0
        >>> r.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            self._bounding_box = Rectangle(min(x), min(y), max(x), max(y))
        return self._bounding_box

    @property
    def area(self) -> Union[int, float]:
        """Returns the area of the ring.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> r.area
        2.0
        """
        return abs(self.signed_area)

    @property
    def signed_area(self) -> Union[int, float]:
        if self._area is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            N = len(self)
            A = 0.0
            for i in range(N - 1):
                A += (x[i] + x[i + 1]) * (y[i] - y[i + 1])
            A = A * 0.5
            self._area = -A
        return self._area

    @property
    def centroid(self):
        """Returns the centroid of the ring.

        Returns
        -------
        self._centroid : libpysal.cg.Point
            The ring's centroid.

        Notes
        -----
        The centroid returned by this method is the geometric centroid.
        Also known as the 'center of gravity' or 'center of mass'.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> str(r.centroid)
        '(1.0, 0.5)'
        """
        if self._centroid is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            A = self.signed_area
            N = len(self)
            cx = 0
            cy = 0
            for i in range(N - 1):
                f = x[i] * y[i + 1] - x[i + 1] * y[i]
                cx += (x[i] + x[i + 1]) * f
                cy += (y[i] + y[i + 1]) * f
            cx = 1.0 / (6 * A) * cx
            cy = 1.0 / (6 * A) * cy
            self._centroid = Point((cx, cy))
        return self._centroid

    def build_quad_tree_structure(self):
        """Build the quad tree structure for this ring. Once the structure
        is built, speed for testing if a point is inside the ring will be
        increased significantly.
        """
        self._quad_tree_structure = QuadTreeStructureSingleRing(self)

    def contains_point(self, point):
        """Point containment using winding number. The implementation is based on
        `this <http://www.engr.colostate.edu/~dga/dga/papers/point_in_polygon.pdf>`_.

        Parameters
        ----------
        point : libpysal.cg.Point
            The point to test for containment.

        Returns
        -------
        point_contained : bool
            ``True`` if ``point`` is contained within the polygon,
            otherwise ``False``.
        """
        point_contained = False
        if self._quad_tree_structure is None:
            x, y = point
            # bbox checks
            bbleft = x < self.bounding_box.left
            bbright = x > self.bounding_box.right
            bblower = y < self.bounding_box.lower
            bbupper = y > self.bounding_box.upper
            if bbleft or bbright or bblower or bbupper:
                pass
            else:
                rn = len(self.vertices)
                xs = [self.vertices[i][0] - point[0] for i in range(rn)]
                ys = [self.vertices[i][1] - point[1] for i in range(rn)]
                w = 0
                for i in range(len(self.vertices) - 1):
                    yi = ys[i]
                    yj = ys[i + 1]
                    xi = xs[i]
                    xj = xs[i + 1]
                    if yi * yj < 0:
                        r = xi + yi * (xj - xi) / (yi - yj)
                        if r > 0:
                            if yi < 0:
                                w += 1
                            else:
                                w -= 1
                    elif yi == 0 and xi > 0:
                        if yj > 0:
                            w += 0.5
                        else:
                            w -= 0.5
                    elif yj == 0 and xj > 0:
                        if yi < 0:
                            w += 0.5
                        else:
                            w -= 0.5
                if w == 0:
                    pass
                else:
                    point_contained = True
        else:
            point_contained = self._quad_tree_structure.contains_point(point)
        return point_contained
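# Editorial usage sketch (hypothetical helper, illustration only): the winding
# number in ``Ring.contains_point`` counts signed crossings of the horizontal
# ray from the query point; a nonzero total means the point is inside.
def _demo_ring_winding():
    r = Ring(
        [Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0, 0))]
    )
    assert r.area == 2.0
    assert str(r.centroid) == "(1.0, 0.5)"
    assert r.contains_point((1.0, 0.5))        # interior point
    assert not r.contains_point((3.0, 0.5))    # rejected by the bbox pre-check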
class Polygon(Geometry):
    """Geometric representation of polygon objects.
    Returns a polygon created from the objects specified.

    Parameters
    ----------
    vertices : list
        A list of vertices or a list of lists of vertices.
    holes : list
        A list of sub-polygons to be considered as holes.

    Attributes
    ----------
    vertices : list
        A list of points with the vertices of the polygon in clockwise order.
    len : int
        The number of vertices including holes.
    perimeter : float
        The geometric length of the perimeter of the polygon.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the polygon.
    bbox : list
        A list representation of the bounding box in the form
        ``[left, lower, right, upper]``.
    area : float
        The area enclosed by the polygon.
    centroid : tuple
        The 'center of gravity', i.e. the mean point of the polygon.

    Examples
    --------
    >>> p1 = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
    """

    def __init__(self, vertices, holes=None):
        self._part_rings = []
        self._hole_rings = []

        def clockwise(part: list) -> list:
            # delegates to the clockwise test in libpysal.cg.standalone;
            # the module is imported at the bottom of this file to avoid a
            # circular import
            if standalone.is_clockwise(part):
                return part[:]
            else:
                return part[::-1]

        vl = list(vertices)
        if isinstance(vl[0], list):
            self._part_rings = list(map(Ring, vertices))
            self._vertices = [clockwise(part) for part in vertices]
        else:
            self._part_rings = [Ring(vertices)]
            self._vertices = [clockwise(vertices)]
        if holes is not None and holes != []:
            if isinstance(holes[0], list):
                self._hole_rings = list(map(Ring, holes))
                self._holes = [clockwise(hole) for hole in holes]
            else:
                self._hole_rings = [Ring(holes)]
                self._holes = [clockwise(holes)]
        else:
            self._holes = [[]]
        self.is_quad_tree_structure_built = False
        self._reset_props()

    @classmethod
    def __from_geo_interface__(cls, geo: dict):
        """While PySAL does not differentiate polygons and multipolygons
        GEOS, Shapely, and geoJSON do. In GEOS, etc, polygons may only have
        a single exterior ring, all other parts are holes. MultiPolygons are
        simply a list of polygons.
        """
        geo_type = geo["type"].lower()
        if geo_type == "multipolygon":
            parts = []
            holes = []
            for polygon in geo["coordinates"]:
                verts = [[Point(pt) for pt in part] for part in polygon]
                parts += verts[0:1]
                holes += verts[1:]
            if not holes:
                holes = None
            return cls(parts, holes)
        else:
            verts = [[Point(pt) for pt in part] for part in geo["coordinates"]]
            return cls(verts[0:1], verts[1:])

    @property
    def __geo_interface__(self) -> dict:
        """Return ``__geo_interface__`` information lookup."""
        if len(self.parts) > 1:
            geo = {
                "type": "MultiPolygon",
                "coordinates": [[part] for part in self.parts],
            }
            if self._holes[0]:
                geo["coordinates"][0] += self._holes
            return geo
        if self._holes[0]:
            return {"type": "Polygon", "coordinates": self._vertices + self._holes}
        else:
            return {"type": "Polygon", "coordinates": self._vertices}

    def _reset_props(self):
        """Resets the geometric properties of the polygon."""
        self._perimeter = None
        self._bounding_box = None
        self._bbox = None
        self._area = None
        self._centroid = None
        self._len = None

    def __len__(self) -> int:
        return self.len

    @property
    def len(self) -> int:
        """Returns the number of vertices in the polygon.

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> p1.len
        4
        >>> len(p1)
        4
        """
        if self._len is None:
            self._len = len(self.vertices)
        return self._len

    @property
    def vertices(self) -> list:
        """Returns the vertices of the polygon in clockwise order."""
        return sum([part for part in self._vertices], []) + sum(
            [part for part in self._holes], []
        )

    @property
    def holes(self) -> list:
        """Returns the holes of the polygon in clockwise order.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))]
        ... )
        >>> len(p.holes)
        1
        """
        return [[v for v in part] for part in self._holes]

    @property
    def parts(self) -> list:
        """Returns the parts of the polygon in clockwise order.

        Examples
        --------
        >>> p = Polygon(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],
        ...         [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))]
        ...     ]
        ... )
        >>> len(p.parts)
        2
        """
        return [[v for v in part] for part in self._vertices]

    @property
    def perimeter(self) -> Union[int, float]:
        """Returns the perimeter of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.perimeter
        4.0
        """

        def dist(v1: Union[int, float], v2: Union[int, float]) -> float:
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(part) -> Union[int, float]:
            return sum([dist(part[i], part[i + 1]) for i in range(-1, len(part) - 1)])

        sum_perim = lambda part_type: sum([part_perimeter(part) for part in part_type])

        if self._perimeter is None:
            self._perimeter = sum_perim(self._vertices) + sum_perim(self._holes)
        return self._perimeter

    @property
    def bbox(self):
        """Returns the bounding box of the polygon as a list.

        Returns
        -------
        self._bbox : list
            The bounding box of the polygon as a list.

        See Also
        --------
        libpysal.cg.bounding_box
        """
        if self._bbox is None:
            self._bbox = [
                self.bounding_box.left,
                self.bounding_box.lower,
                self.bounding_box.right,
                self.bounding_box.upper,
            ]
        return self._bbox

    @property
    def bounding_box(self):
        """Returns the bounding box of the polygon.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> p.bounding_box.left
        0.0
        >>> p.bounding_box.lower
        0.0
        >>> p.bounding_box.right
        2.0
        >>> p.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]),
                min([v[1] for v in vertices]),
                max([v[0] for v in vertices]),
                max([v[1] for v in vertices]),
            )
        return self._bounding_box

    @property
    def area(self) -> float:
        """Returns the area of the polygon.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))]
        ... )
        >>> p.area
        99.0
        """

        def part_area(pv: list) -> float:
            __area = 0
            for i in range(-1, len(pv) - 1):
                __area += (pv[i][0] + pv[i + 1][0]) * (pv[i][1] - pv[i + 1][1])
            __area = __area * 0.5
            if __area < 0:
                __area = -__area
            return __area

        sum_area = lambda part_type: sum([part_area(part) for part in part_type])
        _area = sum_area(self._vertices) - sum_area(self._holes)

        return _area

    @property
    def centroid(self) -> tuple:
        """Returns the centroid of the polygon.

        Notes
        -----
        The centroid returned by this method is the geometric centroid and
        respects multipart polygons with holes. Also known as the 'center of
        gravity' or 'center of mass'.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))]
        ... )
        >>> p.centroid
        (5.0353535353535355, 5.0353535353535355)
        """
        CP = [ring.centroid for ring in self._part_rings]
        AP = [ring.area for ring in self._part_rings]
        CH = [ring.centroid for ring in self._hole_rings]
        AH = [-ring.area for ring in self._hole_rings]

        A = AP + AH

        cx = sum([pt[0] * area for pt, area in zip(CP + CH, A)]) / sum(A)
        cy = sum([pt[1] * area for pt, area in zip(CP + CH, A)]) / sum(A)

        return cx, cy

    def build_quad_tree_structure(self):
        """Build the quad tree structure for this polygon. Once the structure
        is built, speed for testing if a point is inside the ring will be
        increased significantly.
        """
        for ring in self._part_rings:
            ring.build_quad_tree_structure()
        for ring in self._hole_rings:
            ring.build_quad_tree_structure()
        self.is_quad_tree_structure_built = True

    def contains_point(self, point):
        """Test if a polygon contains a point.

        Parameters
        ----------
        point : libpysal.cg.Point
            A point to test for containment.

        Returns
        -------
        contains : bool
            ``True`` if the polygon contains ``point`` otherwise ``False``.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((4, 0)), Point((4, 5)), Point((2, 3)), Point((0, 5))]
        ... )
        >>> p.contains_point((3, 3))
        True
        >>> p.contains_point((0, 6))
        False
        >>> p.contains_point((2, 2.9))
        True
        >>> p.contains_point((4, 5))
        False
        >>> p.contains_point((4, 0))
        False

        Handles holes.

        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
        ... )
        >>> p.contains_point((3.0, 3.0))
        False
        >>> p.contains_point((1.0, 1.0))
        True

        Notes
        -----
        Points falling exactly on polygon edges may yield unpredictable results.
        """
        searching = True
        for ring in self._hole_rings:
            if ring.contains_point(point):
                contains = False
                searching = False
                break
        if searching:
            for ring in self._part_rings:
                if ring.contains_point(point):
                    contains = True
                    searching = False
                    break
            if searching:
                contains = False
        return contains
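# Editorial usage sketch (hypothetical helper, illustration only):
# ``Polygon.contains_point`` checks hole rings first, so a point inside a hole
# is rejected even though it also lies inside the exterior ring.
def _demo_polygon_with_hole():
    p = Polygon(
        [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
    )
    assert p.area == 96.0                      # 100 (shell) - 4 (hole)
    assert p.contains_point((5, 5))            # in the shell, outside the hole
    assert not p.contains_point((3, 3))        # inside the hole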
\"\"\" def __init__(self, vertices):", "True >>> ls2 = LineSegment(Point((5, 1)), Point((10, 1))) >>> ls.intersect(ls2) False >>> ls2", "(5.0353535353535355, 5.0353535353535355) \"\"\" CP = [ring.centroid for ring in self._part_rings] AP = [ring.area", ">>> l.y(1) 1.0 \"\"\" if self.m == 0: return self.b return self.m *", "self._vertices ] class Ring(Geometry): \"\"\"Geometric representation of a linear ring. Linear rings must", "NotImplementedError, \"%s are not supported at this time.\"%geo_type if geo_type in _geoJSON_type_to_Pysal_type: obj", "0, 4, 4) >>> r.height 4.0 \"\"\" return self.upper - self.lower _geoJSON_type_to_Pysal_type =", "mass'. _quad_tree_structure : libpysal.cg.QuadTreeStructureSingleRing The quad tree structure for the ring. This structure", "len(self) -> int: return len(self) @staticmethod def dist(v1, v2) -> Union[int, float]: return", "LineSegment(Point((0, 0)), Point((5, 0))) >>> ls.is_cw(Point((2, 2))) False >>> ls.is_cw(Point((2, -2))) True \"\"\"", "i in range(len(p) - 1)]) if self._len is None: self._len = sum([part_perimeter(part) for", "__eq__(self, other): if other: return self[:] == other[:] return False def __add__(self, other):", "-------- >>> p = Point((3, 6, 2)) >>> p[:2] == (3, 6) True", "and the built-in operation ``bool()``\" ``-- http://docs.python.org/reference/datamodel.html Examples -------- >>> r = Rectangle(0,", "geo_type) return obj class Geometry(object): \"\"\"A base class to help implement ``is_geometry`` and", "polygon, otherwise ``False``. \"\"\" point_contained = False if self._quad_tree_structure is None: x, y", "def __init__(self, vertices, holes=None): self._part_rings = [] self._hole_rings = [] def clockwise(part: list)", "= tuple(map(float, loc)) @classmethod def __from_geo_interface__(cls, geo): return cls(geo[\"coordinates\"]) @property def __geo_interface__(self): return", "a chain, also known as a polyline. Parameters ---------- vertices : list A", "ordering of the vertices is ignored and will not be altered. Parameters ----------", "xi = xs[i] xj = xs[i + 1] if yi * yj <", "for part in self.parts], } if self._holes[0]: geo[\"coordinates\"][0] += self._holes return geo if", "self.vertices self._bounding_box = Rectangle( min([v[0] for v in vertices]), min([v[1] for v in", "``x`` is also an attribute. Examples -------- >>> ls = VerticalLine(0) >>> ls.m", "len(verts) 4 \"\"\" return sum([part for part in self._vertices], []) @property def parts(self)", "(excluded). Examples -------- >>> p = Point((3, 6, 2)) >>> p[:2] == (3,", "= False searching = False break if searching: for ring in self._part_rings: if", "form a closed ring, \" # msg += \"the first and last vertices", "index to the start slice and :math:`j` as the index to end the", "CH, A)]) / sum(A) cy = sum([pt[1] * area for pt, area in", "Returns ------- contains : bool ``True`` if the polygon contains ``point`` otherwise ``False``.", "= VerticalLine(0) >>> ls.m inf >>> ls.b nan \"\"\" def __init__(self, x): self._x", "True Notes ----- Points falling exactly on polygon edges may yield unpredictable results.", "The second point specifying the ray (not ``origin``.) Attributes ---------- o : libpysal.cg.Point", "of the ``Point``. Examples -------- >>> Point((0, 1)) (0.0, 1.0) \"\"\" return str(self)", "at which to compute :math:`y`. Examples -------- >>> l = VerticalLine(1) >>> l.y(1)", "elif dx1 * dx1 + dy1 * dy1 >= dx2 * dx2 +", "segments being on other segment. 
Parameters ---------- other : libpysal.cg.LineSegment Another line segment", "The bounding box of the polygon as a list. See Also -------- libpysal.cg.bounding_box", "point must be the same. Open rings will be closed. This class exists", "\"\"\" def __init__(self, origin, second_p): self.o = origin self.p = second_p class Chain(Geometry):", "Raised when ``obj`` is not a supported shape. NotImplementedError Raised when ``geo_type`` is", "cx = 1.0 / (6 * A) * cx cy = 1.0 /", "x[i + 1]) * (y[i] - y[i + 1]) A = A *", "new scale to the old scale (e.g. 1.0 is current size). Examples --------", "yi < 0: w += 0.5 else: w -= 0.5 if w ==", "or dy1 * dy2 < 0: is_ccw = -1 elif dx1 * dx1", "representation of the ``Point``. Examples -------- >>> Point((0, 1)) (0.0, 1.0) \"\"\" return", "2))) >>> l1 == l2 True >>> l2 == l1 True \"\"\" eq", "Point((5, 0))) >>> ls.is_cw(Point((2, 2))) False >>> ls.is_cw(Point((2, -2))) True \"\"\" v1 =", "if the point is less than another object. Parameters ---------- other : libpysal.cg.Point", "self._p2[0] dy = self._p1[1] - self._p2[1] if dx == 0 and dy ==", "Point((1, 1)), Point((2, 1))]) >>> verts = c.vertices >>> len(verts) 4 \"\"\" return", "= { \"type\": \"MultiPolygon\", \"coordinates\": [[part] for part in self.parts], } if self._holes[0]:", "= True elif other.p2 == self._p1 and other.p1 == self._p2: eq = True", "'arcdistance' (meters). \"\"\" def part_perimeter(p: list) -> Union[int, float]: return sum([arcdist(p[i], p[i +", "eq = False if not isinstance(other, self.__class__): pass else: if other.p1 == self._p1", "objects. Returns a polygon created from the objects specified. Parameters ---------- vertices :", "__add__(self, other): x, y, X, Y = self[:] x1, y2, X1, Y1 =", "``True`` if ``self`` and ``other`` are the same line segment. Examples -------- >>>", "hash(Point((0, 1))) == hash(Point((1, 1))) False \"\"\" return hash(self.__loc) def __getitem__(self, *args) ->", "2)), Point((2, 2)), Point((2, 1)), Point((1, 1))] ... ) >>> len(p.holes) 1 \"\"\"", "other: return self[:] == other[:] return False def __add__(self, other): x, y, X,", "x(self, y: Union[int, float]) -> float: \"\"\"Returns the :math:`x`-value of the line at", "ls.bounding_box.upper 6.0 \"\"\" # If LineSegment attributes p1, p2 changed, recompute if self._bounding_box", "the geometric centroid. Also known as the 'center of gravity' or 'center of", "- point[0] for i in range(rn)] ys = [self.vertices[i][1] - point[1] for i", "__init__(self, m, b): if m == float(\"inf\"): raise ArithmeticError(\"Slope cannot be infinite.\") self.m", "then recompute their values if they have been reset since the last call", "Attributes ---------- left : float Minimum x-value of the rectangle. lower : float", "elif dx == 0: self._line = VerticalLine(self._p1[0]) else: m = dy / float(dx)", "... [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))] ... ] ... )", "__ge__(self, other) -> bool: \"\"\"Tests if the point is greater than or equal", "the ``p1`` attribute of the line segment. 
        Returns
        -------
        self._p1 : libpysal.cg.Point
            The ``_p1`` attribute.
    Attributes
    ----------
    vertices : list
        The list of points of the vertices of the chain, in order.
    len : float
        The geometric length of the chain.
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))],
        ...     [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
        ... )
        >>> p.contains_point((3.0, 3.0))
        False
        >>> p.contains_point((1.0, 1.0))
        True
    Examples
    --------

    >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
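    Continuing the example above, the ``len`` attribute gives the
    geometric length of the three unit steps (value taken from this
    class's doctests):

    >>> c.len
    3.0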
\"\"\" geo_type", "+ 1] - x[i + 1] * y[i] cx += (x[i] + x[i", "= [[]] self._reset_props() @classmethod def __from_geo_interface__(cls, geo: dict): \"\"\"While PySAL does not differentiate", "0)), Point((1, 1)), Point((2, 1))]) >>> c.len 3.0 >>> c = Chain( ...", ">>> r = Rectangle(0, 0, 4, 4) >>> r.height 4.0 \"\"\" return self.upper", "min(self.left, other.left), min(self.lower, other.lower), max(self.right, other.right), max(self.upper, other.upper), ) def __getitem__(self, key): \"\"\"", "* A) * cx cy = 1.0 / (6 * A) * cy", "i in range(len(self.vertices) - 1): yi = ys[i] yj = ys[i + 1]", "2)), Point((4, 2)), Point((4, 4)), Point((2, 4))] ... ) >>> p.contains_point((3.0, 3.0)) False", "geo_type == \"multipolygon\": parts = [] holes = [] for polygon in geo[\"coordinates\"]:", "* (y[i] - y[i + 1]) A = A * 0.5 self._area =", "Parameters ---------- vertices : list A list of vertices or a list of", "2))) False >>> ls.is_cw(Point((2, -2))) True \"\"\" v1 = (self._p2[0] - self._p1[0], self._p2[1]", "searching = True for ring in self._hole_rings: if ring.contains_point(point): contains = False searching", "string representation of the ``Point``. Examples -------- >>> Point((0, 1)) (0.0, 1.0) \"\"\"", "infinity is passed in as the slope. Examples -------- >>> ls = Line(1,", "bool: \"\"\"Tests if the point is equal to another object. Parameters ---------- other", "gravity' or 'center of mass'. Examples -------- >>> r = Ring( ... [", "0.0 >>> p.bounding_box.lower 0.0 >>> p.bounding_box.right 2.0 >>> p.bounding_box.upper 1.0 \"\"\" if self._bounding_box", "be closed, the first and last point must be the same. Open rings", "part_perimeter(p: list) -> Union[int, float]: return sum([dist(p[i], p[i + 1]) for i in", "= [vertices] self._reset_props() @classmethod def __from_geo_interface__(cls, geo: dict): if geo[\"type\"].lower() == \"linestring\": verts", "self._arclen = sum([part_perimeter(part) for part in self._vertices]) return self._arclen @property def segments(self) ->", ">>> p.perimeter 4.0 \"\"\" def dist(v1: Union[int, float], v2: Union[int, float]) -> float:", "Examples -------- >>> r = Ring( ... [ ... Point((0, 0)), ... Point((2,", "a polygon created from the objects specified. Parameters ---------- vertices : list A", "self._p2 : libpysal.cg.Point The reset ``p2`` attribute. Examples -------- >>> ls = LineSegment(Point((1,", "p : libpysal.cg.Point The second point on the ray (not the point where", "as ``properties``) then recompute their values if they have been reset since the", "perimeter of the polygon. bounding_box : libpysal.cg.Rectangle The bounding box of the polygon.", "p1, p2 changed, recompute if self._len is None: self._len = math.hypot(self._p1[0] - self._p2[0],", "http://docs.python.org/reference/datamodel.html Examples -------- >>> r = Rectangle(0, 0, 0, 0) >>> bool(r) False", "vertices: list): if isinstance(vertices[0], list): self._vertices = [part for part in vertices] else:", ">>> swap.p2[1] 2.0 \"\"\" line_seg = LineSegment(self._p2, self._p1) return line_seg @property def bounding_box(self):", "None: vertices = self.vertices self._bounding_box = Rectangle( min([v[0] for v in vertices]), min([v[1]", "cy += (y[i] + y[i + 1]) * f cx = 1.0 /", "a list of lists of vertices. holes : list A list of sub-polygons", "... [ ... [Point((0, 0)), Point((1, 0)), Point((1, 1))], ... [Point((10, 10)), Point((11,", "of the polygon in clockwise order. 
        Examples
        --------

        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))]
        ... )
        >>> len(p.holes)
        1
    holes : list
        A list of sub-polygons to be considered as holes.
        Default is ``None``.
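    For example, a 10x10 square with a unit-square hole subtracts the
    hole's area (values reused from this class's doctests):

    >>> p = Polygon(
    ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
    ...     [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))]
    ... )
    >>> p.area
    99.0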
        Parameters
        ----------
        other : libpysal.cg.Point
            An object to test equality against.
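        For instance, comparisons act on the underlying coordinate
        tuples (values reused from the surrounding doctests):

        >>> Point((0, 1)) == Point((0, 1))
        True
        >>> Point((0, 1)) == Point((1, 1))
        False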
    The ordering of the vertices is ignored and will not be altered.
        A MultiPolygon is simply a list of polygons.
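        A hedged sketch of the resulting flattening (coordinates are
        illustrative only; each inner polygon's first ring becomes a
        part and any remaining rings become holes):

        >>> gj = {
        ...     "type": "MultiPolygon",
        ...     "coordinates": [
        ...         [[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]],
        ...         [[(10, 10), (11, 10), (11, 11), (10, 11), (10, 10)]],
        ...     ],
        ... }
        >>> p = asShape(gj)
        >>> len(p.parts)
        2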
        Examples
        --------

        >>> p = Point((1, 3))
        >>> str(p)
        '(1.0, 3.0)'
        Examples
        --------

        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p2(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        Returns
        -------
        self._centroid : libpysal.cg.Point
            The ring's centroid.
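        A standalone sketch of the standard area-weighted formula this
        property implements (a shoelace-style loop over a closed vertex
        list; ``_centroid`` here is a hypothetical helper, not module
        API):

        >>> def _centroid(x, y):
        ...     A = cx = cy = 0.0
        ...     for i in range(len(x) - 1):
        ...         f = x[i] * y[i + 1] - x[i + 1] * y[i]
        ...         A += f
        ...         cx += (x[i] + x[i + 1]) * f
        ...         cy += (y[i] + y[i + 1]) * f
        ...     A *= 0.5
        ...     return cx / (6 * A), cy / (6 * A)
        >>> _centroid([0.0, 2.0, 2.0, 0.0, 0.0], [0.0, 0.0, 1.0, 1.0, 0.0])
        (1.0, 0.5)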
\"\"\" def __init__(self, vertices): if vertices[0] !=", "part_type]) if self._perimeter is None: self._perimeter = sum_perim(self._vertices) + sum_perim(self._holes) return self._perimeter @property", "Point((2, 2)), Point((1, 2)), Point((1, 1))] ... ] ... ) >>> len(c.parts) 2", "True searching = False break if searching: contains = False return contains class", "return str(self.__loc) # return \"POINT ({} {})\".format(*self.__loc) class LineSegment(Geometry): \"\"\"Geometric representation of line", "point is equal to another object. Parameters ---------- other : libpysal.cg.Point An object", "Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))]) \"\"\" def __init__(self, vertices: list):", "self.left @property def height(self) -> Union[int, float]: \"\"\"Returns the height of the Rectangle.", "= sum([part_perimeter(part) for part in self._vertices]) return self._arclen @property def segments(self) -> list:", "a point is counterclockwise of the segment (``True``) or not (``False``). Exclusive. Parameters", ": libpysal.cg.Rectangle The bounding box of the polygon. bbox : list A list", "holes] else: self._hole_rings = [Ring(holes)] self._holes = [clockwise(holes)] else: self._holes = [[]] self._reset_props()", "r == Point((3.0, -1.0)) True \"\"\" self._p2 = p2 self._reset_props() return self._p2 p2", "(``True``) or not (``False``). Exclusive. Parameters ---------- pt : libpysal.cg.Point A point lying", "other.p1 == self._p2: eq = True return eq def intersect(self, other) -> bool:", "except AttributeError: return True def __gt__(self, other) -> bool: \"\"\"Tests if the point", "top breaks unit tests ! from . import standalone from .polygonQuadTreeStructure import QuadTreeStructureSingleRing", "second_p class Chain(Geometry): \"\"\"Geometric representation of a chain, also known as a polyline.", "1]) A = A * 0.5 self._area = -A return self._area @property def", "\"\"\" return (self.__loc) < (other.__loc) def __le__(self, other) -> bool: \"\"\"Tests if the", "in range(N - 1): A += (x[i] + x[i + 1]) * (y[i]", "\"<NAME>, <NAME>, <NAME>, <NAME>, <NAME>\" import math from .sphere import arcdist from typing", ">>> ls._reset_props() \"\"\" self._bounding_box = None self._len = None self._line = False def", "computed using 'arcdistance' (meters). \"\"\" def part_perimeter(p: list) -> Union[int, float]: return sum([arcdist(p[i],", "self._p1[1]) v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1]) return v1[0] * v2[1]", "Also -------- libpysal.cg.bounding_box \"\"\" if self._bbox is None: self._bbox = [ self.bounding_box.left, self.bounding_box.lower,", "sum([dist(part[i], part[i + 1]) for i in range(-1, len(part) - 1)]) sum_perim =", "try: return (self.__loc) == (other.__loc) except AttributeError: return False def __ne__(self, other) ->", "\"\"\"Returns the centroid of the ring. Returns ------- self._centroid : libpysal.cg.Point The ring's", ">>> r.set_scale(2) >>> r.left -2.0 >>> r.right 6.0 >>> r.lower -2.0 >>> r.upper", "p1 self._reset_props() return self._p1 p1 = property(_get_p1, _set_p1) def _get_p2(self): \"\"\"**HELPER METHOD. DO", "0.5 if __area < 0: __area = -area return __area sum_area = lambda", "__ne__(self, other) -> bool: \"\"\"Tests if the point is not equal to another", "Examples -------- >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])", "bbupper: pass else: rn = len(self.vertices) xs = [self.vertices[i][0] - point[0] for i", "bool ``True`` if the polygon contains ``point`` otherwise ``False``. 
        Examples
        --------

        >>> p = Polygon(
        ...     [Point((0,0)), Point((4,0)), Point((4,5)), Point((2,3)), Point((0,5))]
        ... )
        Parameters
        ----------
        y : {int, float}
            The :math:`y`-value at which to compute :math:`x`.
        The implementation is based on the algorithm described in
        `this paper <http://www.engr.colostate.edu/~dga/dga/papers/point_in_polygon.pdf>`_.
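        A self-contained sketch of the winding-number idea (boundary
        vertex cases omitted; the method itself also short-circuits on
        the bounding box and may delegate to a prebuilt quad tree):

        >>> def _winding(vertices, point):
        ...     xs = [v[0] - point[0] for v in vertices]
        ...     ys = [v[1] - point[1] for v in vertices]
        ...     w = 0
        ...     for i in range(len(vertices) - 1):
        ...         if ys[i] * ys[i + 1] < 0:  # edge crosses the x-axis
        ...             r = xs[i] + ys[i] * (xs[i + 1] - xs[i]) / (ys[i] - ys[i + 1])
        ...             if r > 0:  # crossing lies right of the point
        ...                 w += 1 if ys[i] < 0 else -1
        ...     return w != 0
        >>> square = [(0, 0), (4, 0), (4, 4), (0, 4), (0, 0)]
        >>> _winding(square, (2, 2)), _winding(square, (5, 2))
        (True, False)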
class LineSegment(Geometry):
    """Geometric representation of line segment objects.

    Parameters
    ----------
    start_pt : libpysal.cg.Point
        The point where the segment begins.
    end_pt : libpysal.cg.Point
        The point where the segment ends.

    Attributes
    ----------
    p1 : libpysal.cg.Point
        The starting point of the line segment.
    p2 : libpysal.cg.Point
        The ending point of the line segment.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the segment.
    len : float
        The length of the segment.
    line : libpysal.cg.Line
        The line on which the segment lies.

    Examples
    --------
    >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
    """

    def __init__(self, start_pt, end_pt):
        self._p1 = start_pt
        self._p2 = end_pt
        self._reset_props()

    def __str__(self):
        return "LineSegment(" + str(self._p1) + ", " + str(self._p2) + ")"

    def __eq__(self, other) -> bool:
        """Returns ``True`` if ``self`` and ``other`` are the same line segment.

        Examples
        --------
        >>> l1 = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> l2 = LineSegment(Point((5, 6)), Point((1, 2)))
        >>> l1 == l2
        True
        >>> l2 == l1
        True
        """
        eq = False
        if not isinstance(other, self.__class__):
            pass
        else:
            if other.p1 == self._p1 and other.p2 == self._p2:
                eq = True
            elif other.p2 == self._p1 and other.p1 == self._p2:
                eq = True
        return eq

    def intersect(self, other) -> bool:
        """Test whether segment intersects with other segment (``True``) or
        not (``False``). Handles endpoints of segments being on the other
        segment.

        Parameters
        ----------
        other : libpysal.cg.LineSegment
            Another line segment to test against.

        Examples
        --------
        >>> ls = LineSegment(Point((5, 0)), Point((10, 0)))
        >>> ls1 = LineSegment(Point((5, 0)), Point((10, 1)))
        >>> ls.intersect(ls1)
        True
        >>> ls2 = LineSegment(Point((5, 1)), Point((10, 1)))
        >>> ls.intersect(ls2)
        False
        """
        ccw1 = self.sw_ccw(other.p2)
        ccw2 = self.sw_ccw(other.p1)
        ccw3 = other.sw_ccw(self.p1)
        ccw4 = other.sw_ccw(self.p2)
        intersects = ccw1 * ccw2 <= 0 and ccw3 * ccw4 <= 0
        return intersects

    def sw_ccw(self, pt):
        """Sedgewick test for ``pt`` being ccw of segment.

        Returns
        -------
        is_ccw : int
            ``1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is ccw.
            ``-1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is cw.
            ``-1`` if the points are collinear and ``self.p1`` is in the middle.
            ``1`` if the points are collinear and ``self.p2`` is in the middle.
            ``0`` if the points are collinear and ``pt`` is in the middle.
        """
        p0 = self.p1
        p1 = self.p2
        p2 = pt

        dx1 = p1[0] - p0[0]
        dy1 = p1[1] - p0[1]
        dx2 = p2[0] - p0[0]
        dy2 = p2[1] - p0[1]

        if dy1 * dx2 < dy2 * dx1:
            is_ccw = 1
        elif dy1 * dx2 > dy2 * dx1:
            is_ccw = -1
        elif dx1 * dx2 < 0 or dy1 * dy2 < 0:
            is_ccw = -1
        elif dx1 * dx1 + dy1 * dy1 >= dx2 * dx2 + dy2 * dy2:
            is_ccw = 0
        else:
            is_ccw = 1
        return is_ccw

    def is_ccw(self, pt) -> bool:
        """Returns whether a point is counterclockwise of the segment
        (``True``) or not (``False``). Exclusive.

        Parameters
        ----------
        pt : libpysal.cg.Point
            A point lying ccw or cw of a segment.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_ccw(Point((2, 2)))
        True
        >>> ls.is_ccw(Point((2, -2)))
        False
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] > 0

    def is_cw(self, pt) -> bool:
        """Returns whether a point is clockwise of the segment
        (``True``) or not (``False``). Exclusive.

        Parameters
        ----------
        pt : libpysal.cg.Point
            A point lying ccw or cw of a segment.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_cw(Point((2, 2)))
        False
        >>> ls.is_cw(Point((2, -2)))
        True
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] < 0

    def get_swap(self):
        """Returns a ``LineSegment`` object which has its endpoints swapped.

        Returns
        -------
        line_seg : libpysal.cg.LineSegment
            The ``LineSegment`` object which has its endpoints swapped.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> swap = ls.get_swap()
        >>> swap.p1[0]
        5.0
        >>> swap.p1[1]
        6.0
        >>> swap.p2[0]
        1.0
        >>> swap.p2[1]
        2.0
        """
        line_seg = LineSegment(self._p2, self._p1)
        return line_seg

    def _reset_props(self):
        """**HELPER METHOD. DO NOT CALL.** Resets attributes which are
        functions of other attributes. The ``getter``s for these attributes
        (implemented as ``properties``) then recompute their values if they
        have been reset since the last call to the ``getter``.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls._reset_props()
        """
        self._bounding_box = None
        self._len = None
        self._line = False

    def _get_p1(self):
        """**HELPER METHOD. DO NOT CALL.** Returns the ``p1`` attribute
        of the line segment.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._get_p1()
        >>> r == Point((1, 2))
        True
        """
        return self._p1

    def _set_p1(self, p1):
        """**HELPER METHOD. DO NOT CALL.** Sets the ``p1`` attribute of the
        line segment and resets derived properties.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p1(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        """
        self._p1 = p1
        self._reset_props()
        return self._p1

    p1 = property(_get_p1, _set_p1)

    def _get_p2(self):
        """**HELPER METHOD. DO NOT CALL.** Returns the ``p2`` attribute
        of the line segment.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._get_p2()
        >>> r == Point((5, 6))
        True
        """
        return self._p2

    def _set_p2(self, p2):
        """**HELPER METHOD. DO NOT CALL.** Sets the ``p2`` attribute of the
        line segment and resets derived properties.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p2(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        """
        self._p2 = p2
        self._reset_props()
        return self._p2

    p2 = property(_get_p2, _set_p2)

    @property
    def bounding_box(self):
        """Returns the bounding box of the line segment.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls.bounding_box.left
        1.0
        >>> ls.bounding_box.lower
        2.0
        >>> ls.bounding_box.right
        5.0
        >>> ls.bounding_box.upper
        6.0
        """
        # If LineSegment attributes p1, p2 changed, recompute.
        if self._bounding_box is None:
            self._bounding_box = Rectangle(
                min([self._p1[0], self._p2[0]]),
                min([self._p1[1], self._p2[1]]),
                max([self._p1[0], self._p2[0]]),
                max([self._p1[1], self._p2[1]]),
            )
        return Rectangle(
            self._bounding_box.left,
            self._bounding_box.lower,
            self._bounding_box.right,
            self._bounding_box.upper,
        )

    @property
    def len(self) -> float:
        """Returns the length of a ``LineSegment`` object.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((5, 2)))
        >>> ls.len
        3.0
        """
        # If LineSegment attributes p1, p2 changed, recompute.
        if self._len is None:
            self._len = math.hypot(
                self._p1[0] - self._p2[0], self._p1[1] - self._p2[1]
            )
        return self._len

    @property
    def line(self):
        """Returns a ``Line`` object of the line on which the segment lies.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((3, 3)))
        >>> l = ls.line
        >>> l.m
        1.0
        >>> l.b
        0.0
        """
        if self._line is False:
            dx = self._p1[0] - self._p2[0]
            dy = self._p1[1] - self._p2[1]
            if dx == 0 and dy == 0:
                # degenerate segment: no unique line
                self._line = None
            elif dx == 0:
                self._line = VerticalLine(self._p1[0])
            else:
                m = dy / dx
                b = self._p1[1] - m * self._p1[0]  # b = y - mx
                self._line = Line(m, b)
        return self._line
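
# Illustrative sketch (not part of the library API): how the orientation
# tests and ``intersect`` relate. ``_demo_segment`` is a hypothetical
# helper name used only for documentation.
def _demo_segment():
    seg = LineSegment(Point((0, 0)), Point((4, 0)))
    # A point strictly above the segment is counterclockwise of it ...
    assert seg.is_ccw(Point((2, 1)))
    # ... and one strictly below is clockwise.
    assert seg.is_cw(Point((2, -1)))
    # Two segments intersect when each straddles the line of the other;
    # because ``sw_ccw`` returns 0 for collinear interior points, sharing
    # only an endpoint still counts as intersecting.
    crossing = LineSegment(Point((2, -1)), Point((2, 1)))
    touching = LineSegment(Point((4, 0)), Point((5, 3)))
    assert seg.intersect(crossing)
    assert seg.intersect(touching)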
class VerticalLine(Geometry):
    """Geometric representation of vertical line objects.

    Parameters
    ----------
    x : {int, float}
        The :math:`x`-intercept of the line. ``x`` is also an attribute.

    Examples
    --------
    >>> ls = VerticalLine(0)
    >>> ls.m
    inf
    >>> ls.b
    nan
    """

    def __init__(self, x):
        self._x = float(x)
        self.m = float("inf")
        self.b = float("nan")

    def x(self, y) -> float:
        """Returns the :math:`x`-value of the line at a particular :math:`y`-value.

        Parameters
        ----------
        y : {int, float}
            The :math:`y`-value at which to compute :math:`x`.

        Examples
        --------
        >>> l = VerticalLine(0)
        >>> l.x(0.25)
        0.0
        """
        return self._x

    def y(self, x) -> float:
        """Returns the :math:`y`-value of the line at a particular
        :math:`x`-value, which is undefined for a vertical line.

        Parameters
        ----------
        x : {int, float}
            The :math:`x`-value at which to compute :math:`y`.

        Examples
        --------
        >>> l = VerticalLine(1)
        >>> l.y(1)
        nan
        """
        return float("nan")


class Line(Geometry):
    """Geometric representation of line objects.

    Parameters
    ----------
    m : {int, float}
        The slope of the line. ``m`` is also an attribute.
    b : {int, float}
        The :math:`y`-intercept of the line. ``b`` is also an attribute.

    Raises
    ------
    ArithmeticError
        Raised when infinity is passed in as the slope.

    Examples
    --------
    >>> ls = Line(1, 0)
    >>> ls.m
    1.0
    >>> ls.b
    0.0
    """

    def __init__(self, m, b):
        if m == float("inf"):
            raise ArithmeticError("Slope cannot be infinite.")
        self.m = float(m)
        self.b = float(b)

    def x(self, y: Union[int, float]) -> float:
        """Returns the :math:`x`-value of the line at a particular :math:`y`-value.

        Parameters
        ----------
        y : {int, float}
            The :math:`y`-value at which to compute :math:`x`.

        Raises
        ------
        ArithmeticError
            Raised when ``0.`` is passed in as the slope.

        Examples
        --------
        >>> l = Line(0.5, 0)
        >>> l.x(0.25)
        0.5
        """
        if self.m == 0:
            raise ArithmeticError("Cannot solve for 'x' when slope is zero.")
        return (y - self.b) / self.m

    def y(self, x: Union[int, float]) -> float:
        """Returns the :math:`y`-value of the line at a particular :math:`x`-value.

        Parameters
        ----------
        x : {int, float}
            The :math:`x`-value at which to compute :math:`y`.

        Examples
        --------
        >>> l = Line(1, 0)
        >>> l.y(1)
        1.0
        """
        if self.m == 0:
            return self.b
        return self.m * x + self.b
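
# Illustrative sketch (not part of the library API): solving for the
# crossing point of two non-parallel lines from their slope/intercept
# form. ``_demo_line_crossing`` is a hypothetical helper name.
def _demo_line_crossing():
    l1 = Line(1, 0)   # y = x
    l2 = Line(-1, 4)  # y = -x + 4
    # Equal y-values: m1*x + b1 = m2*x + b2  =>  x = (b2 - b1) / (m1 - m2)
    x = (l2.b - l1.b) / (l1.m - l2.m)
    assert (x, l1.y(x)) == (2.0, 2.0)
    # A vertical line reports an infinite slope and an undefined intercept.
    v = VerticalLine(3)
    assert v.x(100) == 3.0 and math.isnan(v.y(3))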
class Ray:
    """Geometric representation of ray objects.

    Parameters
    ----------
    origin : libpysal.cg.Point
        The point where the ray originates.
    second_p : libpysal.cg.Point
        The second point specifying the ray (not ``origin``).

    Attributes
    ----------
    o : libpysal.cg.Point
        The origin (point where the ray originates). See ``origin``.
    p : libpysal.cg.Point
        The second point on the ray. See ``second_p``.

    Examples
    --------
    >>> l = Ray(Point((0, 0)), Point((1, 0)))
    >>> str(l.o)
    '(0.0, 0.0)'
    >>> str(l.p)
    '(1.0, 0.0)'
    """

    def __init__(self, origin, second_p):
        self.o = origin
        self.p = second_p


class Chain(Geometry):
    """Geometric representation of a chain, also known as a polyline.

    Parameters
    ----------
    vertices : list
        A point list or list of point lists.

    Attributes
    ----------
    vertices : list
        The list of points of the vertices of the chain in order.
    len : float
        The geometric length of the chain.

    Examples
    --------
    >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
    """

    def __init__(self, vertices: list):
        if isinstance(vertices[0], list):
            self._vertices = [part for part in vertices]
        else:
            self._vertices = [vertices]
        self._reset_props()

    @classmethod
    def __from_geo_interface__(cls, geo: dict):
        if geo["type"].lower() == "linestring":
            verts = [Point(pt) for pt in geo["coordinates"]]
        elif geo["type"].lower() == "multilinestring":
            verts = [list(map(Point, part)) for part in geo["coordinates"]]
        else:
            raise TypeError("%r is not a Chain." % geo)
        return cls(verts)

    @property
    def __geo_interface__(self) -> dict:
        if len(self.parts) == 1:
            return {"type": "LineString", "coordinates": self.vertices}
        else:
            return {"type": "MultiLineString", "coordinates": self.parts}

    def _reset_props(self):
        """**HELPER METHOD. DO NOT CALL.** Resets attributes which are
        functions of other attributes. The getters for these attributes
        (implemented as properties) then recompute their values if they
        have been reset since the last call to the getter.
        """
        self._len = None
        self._arclen = None
        self._bounding_box = None

    @property
    def vertices(self) -> list:
        """Returns the points of the vertices of the chain in order.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> verts = c.vertices
        >>> len(verts)
        4
        """
        return sum([part for part in self._vertices], [])

    @property
    def parts(self) -> list:
        """Returns the parts (lists of ``libpysal.cg.Point`` objects) of the chain.

        Examples
        --------
        >>> c = Chain(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1))],
        ...         [Point((10, 10)), Point((11, 10)), Point((11, 11))]
        ...     ]
        ... )
        >>> len(c.parts)
        2
        """
        return [[v for v in part] for part in self._vertices]

    @property
    def bounding_box(self):
        """Returns the bounding box of the chain.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the chain.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> c.bounding_box.left
        0.0
        >>> c.bounding_box.lower
        0.0
        >>> c.bounding_box.right
        2.0
        >>> c.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]),
                min([v[1] for v in vertices]),
                max([v[0] for v in vertices]),
                max([v[1] for v in vertices]),
            )
        return self._bounding_box

    @property
    def len(self) -> Union[int, float]:
        """Returns the geometric length of the chain.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> c.len
        3.0
        >>> c = Chain(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1))],
        ...         [Point((10, 10)), Point((11, 10)), Point((11, 11))]
        ...     ]
        ... )
        >>> c.len
        4.0
        """

        def dist(v1: tuple, v2: tuple) -> Union[int, float]:
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(p: list) -> Union[int, float]:
            return sum([dist(p[i], p[i + 1]) for i in range(len(p) - 1)])

        if self._len is None:
            self._len = sum([part_perimeter(part) for part in self._vertices])
        return self._len

    @property
    def arclen(self) -> Union[int, float]:
        """Returns the geometric length of the chain
        computed using 'arcdistance' (meters).
        """

        def part_perimeter(p: list) -> Union[int, float]:
            return sum([arcdist(p[i], p[i + 1]) * 1000.0 for i in range(len(p) - 1)])

        if self._arclen is None:
            self._arclen = sum([part_perimeter(part) for part in self._vertices])
        return self._arclen

    @property
    def segments(self) -> list:
        """Returns the segments that compose the chain."""
        return [
            [LineSegment(a, b) for (a, b) in zip(part[:-1], part[1:])]
            for part in self._vertices
        ]
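
# Illustrative sketch (not part of the library API): a multipart chain's
# ``len`` sums the Euclidean lengths of its parts, while ``segments``
# decomposes each part into ``LineSegment`` objects. ``_demo_chain`` is a
# hypothetical helper name.
def _demo_chain():
    c = Chain(
        [
            [Point((0, 0)), Point((1, 0)), Point((1, 1))],
            [Point((10, 10)), Point((11, 10)), Point((11, 11))],
        ]
    )
    assert c.len == 4.0  # 2.0 per part
    assert len(c.segments) == 2  # one list of segments per part
    assert all(len(part) == 2 for part in c.segments)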
class Ring(Geometry):
    """Geometric representation of a linear ring. Linear rings must be
    closed: the first and last point must be the same. Open rings will be
    closed silently. This class exists primarily as a geometric primitive
    to form complex polygons with multiple rings and holes. The ordering
    of the vertices is ignored and will not be altered.

    Parameters
    ----------
    vertices : list
        A list of vertices.

    Attributes
    ----------
    vertices : list
        A list of points with the vertices of the ring.
    len : int
        The number of vertices.
    perimeter : float
        The geometric length of the perimeter of the ring.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the ring.
    area : float
        The area enclosed by the ring.
    centroid : {tuple, libpysal.cg.Point}
        The centroid of the ring defined by the
        'center of gravity' or 'center of mass'.
    _quad_tree_structure : libpysal.cg.QuadTreeStructureSingleRing
        The quad tree structure for the ring. This structure
        helps test if a point is inside the ring.
    """

    def __init__(self, vertices):
        if vertices[0] != vertices[-1]:
            # Close an open ring by repeating the first vertex.
            vertices = vertices[:] + vertices[0:1]
        self.vertices = tuple(vertices)
        self._perimeter = None
        self._bounding_box = None
        self._area = None
        self._centroid = None
        self._quad_tree_structure = None

    def __len__(self) -> int:
        return len(self.vertices)

    @property
    def len(self) -> int:
        return len(self)

    @staticmethod
    def dist(v1, v2) -> Union[int, float]:
        return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

    @property
    def perimeter(self) -> Union[int, float]:
        if self._perimeter is None:
            dist = self.dist
            v = self.vertices
            self._perimeter = sum(
                [dist(v[i], v[i + 1]) for i in range(-1, len(self) - 1)]
            )
        return self._perimeter

    @property
    def bounding_box(self):
        """Returns the bounding box of the ring.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the ring.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> r.bounding_box.left
        0.0
        >>> r.bounding_box.lower
        0.0
        >>> r.bounding_box.right
        2.0
        >>> r.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            self._bounding_box = Rectangle(min(x), min(y), max(x), max(y))
        return self._bounding_box

    @property
    def area(self) -> Union[int, float]:
        """Returns the area of the ring.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> r.area
        2.0
        """
        return abs(self.signed_area)

    @property
    def signed_area(self) -> Union[int, float]:
        if self._area is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            N = len(self)
            A = 0.0
            # Shoelace formula; positive for counterclockwise rings.
            for i in range(N - 1):
                A += (x[i] + x[i + 1]) * (y[i + 1] - y[i])
            A = A * 0.5
            self._area = A
        return self._area

    @property
    def centroid(self):
        """Returns the centroid of the ring.

        Returns
        -------
        self._centroid : libpysal.cg.Point
            The ring's centroid.

        Notes
        -----
        The centroid returned by this method is the geometric centroid.
        Also known as the 'center of gravity' or 'center of mass'.

        Examples
        --------
        >>> r = Ring(
        ...     [
        ...         Point((0, 0)),
        ...         Point((2, 0)),
        ...         Point((2, 1)),
        ...         Point((0, 1)),
        ...         Point((0, 0))
        ...     ]
        ... )
        >>> str(r.centroid)
        '(1.0, 0.5)'
        """
        if self._centroid is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            A = self.signed_area
            N = len(self)
            cx = 0
            cy = 0
            for i in range(N - 1):
                f = x[i] * y[i + 1] - x[i + 1] * y[i]
                cx += (x[i] + x[i + 1]) * f
                cy += (y[i] + y[i + 1]) * f
            cx = 1.0 / (6 * A) * cx
            cy = 1.0 / (6 * A) * cy
            self._centroid = Point((cx, cy))
        return self._centroid

    def build_quad_tree_structure(self):
        """Build the quad tree structure for this ring. Once the structure
        is built, speed for testing if a point is inside the ring will be
        increased significantly.
        """
        self._quad_tree_structure = QuadTreeStructureSingleRing(self)

    def contains_point(self, point):
        """Point containment using winding number. The implementation is based on
        `this <http://www.engr.colostate.edu/~dga/dga/papers/point_in_polygon.pdf>`_.

        Parameters
        ----------
        point : libpysal.cg.Point
            The point to test for containment.

        Returns
        -------
        point_contained : bool
            ``True`` if ``point`` is contained within the ring, otherwise ``False``.
        """
        point_contained = False
        if self._quad_tree_structure is None:
            x, y = point
            # Bounding box checks: reject points outside the bbox outright.
            bbleft = x < self.bounding_box.left
            bbright = x > self.bounding_box.right
            bblower = y < self.bounding_box.lower
            bbupper = y > self.bounding_box.upper
            if not (bbleft or bbright or bblower or bbupper):
                rn = len(self.vertices)
                xs = [self.vertices[i][0] - point[0] for i in range(rn)]
                ys = [self.vertices[i][1] - point[1] for i in range(rn)]
                w = 0
                for i in range(len(self.vertices) - 1):
                    yi = ys[i]
                    yj = ys[i + 1]
                    xi = xs[i]
                    xj = xs[i + 1]
                    if yi * yj < 0:
                        r = xi + yi * (xj - xi) / (yi - yj)
                        if r > 0:
                            if yi < 0:
                                w += 1
                            else:
                                w -= 1
                    elif yi == 0 and xi > 0:
                        if yj > 0:
                            w += 0.5
                        else:
                            w -= 0.5
                    elif yj == 0 and xj > 0:
                        if yi < 0:
                            w += 0.5
                        else:
                            w -= 0.5
                if w != 0:
                    point_contained = True
        else:
            point_contained = self._quad_tree_structure.contains_point(point)
        return point_contained
class Polygon(Geometry):
    """Geometric representation of polygon objects.
    Returns a polygon created from the objects specified.

    Parameters
    ----------
    vertices : list
        A list of vertices or a list of lists of vertices.
    holes : list
        A list of sub-polygons to be considered as holes.
        Default is ``None``.

    Attributes
    ----------
    vertices : list
        The list of vertices of the polygon in clockwise order.
    len : int
        The number of vertices including holes.
    perimeter : float
        The geometric length of the perimeter of the polygon.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the polygon.
    bbox : list
        A list representation of the bounding box in the
        form ``[left, lower, right, upper]``.
    area : float
        The area enclosed by the polygon.
    centroid : tuple
        The 'center of gravity', i.e. the mean point of the polygon.

    Examples
    --------
    >>> p1 = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
    """

    def __init__(self, vertices, holes=None):
        self._part_rings = []
        self._hole_rings = []

        def clockwise(part: list) -> list:
            if standalone.is_clockwise(part):
                return part[:]
            else:
                return part[::-1]

        vl = list(vertices)
        if isinstance(vl[0], list):
            self._part_rings = [Ring(part) for part in vertices]
            self._vertices = [clockwise(part) for part in vertices]
        else:
            self._part_rings = [Ring(vertices)]
            self._vertices = [clockwise(vertices)]
        if holes is not None and holes != []:
            if isinstance(holes[0], list):
                self._hole_rings = list(map(Ring, holes))
                self._holes = [clockwise(hole) for hole in holes]
            else:
                self._hole_rings = [Ring(holes)]
                self._holes = [clockwise(holes)]
        else:
            self._holes = [[]]
        self._reset_props()

    @classmethod
    def __from_geo_interface__(cls, geo: dict):
        """While PySAL does not differentiate polygons and multipolygons,
        GEOS, Shapely, and geoJSON do. In GEOS, etc., polygons may only
        have a single exterior ring; all other parts are holes.
        MultiPolygons are simply a list of polygons.
        """
        geo_type = geo["type"].lower()
        if geo_type == "multipolygon":
            parts = []
            holes = []
            for polygon in geo["coordinates"]:
                verts = [[Point(pt) for pt in part] for part in polygon]
                parts += verts[0:1]
                holes += verts[1:]
            if not holes:
                holes = None
            return cls(parts, holes)
        else:
            verts = [[Point(pt) for pt in part] for part in geo["coordinates"]]
            return cls(verts[0], verts[1:])

    @property
    def __geo_interface__(self) -> dict:
        """Return ``__geo_interface__`` information lookup."""
        if len(self.parts) > 1:
            geo = {
                "type": "MultiPolygon",
                "coordinates": [[part] for part in self.parts],
            }
            if self._holes[0]:
                geo["coordinates"][0] += self._holes
            return geo
        if self._holes[0]:
            return {"type": "Polygon", "coordinates": self._vertices + self._holes}
        else:
            return {"type": "Polygon", "coordinates": self._vertices}

    def _reset_props(self):
        """Resets the geometric properties of the polygon."""
        self._perimeter = None
        self._bounding_box = None
        self._bbox = None
        self._area = None
        self._centroid = None
        self._len = None
        self._quad_tree_structure = None
        self.is_quad_tree_structure_built = False

    def __len__(self) -> int:
        return len(self.vertices)

    @property
    def len(self) -> int:
        """Returns the number of vertices in the polygon.

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> p1.len
        4
        >>> len(p1)
        4
        """
        if self._len is None:
            self._len = len(self.vertices)
        return self._len

    @property
    def vertices(self) -> list:
        """Returns the vertices of the polygon in clockwise order.

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> len(p1.vertices)
        4
        """
        return sum([part for part in self._vertices], []) + sum(
            [part for part in self._holes], []
        )

    @property
    def parts(self) -> list:
        """Returns the parts of the polygon in clockwise order.

        Examples
        --------
        >>> p = Polygon(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],
        ...         [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))]
        ...     ]
        ... )
        >>> len(p.parts)
        2
        """
        return [[v for v in part] for part in self._vertices]

    @property
    def holes(self) -> list:
        """Returns the holes of the polygon in clockwise order.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))]
        ... )
        >>> len(p.holes)
        1
        """
        return [[v for v in part] for part in self._holes]

    @property
    def perimeter(self) -> Union[int, float]:
        """Returns the perimeter of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.perimeter
        4.0
        """

        def dist(v1, v2) -> float:
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(part) -> Union[int, float]:
            return sum([dist(part[i], part[i + 1]) for i in range(-1, len(part) - 1)])

        sum_perim = lambda part_type: sum(
            [part_perimeter(part) for part in part_type]
        )

        if self._perimeter is None:
            self._perimeter = sum_perim(self._vertices) + sum_perim(self._holes)
        return self._perimeter

    @property
    def bbox(self):
        """Returns the bounding box of the polygon as a list.

        See Also
        --------
        libpysal.cg.bounding_box
        """
        if self._bbox is None:
            self._bbox = [
                self.bounding_box.left,
                self.bounding_box.lower,
                self.bounding_box.right,
                self.bounding_box.upper,
            ]
        return self._bbox

    @property
    def bounding_box(self):
        """Returns the bounding box of the polygon.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> p.bounding_box.left
        0.0
        >>> p.bounding_box.lower
        0.0
        >>> p.bounding_box.right
        2.0
        >>> p.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]),
                min([v[1] for v in vertices]),
                max([v[0] for v in vertices]),
                max([v[1] for v in vertices]),
            )
        return self._bounding_box

    @property
    def area(self) -> float:
        """Returns the area of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.area
        1.0
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 1)), Point((2, 1)), Point((2, 2)), Point((1, 2))]
        ... )
        >>> p.area
        99.0
        """

        def part_area(pv: list) -> float:
            __area = 0
            for i in range(-1, len(pv) - 1):
                __area += (pv[i][0] + pv[i + 1][0]) * (pv[i][1] - pv[i + 1][1])
            __area = __area * 0.5
            if __area < 0:
                __area = -__area
            return __area

        sum_area = lambda part_type: sum([part_area(part) for part in part_type])
        _area = sum_area(self._vertices) - sum_area(self._holes)

        return _area

    @property
    def centroid(self) -> tuple:
        """Returns the centroid of the polygon.

        Notes
        -----
        The centroid returned by this method is the geometric centroid and
        respects multipart polygons with holes. Also known as the 'center
        of gravity' or 'center of mass'.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))]
        ... )
        >>> p.centroid
        (5.0353535353535355, 5.0353535353535355)
        """
        CP = [ring.centroid for ring in self._part_rings]
        AP = [ring.area for ring in self._part_rings]
        CH = [ring.centroid for ring in self._hole_rings]
        AH = [-ring.area for ring in self._hole_rings]

        A = AP + AH

        cx = sum([pt[0] * area for pt, area in zip(CP + CH, A)]) / sum(A)
        cy = sum([pt[1] * area for pt, area in zip(CP + CH, A)]) / sum(A)

        return cx, cy

    def build_quad_tree_structure(self):
        """Build the quad tree structure for this polygon. Once the
        structure is built, speed for testing if a point is inside the
        polygon will be increased significantly.
        """
        for ring in self._part_rings:
            ring.build_quad_tree_structure()
        for ring in self._hole_rings:
            ring.build_quad_tree_structure()
        self.is_quad_tree_structure_built = True

    def contains_point(self, point):
        """Test if a polygon contains a point.

        Parameters
        ----------
        point : libpysal.cg.Point
            A point to test for containment.

        Returns
        -------
        contains : bool
            ``True`` if the polygon contains ``point``, otherwise ``False``.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((4, 0)), Point((4, 5)), Point((2, 3)), Point((0, 5))]
        ... )
        >>> p.contains_point((3, 3))
        True
        >>> p.contains_point((0, 6))
        False
        >>> p.contains_point((2, 2.9))
        True
        >>> p.contains_point((4, 5))
        False
        >>> p.contains_point((4, 0))
        False

        Handles holes.

        >>> p = Polygon(
        ...     [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))],
        ...     [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
        ... )
        >>> p.contains_point((3.0, 3.0))
        False
        >>> p.contains_point((1.0, 1.0))
        True

        Notes
        -----
        Points falling exactly on polygon edges may yield unpredictable results.
        """
        searching = True
        for ring in self._hole_rings:
            if ring.contains_point(point):
                contains = False
                searching = False
                break
        if searching:
            for ring in self._part_rings:
                if ring.contains_point(point):
                    contains = True
                    searching = False
                    break
            if searching:
                contains = False
        return contains
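
# Illustrative sketch (not part of the library API): repeated containment
# queries against the same polygon can be accelerated by building the quad
# tree structure once up front. ``_demo_containment`` is a hypothetical
# helper name.
def _demo_containment():
    p = Polygon(
        [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))],
        [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
    )
    assert not p.contains_point((3.0, 3.0))  # inside the hole
    assert p.contains_point((1.0, 1.0))      # inside the shell
    # After this call each ring answers via its quad tree rather than the
    # per-query winding-number scan; the results are expected to agree.
    p.build_quad_tree_structure()
    assert p.contains_point((1.0, 1.0))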
\"\"\" p0 = self.p1 p1 = self.p2 p2", "0, 4, 4) >>> r.area 16.0 \"\"\" return (self.right - self.left) * (self.upper", ": libpysal.cg.Point A point to test for containment. Returns ------- contains : bool", "= LineSegment(Point((1, 2)), Point((5, 6))) >>> l2 = LineSegment(Point((5, 6)), Point((1, 2))) >>>", "point # bbox checks bbleft = x < self.bounding_box.left bbright = x >", "to the start slice and :math:`j` as the index to end the slice", "other.upper), ) def __getitem__(self, key): \"\"\" Examples -------- >>> r = Rectangle(-4, 3,", "1.0 is current size). Examples -------- >>> r = Rectangle(0, 0, 4, 4)", "def len(self) -> int: \"\"\"Returns the number of vertices in the polygon. Examples", "p = Polygon( ... [ ... [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0,", "x, y, X, Y = self[:] x1, y2, X1, Y1 = other[:] return", "``-- http://docs.python.org/reference/datamodel.html Examples -------- >>> r = Rectangle(0, 0, 0, 0) >>> bool(r)", "list of sub-polygons to be considered as holes. Default is ``None``. Attributes ----------", "Parameters ---------- pt : libpysal.cg.Point A point lying ccw or cw of a", "quad tree structure for the ring. This structure helps test if a point", "self._centroid : libpysal.cg.Point The ring's centroid. Notes ----- The centroid returned by this", "\"\"\"Returns the bounding box of the chain. Returns ------- self._bounding_box : libpysal.cg.Rectangle The", "eq = True return eq def intersect(self, other) -> bool: \"\"\"Test whether segment", "<NAME>, <NAME>\" import math from .sphere import arcdist from typing import Union __all__", "v1[1] * v2[0] < 0 def sw_ccw(self, pt): \"\"\"Sedgewick test for ``pt`` being", "same. Open rings will be closed. This class exists primarily as a geometric", "The bounding box of the segment. len : float The length of the", "return point_contained class Polygon(Geometry): \"\"\"Geometric representation of polygon objects. Returns a polygon created", "Point((10, 0)), Point((10, 10)), Point((0, 10))], ... [Point((1, 2)), Point((2, 2)), Point((2, 1)),", "object. Returns ------- self._bounding_box : libpysal.cg.Rectangle The bounding box of the line segment.", "<http://www.engr.colostate.edu/~dga/dga/papers/point_in_polygon.pdf>`_. Parameters ---------- point : libpysal.cg.Point The point to test for containment. Returns", "return [[v for v in part] for part in self._vertices] @property def perimeter(self)", "---------- obj : {libpysal.cg.{Point, LineSegment, Line, Ray, Chain, Polygon} A geometric representation of", "dx1: is_ccw = -1 elif dx1 * dx2 < 0 or dy1 *", "= y < self.bounding_box.lower bbupper = y > self.bounding_box.upper if bbleft or bbright", "in vertices]), ) return self._bounding_box @property def len(self) -> int: \"\"\"Returns the geometric", ">>> swap.p1[0] 5.0 >>> swap.p1[1] 6.0 >>> swap.p2[0] 1.0 >>> swap.p2[1] 2.0 \"\"\"", "area(self) -> Union[int, float]: \"\"\"Returns the area of the Rectangle. Examples -------- >>>", "Point((0, 1))]) >>> p.perimeter 4.0 \"\"\" def dist(v1: Union[int, float], v2: Union[int, float])", "(point where ray originates). See ``origin``. 
p : libpysal.cg.Point The second point on", "= sum([part_perimeter(part) for part in self._vertices]) return self._len @property def arclen(self) -> Union[int,", "'(1.0, 3.0)' \"\"\" return str(self.__loc) # return \"POINT ({} {})\".format(*self.__loc) class LineSegment(Geometry): \"\"\"Geometric", "[ring.centroid for ring in self._hole_rings] AH = [-ring.area for ring in self._hole_rings] A", "= LineSegment(Point((1, 2)), Point((5, 6))) >>> ls._reset_props() \"\"\" self._bounding_box = None self._len =", "ring in self._hole_rings: ring.build_quad_tree_structure() self.is_quad_tree_structure_built = True def contains_point(self, point): \"\"\"Test if a", "def sw_ccw(self, pt): \"\"\"Sedgewick test for ``pt`` being ccw of segment. Returns -------", "ls.len 3.0 \"\"\" # If LineSegment attributes p1, p2 changed, recompute if self._len", "{libpysal.cg.{Point, LineSegment, Line, Ray, Chain, Polygon} A new geometric representation of the object.", "-> bool: \"\"\"Tests if the point is greater than another object. Parameters ----------", "6))) >>> ls._reset_props() \"\"\" self._bounding_box = None self._len = None self._line = False", "-> dict: if len(self.parts) == 1: return {\"type\": \"LineString\", \"coordinates\": self.vertices} else: return", "return self.len @property def len(self) -> int: \"\"\"Returns the number of vertices in", "def __le__(self, other) -> bool: \"\"\"Tests if the point is less than or", "self._line : libpysal.cg.Line The ``Line`` object of the line on which the segment", "attribute of the line segment. Parameters ---------- p1 : libpysal.cg.Point A point. Returns", "float]: return sum([arcdist(p[i], p[i + 1]) * 1000.0 for i in range(len(p) -", "else: raise TypeError(\"%r is not a Chain.\" % geo) return cls(verts) @property def", "r.width 4.0 \"\"\" return self.right - self.left @property def height(self) -> Union[int, float]:", "bool: \"\"\"Returns whether a point is clockwise of the segment (``True``) or not", "of the line. ``b`` is also an attribute. Raises ------ ArithmeticError Raised when", "must be closed, the first and last point must be the same. Open", "point is greater than or equal to another object. Parameters ---------- other :", "1))]) >>> verts = c.vertices >>> len(verts) 4 \"\"\" return sum([part for part", "a new specified point. Parameters ---------- new_center : libpysal.cg.Point The new location of", "not a supported type. Returns ------- obj : {libpysal.cg.{Point, LineSegment, Line, Ray, Chain,", "\"\"\" try: return (self.__loc) != (other.__loc) except AttributeError: return True def __gt__(self, other)", "A tuple of :math:`(i,j)` with :math:`i` as the index to the start slice", "``bool()``\" ``-- http://docs.python.org/reference/datamodel.html Examples -------- >>> r = Rectangle(0, 0, 0, 0) >>>", "return self._p1 p1 = property(_get_p1, _set_p1) def _get_p2(self): \"\"\"**HELPER METHOD. DO NOT CALL.**", "None self._centroid = None self._quad_tree_structure = None def __len__(self) -> int: return len(self.vertices)", "the objects specified. Parameters ---------- vertices : list A list of vertices or", "{\"type\": \"Point\", \"coordinates\": self.__loc} def __lt__(self, other) -> bool: \"\"\"Tests if the point", "passed in as the slope. Examples -------- >>> l = Line(0.5, 0) >>>", "class Polygon(Geometry): \"\"\"Geometric representation of polygon objects. 
Returns a polygon created from the objects specified.
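A unit square, for instance (an illustrative doctest that assumes only the
constructor and the ``area`` attribute documented in this class):

>>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
>>> p.area
1.0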
Examples
--------

>>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
>>> verts = c.vertices
>>> len(verts)
4
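A multipart chain is built the same way from a list of part lists (an
illustrative doctest; ``parts`` is the property documented elsewhere in
this class):

>>> c2 = Chain([[Point((0, 0)), Point((1, 0))], [Point((10, 10)), Point((11, 10))]])
>>> len(c2.parts)
2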
>>> r = Ring(
...     [Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0, 0))]
... )
>>> str(r.centroid)
'(1.0, 0.5)'
Returns
-------
self._p1 : libpysal.cg.Point
    The reset ``p1`` attribute.
Default is ``None``.
Examples
--------

>>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
>>> r = ls._get_p2()
>>> r == Point((5, 6))
True
Examples
--------

>>> l = Line(0.5, 0)
>>> l.x(0.25)
0.5
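The companion ``y`` method evaluates the line directly, so the two calls
invert each other (an illustrative continuation of the doctest above):

>>> l.y(0.5)
0.25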
>>> p = Polygon(
...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
...     [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))]
... )
>>> len(p.holes)
1
Returns
-------
self._line : libpysal.cg.Line
    The ``Line`` object of the line on which the segment lies.
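For example (a doctest consistent with the behavior documented above):

>>> ls = LineSegment(Point((2, 2)), Point((3, 3)))
>>> l = ls.line
>>> l.m
1.0
>>> l.b
0.0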
Parameters
----------
point : libpysal.cg.Point
    The point to test for containment.

Returns
-------
contains : bool
    ``True`` if the polygon contains ``point`` otherwise ``False``.
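Examples
--------

An illustrative doctest drawn from this module's own usage:

>>> p = Polygon(
...     [Point((0, 0)), Point((4, 0)), Point((4, 5)), Point((2, 3)), Point((0, 5))]
... )
>>> p.contains_point((3, 3))
1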
Handles holes.

>>> p = Polygon(
...     [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((0, 10))],
...     [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))]
... )
>>> p.contains_point((1.0, 1.0))
True

Notes
-----
Points falling exactly on polygon edges may yield unpredictable results.
\"\"\" self._len = None self._arclen", "- v2[1]) @property def perimeter(self) -> Union[int, float]: if self._perimeter is None: dist", "is not a Chain.\" % geo) return cls(verts) @property def __geo_interface__(self) -> dict:", "self._vertices] @property def perimeter(self) -> Union[int, float]: \"\"\"Returns the perimeter of the polygon.", ">= (other.__loc) def __hash__(self) -> int: \"\"\"Returns the hash of the point's location.", "p1.len 4 >>> len(p1) 4 \"\"\" if self._len is None: self._len = len(self.vertices)", "Point((0, 1)) == Point((0, 1)) True >>> Point((0, 1)) == Point((1, 1)) False", "list(map(Ring, vertices)) self._vertices = [clockwise(part) for part in vertices] else: self._part_rings = [Ring(vertices)]", "``p2`` attribute of the line segment. Returns ------- self._p2 : libpysal.cg.Point The ``_p2``", "---------- p2 : libpysal.cg.Point A point. Returns ------- self._p2 : libpysal.cg.Point The reset", "None: self._bbox = [ self.bounding_box.left, self.bounding_box.lower, self.bounding_box.right, self.bounding_box.upper, ] return self._bbox @property def", "other) -> bool: \"\"\"Tests if the point is less than another object. Parameters", "-> list: if standalone.is_clockwise(part): return part[:] else: return part[::-1] vl = list(vertices) if", "object. Examples -------- >>> p = Point((1, 3)) >>> str(p) '(1.0, 3.0)' \"\"\"", "The bounding box of the polygon. Examples -------- >>> p = Polygon([Point((0, 0)),", "float(right) self.upper = float(upper) def __bool__(self): \"\"\"Rectangles will evaluate to False if they", "_geoJSON_type_to_Pysal_type[geo_type].__from_geo_interface__(geo) else: raise NotImplementedError(\"%s is not supported at this time.\" % geo_type) return", ">>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))]) >>> p.area", "_set_p1(self, p1): \"\"\"**HELPER METHOD. DO NOT CALL.** Sets the ``p1`` attribute of the", "in zip(CP + CH, A)]) / sum(A) cy = sum([pt[1] * area for", "LineSegment(Point((0, 0)), Point((5, 0))) >>> ls.is_ccw(Point((2, 2))) True >>> ls.is_ccw(Point((2, -2))) False \"\"\"", "geo[\"coordinates\"]: verts = [[Point(pt) for pt in part] for part in polygon] parts", "-> int: \"\"\"Returns the geometric length of the chain. Examples -------- >>> c", "specified. Parameters ---------- vertices : list A list of vertices or a list", "return [[v for v in part] for part in self._vertices] @property def bounding_box(self):", "class Ray: \"\"\"Geometric representation of ray objects. Parameters ---------- origin : libpysal.cg.Point The", "Point((1, 1)) False \"\"\" return (self.__loc) >= (other.__loc) def __hash__(self) -> int: \"\"\"Returns", ">>> len(p1.vertices) 4 \"\"\" return sum([part for part in self._vertices], []) + sum(", "p2 changed, recompute if self._bounding_box is None: self._bounding_box = Rectangle( min([self._p1[0], self._p2[0]]), min([self._p1[1],", "ending point of the line segment. bounding_box : libpysal.cg.Rectangle The bounding box of", "try: return (self.__loc) != (other.__loc) except AttributeError: return True def __gt__(self, other) ->", "\"\"\" return [[v for v in part] for part in self._vertices] @property def", "/ sum(A) return cx, cy def build_quad_tree_structure(self): \"\"\"Build the quad tree structure for", "on the ray (not the point where the ray originates). See ``second_p``. 
Examples
--------

>>> l = Ray(Point((0, 0)), Point((1, 0)))
>>> str(l.o)
'(0.0, 0.0)'
>>> str(l.p)
'(1.0, 0.0)'
bounding_box : libpysal.cg.Rectangle
    The bounding box of the segment.
Open rings will be closed.
This class exists primarily as a geometric primitive to form complex
polygons with multiple rings and holes.
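For example (an illustrative doctest; note the closing vertex repeats the
first, as required above):

>>> r = Ring([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0, 0))])
>>> r.bounding_box.right
2.0
>>> r.bounding_box.upper
1.0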
NotImplementedError
    Raised when ``geo_type`` is not supported at this time.
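For instance (an illustrative doctest; a plain GeoJSON-style mapping stands
in for an object exposing ``__geo_interface__``):

>>> p = asShape({"type": "Point", "coordinates": (1.0, 2.0)})
>>> str(p)
'(1.0, 2.0)'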
class Geometry(object):
    """A base class to help implement ``is_geometry``
    and make geometric types extendable.
    """

    def __init__(self):
        pass


class Point(Geometry):
    """Geometric class for point objects.

    Parameters
    ----------
    loc : tuple
        The point's location (number :math:`x`-tuple, :math:`x` > 1).

    Examples
    --------
    >>> p = Point((1, 3))
    """

    def __init__(self, loc):
        self.__loc = tuple(map(float, loc))

    @classmethod
    def __from_geo_interface__(cls, geo):
        return cls(geo["coordinates"])

    @property
    def __geo_interface__(self):
        return {"type": "Point", "coordinates": self.__loc}

    def __lt__(self, other) -> bool:
        """Tests if the point is less than another object.

        Examples
        --------
        >>> Point((0, 1)) < Point((0, 1))
        False
        >>> Point((0, 1)) < Point((1, 1))
        True
        """
        return (self.__loc) < (other.__loc)

    def __le__(self, other) -> bool:
        """Tests if the point is less than or equal to another object."""
        return (self.__loc) <= (other.__loc)

    def __eq__(self, other) -> bool:
        """Tests if the point is equal to another object.

        Examples
        --------
        >>> Point((0, 1)) == Point((0, 1))
        True
        >>> Point((0, 1)) == Point((1, 1))
        False
        """
        try:
            return (self.__loc) == (other.__loc)
        except AttributeError:
            return False

    def __ne__(self, other) -> bool:
        """Tests if the point is not equal to another object.

        Examples
        --------
        >>> Point((0, 1)) != Point((0, 1))
        False
        >>> Point((0, 1)) != Point((1, 1))
        True
        """
        try:
            return (self.__loc) != (other.__loc)
        except AttributeError:
            return True
    def __gt__(self, other) -> bool:
        """Tests if the point is greater than another object."""
        return (self.__loc) > (other.__loc)

    def __ge__(self, other) -> bool:
        """Tests if the point is greater than or equal to another object.

        Examples
        --------
        >>> Point((0, 1)) >= Point((0, 1))
        True
        >>> Point((0, 1)) >= Point((1, 1))
        False
        """
        return (self.__loc) >= (other.__loc)

    def __hash__(self) -> int:
        """Returns the hash of the point's location.

        Examples
        --------
        >>> hash(Point((0, 1))) == hash(Point((0, 1)))
        True
        >>> hash(Point((0, 1))) == hash(Point((1, 1)))
        False
        """
        return hash(self.__loc)

    def __getitem__(self, *args) -> Union[int, float]:
        """Return the coordinate(s) for the given dimension(s). Slices are
        delegated to the underlying tuple, so the Python 2 ``__getslice__``
        hook in the original source is obsolete here.

        Parameters
        ----------
        *args : tuple
            A singleton tuple of :math:`(i)` with :math:`i` as the index of
            the desired dimension, or a slice of dimensions.

        Examples
        --------
        >>> p = Point((5.5, 4.3))
        >>> p[0] == 5.5
        True
        >>> p[1] == 4.3
        True
        >>> p = Point((3, 6, 2))
        >>> p[:2] == (3, 6)
        True
        >>> p[1:2] == (6,)
        True
        """
        return self.__loc.__getitem__(*args)

    def __len__(self) -> int:
        """Returns the dimensions of the point.

        Examples
        --------
        >>> len(Point((1, 2)))
        2
        """
        return len(self.__loc)

    def __repr__(self) -> str:
        """Returns the string representation of the ``Point``.

        Examples
        --------
        >>> Point((0, 1))
        (0.0, 1.0)
        """
        return str(self)

    def __str__(self) -> str:
        """Returns a string representation of a ``Point`` object.

        Examples
        --------
        >>> p = Point((1, 3))
        >>> str(p)
        '(1.0, 3.0)'
        """
        return str(self.__loc)
        # return "POINT ({} {})".format(*self.__loc)

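# Small sketch (added): points compare lexicographically on their coordinate
# tuples, so they can be sorted and used directly as dict or set keys.
def _point_ordering_example():
    pts = [Point((1, 1)), Point((0, 1)), Point((0, 0))]
    pts.sort()
    assert pts[0] == Point((0, 0))
    return {Point((0, 0)): "origin"}  # hashable, keyed by location
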
class LineSegment(Geometry):
    """Geometric representation of line segment objects.

    Parameters
    ----------
    start_pt : libpysal.cg.Point
        The point where the segment begins.
    end_pt : libpysal.cg.Point
        The point where the segment ends.

    Attributes
    ----------
    p1 : libpysal.cg.Point
        The starting point of the line segment.
    p2 : libpysal.cg.Point
        The ending point of the line segment.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the segment.
    len : float
        The length of the segment.
    line : libpysal.cg.Line
        The line on which the segment lies.

    Examples
    --------
    >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
    """

    def __init__(self, start_pt, end_pt):
        self._p1 = start_pt
        self._p2 = end_pt
        self._reset_props()

    def __str__(self):
        return "LineSegment(" + str(self._p1) + ", " + str(self._p2) + ")"
        # return "LINESTRING ({} {}, {} {})".format(
        #     self._p1[0], self._p1[1], self._p2[0], self._p2[1]
        # )

    def __eq__(self, other) -> bool:
        """Returns ``True`` if ``self`` and ``other`` are the same line segment.

        Examples
        --------
        >>> l1 = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> l2 = LineSegment(Point((5, 6)), Point((1, 2)))
        >>> l1 == l2
        True
        >>> l2 == l1
        True
        """
        eq = False
        if not isinstance(other, self.__class__):
            pass
        else:
            if other.p1 == self._p1 and other.p2 == self._p2:
                eq = True
            elif other.p2 == self._p1 and other.p1 == self._p2:
                eq = True
        return eq
    def intersect(self, other) -> bool:
        """Test whether the segment intersects another segment (``True``)
        or not (``False``). Intersection is inclusive of shared endpoints.

        Parameters
        ----------
        other : libpysal.cg.LineSegment
            Another line segment to test against.

        Examples
        --------
        >>> ls = LineSegment(Point((5, 0)), Point((10, 1)))
        >>> ls1 = LineSegment(Point((5, 0)), Point((10, 1)))
        >>> ls.intersect(ls1)
        True
        >>> ls2 = LineSegment(Point((5, 1)), Point((10, 1)))
        >>> ls.intersect(ls2)
        True
        """
        ccw1 = self.sw_ccw(other.p2)
        ccw2 = self.sw_ccw(other.p1)
        ccw3 = other.sw_ccw(self.p1)
        ccw4 = other.sw_ccw(self.p2)
        intersects = ccw1 * ccw2 <= 0 and ccw3 * ccw4 <= 0
        return intersects

    def sw_ccw(self, pt):
        """Sedgewick test for ``pt`` being ccw of segment.

        Returns
        -------
        is_ccw : bool
            ``1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is ccw.
            ``-1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is cw.
            ``-1`` if the points are collinear and ``self.p1`` is in the middle.
            ``1`` if the points are collinear and ``self.p2`` is in the middle.
            ``0`` if the points are collinear and ``pt`` is in the middle.
        """
        p0 = self.p1
        p1 = self.p2
        p2 = pt
        dx1 = p1[0] - p0[0]
        dy1 = p1[1] - p0[1]
        dx2 = p2[0] - p0[0]
        dy2 = p2[1] - p0[1]
        if dy1 * dx2 < dy2 * dx1:
            is_ccw = 1
        elif dy1 * dx2 > dy2 * dx1:
            is_ccw = -1
        elif dx1 * dx2 < 0 or dy1 * dy2 < 0:
            is_ccw = -1
        elif dx1 * dx1 + dy1 * dy1 >= dx2 * dx2 + dy2 * dy2:
            is_ccw = 0
        else:
            is_ccw = 1
        return is_ccw
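    # Sketch (added): the ccw products above make ``intersect`` inclusive of
    # endpoint touches; this assumed example shows a crossing and a touch.
    @staticmethod
    def _intersect_example():
        ab = LineSegment(Point((0, 0)), Point((4, 4)))
        cd = LineSegment(Point((0, 4)), Point((4, 0)))  # crosses ab at (2, 2)
        touch = LineSegment(Point((4, 4)), Point((6, 4)))  # shares only (4, 4)
        return ab.intersect(cd), ab.intersect(touch)  # (True, True)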
    def _reset_props(self):
        """**HELPER METHOD. DO NOT CALL.** Resets attributes which are
        functions of other attributes. The getters for these attributes
        (implemented as properties) then recompute their values if they
        have been reset since the last call to the getter.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls._reset_props()
        """
        self._bounding_box = None
        self._len = None
        self._line = False

    def _get_p1(self):
        """**HELPER METHOD. DO NOT CALL.** Returns the ``p1`` attribute of
        the line segment.
        """
        return self._p1

    def _set_p1(self, p1):
        """**HELPER METHOD. DO NOT CALL.** Sets the ``p1`` attribute of the
        line segment.

        Parameters
        ----------
        p1 : libpysal.cg.Point
            A point.

        Returns
        -------
        self._p1 : libpysal.cg.Point
            The reset ``p1`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p1(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        """
        self._p1 = p1
        self._reset_props()
        return self._p1

    p1 = property(_get_p1, _set_p1)

    def _get_p2(self):
        """**HELPER METHOD. DO NOT CALL.** Returns the ``p2`` attribute of
        the line segment.

        Returns
        -------
        self._p2 : libpysal.cg.Point
            The ``_p2`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._get_p2()
        >>> r == Point((5, 6))
        True
        """
        return self._p2

    def _set_p2(self, p2):
        """**HELPER METHOD. DO NOT CALL.** Sets the ``p2`` attribute of the
        line segment.

        Parameters
        ----------
        p2 : libpysal.cg.Point
            A point.

        Returns
        -------
        self._p2 : libpysal.cg.Point
            The reset ``p2`` attribute.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> r = ls._set_p2(Point((3, -1)))
        >>> r == Point((3.0, -1.0))
        True
        """
        self._p2 = p2
        self._reset_props()
        return self._p2

    p2 = property(_get_p2, _set_p2)
    def is_ccw(self, pt) -> bool:
        """Returns whether a point is counterclockwise of the segment
        (``True``) or not (``False``). Exclusive.

        Parameters
        ----------
        pt : libpysal.cg.Point
            A point lying ccw or cw of a segment.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_ccw(Point((2, 2)))
        True
        >>> ls.is_ccw(Point((2, -2)))
        False
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] > 0

    def is_cw(self, pt) -> bool:
        """Returns whether a point is clockwise of the segment (``True``)
        or not (``False``). Exclusive.

        Parameters
        ----------
        pt : libpysal.cg.Point
            A point lying ccw or cw of a segment.

        Examples
        --------
        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
        >>> ls.is_cw(Point((2, 2)))
        False
        >>> ls.is_cw(Point((2, -2)))
        True
        """
        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
        return v1[0] * v2[1] - v1[1] * v2[0] < 0

    def get_swap(self):
        """Returns a ``LineSegment`` object which has its endpoints swapped.

        Returns
        -------
        line_seg : libpysal.cg.LineSegment
            The ``LineSegment`` object which has its endpoints swapped.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> swap = ls.get_swap()
        >>> swap.p1[1]
        6.0
        >>> swap.p2[0]
        1.0
        >>> swap.p2[1]
        2.0
        """
        line_seg = LineSegment(self._p2, self._p1)
        return line_seg
    @property
    def bounding_box(self):
        """Returns the minimum bounding box of a ``LineSegment`` object.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the line segment.

        Examples
        --------
        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
        >>> ls.bounding_box.left
        1.0
        >>> ls.bounding_box.lower
        2.0
        >>> ls.bounding_box.right
        5.0
        >>> ls.bounding_box.upper
        6.0
        """
        # If LineSegment attributes p1, p2 changed, recompute
        if self._bounding_box is None:
            self._bounding_box = Rectangle(
                min([self._p1[0], self._p2[0]]),
                min([self._p1[1], self._p2[1]]),
                max([self._p1[0], self._p2[0]]),
                max([self._p1[1], self._p2[1]]),
            )
        return Rectangle(
            self._bounding_box.left,
            self._bounding_box.lower,
            self._bounding_box.right,
            self._bounding_box.upper,
        )

    @property
    def len(self) -> float:
        """Returns the length of a ``LineSegment`` object.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((5, 2)))
        >>> ls.len
        3.0
        """
        # If LineSegment attributes p1, p2 changed, recompute
        if self._len is None:
            self._len = math.hypot(
                self._p1[0] - self._p2[0], self._p1[1] - self._p2[1]
            )
        return self._len

    @property
    def line(self):
        """Returns a ``Line`` object of the line on which the segment lies.

        Examples
        --------
        >>> ls = LineSegment(Point((2, 2)), Point((3, 3)))
        >>> l = ls.line
        >>> l.m
        1.0
        >>> l.b
        0.0
        """
        if self._line == False:
            dx = self._p1[0] - self._p2[0]
            dy = self._p1[1] - self._p2[1]
            if dx == 0 and dy == 0:
                self._line = None
            elif dx == 0:
                self._line = VerticalLine(self._p1[0])
            else:
                m = dy / float(dx)
                # y - mx
                b = self._p1[1] - m * self._p1[0]
                self._line = Line(m, b)
        return self._line

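# Sketch (added): the cached properties above recompute lazily after an
# endpoint changes through the ``p1``/``p2`` setters.
def _segment_props_example():
    seg = LineSegment(Point((1, 2)), Point((5, 6)))
    box = seg.bounding_box  # a copy: Rectangle(1.0, 2.0, 5.0, 6.0)
    seg.p2 = Point((1, 6))  # the setter resets the cached props
    return box.right, seg.len  # (5.0, 4.0)
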
class VerticalLine(Geometry):
    """Geometric representation of vertical line objects.

    Parameters
    ----------
    x : {int, float}
        The :math:`x`-intercept of the line. ``x`` is also an attribute.

    Examples
    --------
    >>> ls = VerticalLine(0)
    >>> ls.m
    inf
    >>> ls.b
    nan
    """

    def __init__(self, x):
        self._x = float(x)
        self.m = float("inf")
        self.b = float("nan")

    def x(self, y) -> float:
        """Returns the :math:`x`-value of the line at a particular :math:`y`-value.

        Examples
        --------
        >>> l = VerticalLine(0)
        >>> l.x(0.25)
        0.0
        """
        return self._x

    def y(self, x) -> float:
        """Returns the :math:`y`-value of the line at a particular :math:`x`-value.

        Examples
        --------
        >>> l = VerticalLine(1)
        >>> l.y(1)
        nan
        """
        return float("nan")

class Line(Geometry):
    """Geometric representation of line objects.

    Parameters
    ----------
    m : {int, float}
        The slope of the line. ``m`` is also an attribute.
    b : {int, float}
        The :math:`y`-intercept of the line. ``b`` is also an attribute.

    Raises
    ------
    ArithmeticError
        Raised when infinity is passed in as the slope.

    Examples
    --------
    >>> ls = Line(1, 0)
    >>> ls.m
    1.0
    >>> ls.b
    0.0
    """

    def __init__(self, m, b):
        if m == float("inf"):
            raise ArithmeticError("Slope cannot be infinite.")
        self.m = float(m)
        self.b = float(b)

    def x(self, y: Union[int, float]) -> float:
        """Returns the :math:`x`-value of the line at a particular :math:`y`-value.

        Parameters
        ----------
        y : {int, float}
            The :math:`y`-value at which to compute :math:`x`.

        Raises
        ------
        ArithmeticError
            Raised when the slope is zero, so no unique
            :math:`x` exists for the given :math:`y`.

        Examples
        --------
        >>> l = Line(0.5, 0)
        >>> l.x(0.25)
        0.5
        """
        if self.m == 0:
            raise ArithmeticError("Cannot solve for 'x' when slope is zero.")
        return (y - self.b) / self.m

    def y(self, x: Union[int, float]) -> float:
        """Returns the :math:`y`-value of the line at a particular :math:`x`-value.

        Parameters
        ----------
        x : {int, float}
            The :math:`x`-value at which to compute :math:`y`.

        Examples
        --------
        >>> l = Line(1, 0)
        >>> l.y(1)
        1.0
        """
        if self.m == 0:
            return self.b
        return self.m * x + self.b

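# Sketch (added): ``Line`` and ``VerticalLine`` share the ``x``/``y``
# evaluation protocol, so callers can evaluate either without type checks.
def _line_eval_example():
    diag = Line(2, 1)  # y = 2x + 1
    vert = VerticalLine(3)
    return diag.y(2), diag.x(5), vert.x(99)  # (5.0, 2.0, 3.0)
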
class Ray:
    """Geometric representation of ray objects.

    Parameters
    ----------
    origin : libpysal.cg.Point
        The point where the ray originates.
    second_p :
        The second point specifying the ray (not ``origin``.)

    Attributes
    ----------
    o : libpysal.cg.Point
        The origin (point where the ray originates). See ``origin``.
    p : libpysal.cg.Point
        The second point on the ray (not ``origin``.) See ``second_p``.

    Examples
    --------
    >>> l = Ray(Point((0, 0)), Point((1, 0)))
    >>> str(l.o)
    '(0.0, 0.0)'
    >>> str(l.p)
    '(1.0, 0.0)'
    """

    def __init__(self, origin, second_p):
        self.o = origin
        self.p = second_p

class Chain(Geometry):
    """Geometric representation of a chain, also known as a polyline.

    Parameters
    ----------
    vertices : list
        A point list or list of point lists.

    Attributes
    ----------
    vertices : list
        The list of points of the vertices of the chain in order.
    len : float
        The geometric length of the chain.

    Examples
    --------
    >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
    """

    def __init__(self, vertices: list):
        if isinstance(vertices[0], list):
            self._vertices = [part for part in vertices]
        else:
            self._vertices = [vertices]
        self._reset_props()

    @classmethod
    def __from_geo_interface__(cls, geo: dict):
        if geo["type"].lower() == "linestring":
            verts = [Point(pt) for pt in geo["coordinates"]]
        elif geo["type"].lower() == "multilinestring":
            verts = [list(map(Point, part)) for part in geo["coordinates"]]
        else:
            raise TypeError("%r is not a Chain." % geo)
        return cls(verts)

    @property
    def __geo_interface__(self) -> dict:
        """Return ``__geo_interface__`` information lookup."""
        if len(self.parts) == 1:
            return {"type": "LineString", "coordinates": self.vertices}
        else:
            return {"type": "MultiLineString", "coordinates": self.parts}

    def _reset_props(self):
        """**HELPER METHOD. DO NOT CALL.** Resets attributes which are
        functions of other attributes. The getters for these attributes
        (implemented as properties) then recompute their values if they
        have been reset since the last call to the getter.
        """
        self._len = None
        self._arclen = None
        self._bounding_box = None

    @property
    def vertices(self) -> list:
        """Returns the vertices of the chain in clockwise order.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> verts = c.vertices
        >>> len(verts)
        4
        """
        return sum([part for part in self._vertices], [])

    @property
    def parts(self) -> list:
        """Returns the parts (lists of ``libpysal.cg.Point`` objects) of the chain.

        Examples
        --------
        >>> c = Chain(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],
        ...         [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))],
        ...     ]
        ... )
        >>> len(c.parts)
        2
        """
        return [[v for v in part] for part in self._vertices]
    @property
    def bounding_box(self):
        """Returns the bounding box of the chain.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the chain.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> c.bounding_box.left
        0.0
        >>> c.bounding_box.lower
        0.0
        >>> c.bounding_box.right
        2.0
        >>> c.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]),
                min([v[1] for v in vertices]),
                max([v[0] for v in vertices]),
                max([v[1] for v in vertices]),
            )
        return self._bounding_box

    @property
    def len(self) -> Union[int, float]:
        """Returns the geometric length of the chain.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> c.len
        3.0
        >>> c = Chain(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1))],
        ...         [Point((10, 10)), Point((11, 10)), Point((11, 11))],
        ...     ]
        ... )
        >>> c.len
        4.0
        """

        def dist(v1: tuple, v2: tuple) -> Union[int, float]:
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(part) -> Union[int, float]:
            return sum([dist(part[i], part[i + 1]) for i in range(len(part) - 1)])

        if self._len is None:
            self._len = sum([part_perimeter(part) for part in self._vertices])
        return self._len

    @property
    def arclen(self) -> Union[int, float]:
        """Returns the geometric length of the chain
        computed using 'arcdistance' (meters).
        """

        def part_perimeter(p: list) -> Union[int, float]:
            return sum([arcdist(p[i], p[i + 1]) * 1000.0 for i in range(len(p) - 1)])

        if self._arclen is None:
            self._arclen = sum([part_perimeter(part) for part in self._vertices])
        return self._arclen

    @property
    def segments(self) -> list:
        """Returns the segments that compose the chain."""
        return [
            [LineSegment(a, b) for (a, b) in zip(part[:-1], part[1:])]
            for part in self._vertices
        ]

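# Sketch (added): a two-part chain; ``len`` sums over the parts while
# ``segments`` yields one ``LineSegment`` list per part.
def _chain_example():
    c = Chain(
        [
            [Point((0, 0)), Point((1, 0)), Point((1, 1))],
            [Point((10, 10)), Point((11, 10)), Point((11, 11))],
        ]
    )
    return c.len, len(c.segments[0])  # (4.0, 2)
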
class Ring(Geometry):
    """Geometric representation of a linear ring. Linear rings must be
    closed: the first and last point must be the same. Open rings will be
    closed. This class exists primarily as a geometric primitive to form
    complex polygons with multiple rings and holes. The ordering of the
    vertices is ignored and will not be altered.

    Parameters
    ----------
    vertices : list
        A list of vertices.

    Attributes
    ----------
    vertices : list
        A list of points with the vertices of the ring.
    len : int
        The number of vertices.
    perimeter : float
        The geometric length of the perimeter of the ring.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the ring.
    area : float
        The area enclosed by the ring.
    centroid : {tuple, libpysal.cg.Point}
        The centroid of the ring defined by the 'center of gravity'
        or 'center of mass'.
    _quad_tree_structure : libpysal.cg.QuadTreeStructureSingleRing
        The quad tree structure for the ring. This structure helps
        test if a point is inside the ring.

    Examples
    --------
    >>> r = Ring(
    ...     [Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0, 0))]
    ... )
    """

    def __init__(self, vertices):
        if vertices[0] != vertices[-1]:
            vertices = vertices[:] + vertices[0:1]
            # msg = "Supplied vertices do not form a closed ring, "
            # msg += "the first and last vertices are not the same."
            # raise ValueError(msg)
        self.vertices = tuple(vertices)
        self._perimeter = None
        self._bounding_box = None
        self._area = None
        self._centroid = None
        self._quad_tree_structure = None
    def __len__(self) -> int:
        return len(self.vertices)

    @property
    def len(self) -> int:
        return len(self)

    @staticmethod
    def dist(v1, v2) -> Union[int, float]:
        return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

    @property
    def perimeter(self) -> Union[int, float]:
        if self._perimeter is None:
            dist = self.dist
            v = self.vertices
            self._perimeter = sum(
                [dist(v[i], v[i + 1]) for i in range(-1, len(self) - 1)]
            )
        return self._perimeter

    @property
    def bounding_box(self):
        """Returns the bounding box of the ring."""
        if self._bounding_box is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            self._bounding_box = Rectangle(min(x), min(y), max(x), max(y))
        return self._bounding_box

    @property
    def area(self) -> Union[int, float]:
        """Returns the area of the ring.

        Examples
        --------
        >>> r = Ring(
        ...     [Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0, 0))]
        ... )
        >>> r.area
        2.0
        """
        return abs(self.signed_area)

    @property
    def signed_area(self) -> Union[int, float]:
        if self._area is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            N = len(self)
            A = 0.0
            for i in range(N - 1):
                A += (x[i] + x[i + 1]) * (y[i] - y[i + 1])
            A = A * 0.5
            self._area = -A
        return self._area

    @property
    def centroid(self):
        """Returns the centroid of the ring, defined by the
        'center of gravity' or 'center of mass'.
        """
        if self._centroid is None:
            vertices = self.vertices
            x = [v[0] for v in vertices]
            y = [v[1] for v in vertices]
            A = self.signed_area
            N = len(self)
            cx = 0
            cy = 0
            for i in range(N - 1):
                f = x[i] * y[i + 1] - x[i + 1] * y[i]
                cx += (x[i] + x[i + 1]) * f
                cy += (y[i] + y[i + 1]) * f
            cx = 1.0 / (6 * A) * cx
            cy = 1.0 / (6 * A) * cy
            self._centroid = Point((cx, cy))
        return self._centroid

    def build_quad_tree_structure(self):
        """Build the quad tree structure for this ring. Once the structure is
        built, speed for testing if a point is inside the ring will be
        increased significantly.
        """
        self._quad_tree_structure = QuadTreeStructureSingleRing(self)

    def contains_point(self, point) -> bool:
        """Point containment using the winding number (or the quad tree
        structure, if it has been built).

        Parameters
        ----------
        point : libpysal.cg.Point
            The point to test for containment.

        Returns
        -------
        point_contained : bool
            ``True`` if ``point`` is contained within the ring,
            otherwise ``False``.
        """
        point_contained = False
        if self._quad_tree_structure is None:
            x, y = point
            # bbox checks
            bbleft = x < self.bounding_box.left
            bbright = x > self.bounding_box.right
            bblower = y < self.bounding_box.lower
            bbupper = y > self.bounding_box.upper
            if bbleft or bbright or bblower or bbupper:
                pass
            else:
                rn = len(self.vertices)
                xs = [self.vertices[i][0] - point[0] for i in range(rn)]
                ys = [self.vertices[i][1] - point[1] for i in range(rn)]
                w = 0
                for i in range(len(self.vertices) - 1):
                    yi = ys[i]
                    yj = ys[i + 1]
                    xi = xs[i]
                    xj = xs[i + 1]
                    if yi * yj < 0:
                        r = xi + yi * (xj - xi) / (yi - yj)
                        if r > 0:
                            if yi < 0:
                                w += 1
                            else:
                                w -= 1
                    elif yi == 0 and xi > 0:
                        if yj > 0:
                            w += 0.5
                        else:
                            w -= 0.5
                    elif yj == 0 and xj > 0:
                        if yi < 0:
                            w += 0.5
                        else:
                            w -= 0.5
                if w == 0:
                    pass
                else:
                    point_contained = True
        else:
            point_contained = self._quad_tree_structure.contains_point(point)
        return point_contained

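# Sketch (added): the winding-number test is exact for interior points;
# building the quad tree (via the ``QuadTreeStructureSingleRing`` helper
# imported at the bottom of this module) changes only speed, not the answer.
def _ring_example():
    r = Ring(
        [Point((0, 0)), Point((4, 0)), Point((4, 4)), Point((0, 4)), Point((0, 0))]
    )
    before = r.contains_point((2, 2))
    r.build_quad_tree_structure()
    return before, r.contains_point((2, 2)), r.area  # (True, True, 16.0)
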
class Polygon(Geometry):
    """Geometric representation of polygon objects.
    Returns a polygon created from the objects specified.

    Parameters
    ----------
    vertices : list
        A list of vertices or a list of lists of vertices.
    holes : list
        A list of sub-polygons to be considered as holes.
        Default is ``None``.

    Attributes
    ----------
    vertices : list
        A list of points with the vertices of the polygon in clockwise order.
    len : int
        The number of vertices including holes.
    perimeter : float
        The geometric length of the perimeter of the polygon.
    bounding_box : libpysal.cg.Rectangle
        The bounding box of the polygon.
    bbox : list
        A list representation of the bounding box in the form
        ``[left, lower, right, upper]``.
    area : float
        The area enclosed by the polygon.
    centroid : tuple
        The 'center of gravity' or 'center of mass'.

    Examples
    --------
    >>> p1 = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
    """

    def __init__(self, vertices, holes=None):
        self._part_rings = []
        self._hole_rings = []

        def clockwise(part: list) -> list:
            if standalone.is_clockwise(part):
                return part[:]
            else:
                return part[::-1]

        vl = list(vertices)
        if isinstance(vl[0], list):
            self._part_rings = list(map(Ring, vertices))
            self._vertices = [clockwise(part) for part in vertices]
        else:
            self._part_rings = [Ring(vertices)]
            self._vertices = [clockwise(vertices)]
        if holes is not None and holes != []:
            if isinstance(holes[0], list):
                self._hole_rings = list(map(Ring, holes))
                self._holes = [clockwise(hole) for hole in holes]
            else:
                self._hole_rings = [Ring(holes)]
                self._holes = [clockwise(holes)]
        else:
            self._holes = [[]]
        self._reset_props()
    @classmethod
    def __from_geo_interface__(cls, geo: dict):
        """While PySAL does not differentiate polygons and multipolygons,
        GEOS, Shapely, and geoJSON do. In GEOS, etc, polygons may only have
        a single exterior ring; all other parts are holes. MultiPolygons are
        simply a list of polygons.
        """
        geo_type = geo["type"].lower()
        if geo_type == "multipolygon":
            parts = []
            holes = []
            for polygon in geo["coordinates"]:
                verts = [[Point(pt) for pt in part] for part in polygon]
                parts += verts[0:1]
                holes += verts[1:]
            if not holes:
                holes = None
            return cls(parts, holes)
        else:
            verts = [[Point(pt) for pt in part] for part in geo["coordinates"]]
            return cls(verts[0:1], verts[1:])

    @property
    def __geo_interface__(self) -> dict:
        """Return ``__geo_interface__`` information lookup."""
        if len(self.parts) > 1:
            geo = {
                "type": "MultiPolygon",
                "coordinates": [[part] for part in self.parts],
            }
            if self._holes[0]:
                geo["coordinates"][0] += self._holes
            return geo
        if self._holes[0]:
            return {"type": "Polygon", "coordinates": self._vertices + self._holes}
        else:
            return {"type": "Polygon", "coordinates": self._vertices}

    def _reset_props(self):
        """Resets the geometric properties of the polygon."""
        self._perimeter = None
        self._bounding_box = None
        self._bbox = None
        self._area = None
        self._centroid = None
        self._quad_tree_structure = None
        self.is_quad_tree_structure_built = False

    def __len__(self) -> int:
        """Returns the number of vertices in the polygon."""
        return len(self.vertices)

    @property
    def vertices(self) -> list:
        """Returns the vertices of the polygon in clockwise order.

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> len(p1.vertices)
        4
        """
        return sum([part for part in self._vertices], []) + sum(
            [part for part in self._holes], []
        )

    @property
    def holes(self) -> list:
        """Returns the holes of the polygon in clockwise order."""
        return [[v for v in part] for part in self._holes]

    @property
    def parts(self) -> list:
        """Returns the parts of the polygon in clockwise order.

        Examples
        --------
        >>> p = Polygon(
        ...     [
        ...         [Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],
        ...         [Point((2, 1)), Point((2, 2)), Point((1, 2)), Point((1, 1))],
        ...     ]
        ... )
        >>> len(p.parts)
        2
        """
        return [[v for v in part] for part in self._vertices] + [
            [v for v in part] for part in self._holes if len(part) > 0
        ]

    @property
    def bounding_box(self):
        """Returns the bounding box of the polygon.

        Returns
        -------
        self._bounding_box : libpysal.cg.Rectangle
            The bounding box of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> p.bounding_box.left
        0.0
        >>> p.bounding_box.lower
        0.0
        >>> p.bounding_box.right
        2.0
        >>> p.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]),
                min([v[1] for v in vertices]),
                max([v[0] for v in vertices]),
                max([v[1] for v in vertices]),
            )
        return self._bounding_box

    @property
    def bbox(self):
        """Returns the bounding box of the polygon as a list.

        Returns
        -------
        self._bbox : list
            The bounding box of the polygon as a list.

        See Also
        --------
        libpysal.cg.bounding_box
        """
        if self._bbox is None:
            self._bbox = [
                self.bounding_box.left,
                self.bounding_box.lower,
                self.bounding_box.right,
                self.bounding_box.upper,
            ]
        return self._bbox
    @property
    def perimeter(self) -> Union[int, float]:
        """Returns the perimeter of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.perimeter
        4.0
        """

        def dist(v1: Union[int, float], v2: Union[int, float]) -> float:
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(part) -> Union[int, float]:
            return sum([dist(part[i], part[i + 1]) for i in range(-1, len(part) - 1)])

        sum_perim = lambda part_type: sum([part_perimeter(part) for part in part_type])

        if self._perimeter is None:
            self._perimeter = sum_perim(self._vertices) + sum_perim(self._holes)
        return self._perimeter

    @property
    def area(self) -> Union[int, float]:
        """Returns the area of the polygon.

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.area
        1.0
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))],
        ... )
        >>> p.area
        99.0
        """

        def part_area(pv: list) -> Union[int, float]:
            __area = 0
            for i in range(-1, len(pv) - 1):
                __area += (pv[i][0] + pv[i + 1][0]) * (pv[i][1] - pv[i + 1][1])
            __area = __area * 0.5
            if __area < 0:
                __area = -__area
            return __area

        sum_area = lambda part_type: sum([part_area(part) for part in part_type])
        _area = sum_area(self._vertices) - sum_area(self._holes)
        return _area

    @property
    def centroid(self) -> tuple:
        """Returns the centroid of the polygon.

        Notes
        -----
        The centroid returned by this method is the geometric centroid and
        respects multipart polygons with holes. Also known as the 'center of
        gravity' or 'center of mass'.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
        ...     [Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))],
        ... )
        >>> p.centroid
        (5.0353535353535355, 5.0353535353535355)
        """
        CP = [ring.centroid for ring in self._part_rings]
        AP = [ring.area for ring in self._part_rings]
        CH = [ring.centroid for ring in self._hole_rings]
        AH = [-ring.area for ring in self._hole_rings]
        A = AP + AH
        cx = sum([pt[0] * area for pt, area in zip(CP + CH, A)]) / sum(A)
        cy = sum([pt[1] * area for pt, area in zip(CP + CH, A)]) / sum(A)
        return cx, cy

    def build_quad_tree_structure(self):
        """Build the quad tree structure for this polygon. Once the structure
        is built, speed for testing if a point is inside the polygon will be
        increased significantly.
        """
        for ring in self._part_rings:
            ring.build_quad_tree_structure()
        for ring in self._hole_rings:
            ring.build_quad_tree_structure()
        self.is_quad_tree_structure_built = True

    def contains_point(self, point) -> bool:
        """Test if a polygon contains a point.

        Parameters
        ----------
        point : libpysal.cg.Point
            A point to test for containment.

        Returns
        -------
        contains : bool
            ``True`` if the polygon contains ``point`` otherwise ``False``.

        Examples
        --------
        >>> p = Polygon(
        ...     [Point((0, 0)), Point((4, 0)), Point((4, 5)), Point((2, 3)), Point((0, 5))]
        ... )
        >>> p.contains_point((3, 3))
        True
        >>> p.contains_point((0, 6))
        False
        >>> p.contains_point((2, 2.9))
        True
        >>> p.contains_point((4, 5))
        False
        >>> p.contains_point((4, 0))
        False

        Handles holes.

        >>> p = Polygon(
        ...     [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))],
        ...     [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
        ... )
        >>> p.contains_point((3.0, 3.0))
        False
        >>> p.contains_point((1.0, 1.0))
        True

        Notes
        -----
        Points falling exactly on polygon edges may yield unpredictable results.
        """
        searching = True
        for ring in self._hole_rings:
            if ring.contains_point(point):
                contains = False
                searching = False
                break
        if searching:
            for ring in self._part_rings:
                if ring.contains_point(point):
                    contains = True
                    searching = False
                    break
            if searching:
                contains = False
        return contains

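# Sketch (added): holes are checked first, so a point inside a hole is
# rejected before the exterior rings are consulted.
def _polygon_hole_example():
    p = Polygon(
        [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))],
        [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
    )
    return p.contains_point((3.0, 3.0)), p.contains_point((1.0, 1.0))  # (False, True)
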
Examples -------- >>> p1 = Polygon([Point((0, 0)), Point((1, 0)), Point((1,", "@property def __geo_interface__(self) -> dict: \"\"\"Return ``__geo_interface__`` information lookup.\"\"\" if len(self.parts) > 1:", "float]: \"\"\"Returns the geometric length of the chain computed using 'arcdistance' (meters). \"\"\"", "\"\"\"Tests if the point is greater than another object. Parameters ---------- other :", "area in zip(CP + CH, A)]) / sum(A) cy = sum([pt[1] * area", "Returns ------- self._p2 : libpysal.cg.Point The reset ``p2`` attribute. Examples -------- >>> ls", "the geometric length of the chain computed using 'arcdistance' (meters). \"\"\" def part_perimeter(p:", ">>> len(verts) 4 \"\"\" return sum([part for part in self._vertices], []) @property def", "l2 == l1 True \"\"\" eq = False if not isinstance(other, self.__class__): pass", "representation of the object. \"\"\" if isinstance(obj, (Point, LineSegment, Line, Ray, Chain, Polygon)):", "0)), Point((0, 10)), Point((10, 10)), Point((10, 0))], ... [Point((2, 2)), Point((4, 2)), Point((4,", "verts = [[Point(pt) for pt in part] for part in polygon] parts +=", "against. Examples -------- >>> Point((0, 1)) >= Point((0, 1)) True >>> Point((0, 1))", "length of a ``LineSegment`` object. Examples -------- >>> ls = LineSegment(Point((2, 2)), Point((5,", "of the chain in clockwise order. Examples -------- >>> c = Chain([Point((0, 0)),", "will not be altered. Parameters ---------- vertices : list A list of vertices.", "in clockwise order. Examples -------- >>> p = Polygon( ... [Point((0, 0)), Point((10,", "else: return {\"type\": \"Polygon\", \"coordinates\": self._vertices} def _reset_props(self): \"\"\"Resets the geometric properties of", "other) -> bool: \"\"\"Tests if the point is equal to another object. Parameters", "def __ne__(self, other) -> bool: \"\"\"Tests if the point is not equal to", "Computational geometry code for PySAL: Python Spatial Analysis Library. \"\"\" __author__ = \"<NAME>,", "\"\"\" Examples -------- >>> r = Rectangle(-4, 3, 10, 17) >>> r[:] [-4.0,", "p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))]) >>> p.area 1.0", "= -1 elif dx1 * dx2 < 0 or dy1 * dy2 <", "A point. Returns ------- self._p2 : libpysal.cg.Point The reset ``p2`` attribute. Examples --------", "1] - x[i + 1] * y[i] cx += (x[i] + x[i +", "point. Examples -------- >>> len(Point((1, 2))) 2 \"\"\" return len(self.__loc) def __repr__(self) ->", "self._perimeter = None self._bounding_box = None self._bbox = None self._area = None self._centroid", "Rectangle(0, 0, 4, 4) >>> r.set_centroid(Point((4, 4))) >>> r.left 2.0 >>> r.right 6.0", "centroid. Also known as the 'center of gravity' or 'center of mass'. Examples", "p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))]) >>> len(p1.vertices) 4", "(yi - yj) if r > 0: if yi < 0: w +=", "vertices. Attributes ---------- vertices : list A list of points with the vertices", "The point where the segment ends. Attributes ---------- p1 : libpysal.cg.Point The starting", ">>> c = Chain( ... [ ... [Point((0, 0)), Point((1, 0)), Point((1, 1)),", "m = dy / float(dx) # y - mx b = self._p1[1] -", "i in range(N - 1): A += (x[i] + x[i + 1]) *", "< self.bounding_box.lower bbupper = y > self.bounding_box.upper if bbleft or bbright or bblower", "DO NOT CALL.** Resets attributes which are functions of other attributes. The getters", "p = Polygon( ... [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))], ...", "``0`` if the points are collinear and ``pt`` is in the middle. 
\"\"\"", ": libpysal.cg.Point The new location of the centroid of the polygon. Examples --------", "bool: \"\"\"Tests if the point is greater than or equal to another object.", ">>> ls.b 0.0 \"\"\" def __init__(self, m, b): if m == float(\"inf\"): raise", "r.area 2.0 \"\"\" return abs(self.signed_area) @property def signed_area(self) -> Union[int, float]: if self._area", ">>> r == Point((5, 6)) True \"\"\" return self._p2 def _set_p2(self, p2): \"\"\"**HELPER", "Point((0, 10)), Point((10, 10)), Point((10, 0))], ... [Point((2, 2)), Point((4, 2)), Point((4, 4)),", "in self._part_rings] CH = [ring.centroid for ring in self._hole_rings] AH = [-ring.area for", "ring will be increased significantly. \"\"\" for ring in self._part_rings: ring.build_quad_tree_structure() for ring", "* dx2 < dy2 * dx1: is_ccw = 1 elif dy1 * dx2", "float]: if self._perimeter is None: dist = self.dist v = self.vertices self._perimeter =", "rectangle center to a new specified point. Parameters ---------- new_center : libpysal.cg.Point The", "self._p1[1]) return v1[0] * v2[1] - v1[1] * v2[0] > 0 def is_cw(self,", "c.bounding_box.lower 0.0 >>> c.bounding_box.right 2.0 >>> c.bounding_box.upper 1.0 \"\"\" if self._bounding_box is None:", "vertices[-1]: vertices = vertices[:] + vertices[0:1] # msg = \"Supplied vertices do not", "0) >>> bool(r) False >>> r = Rectangle(0, 0, 1, 1) >>> bool(r)", "def __init__(self, x): self._x = float(x) self.m = float(\"inf\") self.b = float(\"nan\") def", "a list. See Also -------- libpysal.cg.bounding_box \"\"\" if self._bbox is None: self._bbox =", "v1[1] * v2[0] > 0 def is_cw(self, pt) -> bool: \"\"\"Returns whether a", "Rectangle(0, 0, 4, 4) >>> r.height 4.0 \"\"\" return self.upper - self.lower _geoJSON_type_to_Pysal_type", "self.right) / 2, (self.lower + self.upper) / 2) self.left = center[0] + scale", "of the polygon. Examples -------- >>> p = Polygon([Point((0, 0)), Point((2, 0)), Point((2,", "if the polygon contains ``point`` otherwise ``False``. Examples -------- >>> p = Polygon(", "ls = VerticalLine(0) >>> ls.m inf >>> ls.b nan \"\"\" def __init__(self, x):", "box of the polygon. Returns ------- self._bounding_box : libpysal.cg.Rectangle The bounding box of", "of an object. Raises ------ TypeError Raised when ``obj`` is not a supported", "return (self.__loc) == (other.__loc) except AttributeError: return False def __ne__(self, other) -> bool:", "are collinear and ``pt`` is in the middle. \"\"\" p0 = self.p1 p1", "new_center): \"\"\"Moves the rectangle center to a new specified point. Parameters ---------- new_center", "against. Examples -------- >>> Point((0, 1)) > Point((0, 1)) False >>> Point((0, 1))", "@property def vertices(self) -> list: \"\"\"Returns the vertices of the polygon in clockwise", "Polygon)): pass else: if hasattr(obj, \"__geo_interface__\"): geo = obj.__geo_interface__ else: geo = obj", "getters for these attributes (implemented as properties) then recompute their values if they", "float} The :math:`y`-value at which to compute :math:`x`. 
Examples -------- >>> l =", "is_ccw : bool ``1`` if turn from ``self.p1`` to ``self.p2`` to ``pt`` is", "if self._line == False: dx = self._p1[0] - self._p2[0] dy = self._p1[1] -", "ls.intersect(ls2) True \"\"\" ccw1 = self.sw_ccw(other.p2) ccw2 = self.sw_ccw(other.p1) ccw3 = other.sw_ccw(self.p1) ccw4", "None self._len = None def __len__(self) -> int: return self.len @property def len(self)", "singleton tuple of :math:`(i)` with :math:`i` as the index of the desired dimension.", "for ring in self._part_rings] AP = [ring.area for ring in self._part_rings] CH =", "p2[1] - p0[1] if dy1 * dx2 < dy2 * dx1: is_ccw =", "upper : float Maximum y-value of the rectangle. Examples -------- >>> r =", "rings and holes. The ordering of the vertices is ignored and will not", "== 0: pass else: point_contained = True else: point_contained = self._quad_tree_structure.contains_point(point) return point_contained", "- m * self._p1[0] self._line = Line(m, b) return self._line class VerticalLine(Geometry): \"\"\"Geometric", "0)), Point((10, 0))) >>> ls1 = LineSegment(Point((5, 0)), Point((10, 1))) >>> ls.intersect(ls1) True", "with the vertices of the polygon in clockwise order. len : int The", "point_contained = self._quad_tree_structure.contains_point(point) return point_contained class Polygon(Geometry): \"\"\"Geometric representation of polygon objects. Returns", "dimensions. Parameters ---------- *args : tuple A tuple of :math:`(i,j)` with :math:`i` as", "= 1 return is_ccw def get_swap(self): \"\"\"Returns a ``LineSegment`` object which has its", "... ] ... ) >>> len(c.parts) 2 \"\"\" return [[v for v in", "self.o = origin self.p = second_p class Chain(Geometry): \"\"\"Geometric representation of a chain,", "do. In GEOS, etc, polygons may only have a single exterior ring, all", "holes of the polygon in clockwise order. Examples -------- >>> p = Polygon(", "* v2[1] - v1[1] * v2[0] > 0 def is_cw(self, pt) -> bool:", "(pt[0] - self._p1[0], pt[1] - self._p1[1]) return v1[0] * v2[1] - v1[1] *", "ls._set_p2(Point((3, -1))) >>> r == Point((3.0, -1.0)) True \"\"\" self._p2 = p2 self._reset_props()", "This structure helps test if a point is inside the ring. \"\"\" def", "None: self._bounding_box = Rectangle( min([self._p1[0], self._p2[0]]), min([self._p1[1], self._p2[1]]), max([self._p1[0], self._p2[0]]), max([self._p1[1], self._p2[1]]), )", "v in vertices] A = self.signed_area N = len(self) cx = 0 cy", "libpysal.cg.Point An object to test equality against. Examples -------- >>> Point((0, 1)) >=", ": libpysal.cg.Line The line on which the segment lies. Examples -------- >>> ls", "the point is equal to another object. Parameters ---------- other : libpysal.cg.Point An", "list): self._vertices = [part for part in vertices] else: self._vertices = [vertices] self._reset_props()", "0 else: is_ccw = 1 return is_ccw def get_swap(self): \"\"\"Returns a ``LineSegment`` object", "dy = self._p1[1] - self._p2[1] if dx == 0 and dy == 0:", "@property def area(self) -> Union[int, float]: \"\"\"Returns the area of the Rectangle. 
Examples", "self._bounding_box.left, self._bounding_box.lower, self._bounding_box.right, self._bounding_box.upper, ) @property def len(self) -> float: \"\"\"Returns the length", "arclen(self) -> Union[int, float]: \"\"\"Returns the geometric length of the chain computed using", "1.0 / (6 * A) * cy self._centroid = Point((cx, cy)) return self._centroid", "return math.hypot(v1[0] - v2[0], v1[1] - v2[1]) @property def perimeter(self) -> Union[int, float]:", "tuple(map(float, loc)) @classmethod def __from_geo_interface__(cls, geo): return cls(geo[\"coordinates\"]) @property def __geo_interface__(self): return {\"type\":", "to test equality against. Examples -------- >>> Point((0, 1)) >= Point((0, 1)) True", "obj if hasattr(geo, \"type\"): raise TypeError(\"%r does not appear to be a shape", "dx = self._p1[0] - self._p2[0] dy = self._p1[1] - self._p2[1] if dx ==", "last point must be the same. Open rings will be closed. This class", "= self._p1[1] - m * self._p1[0] self._line = Line(m, b) return self._line class", "dy2 * dx1: is_ccw = -1 elif dx1 * dx2 < 0 or", "contains = False searching = False break if searching: for ring in self._part_rings:", "== other[:] return False def __add__(self, other): x, y, X, Y = self[:]", "list of points of the vertices of the chain in order. len :", "v in vertices] N = len(self) A = 0.0 for i in range(N", "Maximum x-value of the rectangle. upper : float Maximum y-value of the rectangle.", "Point((1, 1))] ... ] ... ) >>> len(p.parts) 2 \"\"\" return [[v for", "def part_area(pv: list) -> float: __area = 0 for i in range(-1, len(pv)", "The ``Line`` object of the line on which the segment lies. Examples --------", "when slope is zero.\") return (y - self.b) / self.m def y(self, x:", "segment. p2 : Point The ending point of the line segment. bounding_box :", "= None self._len = None self._line = False def _get_p1(self): \"\"\"**HELPER METHOD. DO", "self._len @property def vertices(self) -> list: \"\"\"Returns the vertices of the polygon in", "elif yj == 0 and xj > 0: if yi < 0: w", "center. Parameters ---------- scale : int, float The ratio of the new scale", "p[1] == 4.3 True \"\"\" return self.__loc.__getitem__(*args) def __getslice__(self, *args) -> slice: \"\"\"Return", "of the polygon as a list. Returns ------- self._bbox : list The bounding", "be a shape object.\" % (obj)) geo_type = geo[\"type\"].lower() # if geo_type.startswith('multi'): #", "A point list or list of point lists. Attributes ---------- vertices : list", "vertices. holes : list A list of sub-polygons to be considered as holes.", "\"\"\" line_seg = LineSegment(self._p2, self._p1) return line_seg @property def bounding_box(self): \"\"\"Returns the minimum", "4))) >>> r.left 2.0 >>> r.right 6.0 >>> r.lower 2.0 >>> r.upper 6.0", "pt in part] for part in geo[\"coordinates\"]] return cls(verts[0:1], verts[1:]) @property def __geo_interface__(self)", "1: geo = { \"type\": \"MultiPolygon\", \"coordinates\": [[part] for part in self.parts], }", "bbox(self): \"\"\"Returns the bounding box of the polygon as a list. 
Returns -------", "is None: self._arclen = sum([part_perimeter(part) for part in self._vertices]) return self._arclen @property def", "None: dist = self.dist v = self.vertices self._perimeter = sum( [dist(v[i], v[i +", "if self.m == 0: raise ArithmeticError(\"Cannot solve for 'x' when slope is zero.\")", ": list A list of vertices or a list of lists of vertices.", "point_contained = False if self._quad_tree_structure is None: x, y = point # bbox", "0: if yj > 0: w += 0.5 else: w -= 0.5 elif", "LineSegment(Point((7, -1)), Point((7, 2))) >>> ls.intersect(ls2) True \"\"\" ccw1 = self.sw_ccw(other.p2) ccw2 =", "self.vertices} else: return {\"type\": \"MultiLineString\", \"coordinates\": self.parts} def _reset_props(self): \"\"\"**HELPER METHOD. DO NOT", "/ 2) self.left = center[0] + scale * (self.left - center[0]) self.right =", "pt): \"\"\"Sedgewick test for ``pt`` being ccw of segment. Returns ------- is_ccw :", "Another line segment to check against. Examples -------- >>> ls = LineSegment(Point((5, 0)),", "vertices : list A list of points with the vertices of the ring.", "self._x = float(x) self.m = float(\"inf\") self.b = float(\"nan\") def x(self, y) ->", "if the point is not equal to another object. Parameters ---------- other :", "in geo[\"coordinates\"]: verts = [[Point(pt) for pt in part] for part in polygon]", "# return \"LINESTRING ({} {}, {} {})\".format( # self._p1[0], self._p1[1], self._p2[0], self._p2[1] #", ">>> p.bounding_box.upper 1.0 \"\"\" if self._bounding_box is None: vertices = self.vertices self._bounding_box =", "less than another object. Parameters ---------- other : libpysal.cg.Point An object to test", "2)), Point((3, 3))) >>> l = ls.line >>> l.m 1.0 >>> l.b 0.0", "for polygon in geo[\"coordinates\"]: verts = [[Point(pt) for pt in part] for part", "10, 17) >>> r.left #minx -4.0 >>> r.lower #miny 3.0 >>> r.right #maxx", "in range(rn)] ys = [self.vertices[i][1] - point[1] for i in range(rn)] w =", "= True for ring in self._hole_rings: if ring.contains_point(point): contains = False searching =", "in as the slope. Examples -------- >>> ls = Line(1, 0) >>> ls.m", "float(\"nan\") class Line(Geometry): \"\"\"Geometric representation of line objects. Parameters ---------- m : {int,", "\"\"\" return abs(self.signed_area) @property def signed_area(self) -> Union[int, float]: if self._area is None:", "2)), Point((1, 1))] ... ] ... ) >>> len(p.parts) 2 \"\"\" return [[v", "which to compute :math:`x`. Examples -------- >>> l = VerticalLine(0) >>> l.x(0.25) 0.0", "other segment. Parameters ---------- other : libpysal.cg.LineSegment Another line segment to check against.", "#maxx 10.0 >>> r.upper #maxy 17.0 \"\"\" def __init__(self, left, lower, right, upper):", "compute :math:`x`. Examples -------- >>> l = VerticalLine(0) >>> l.x(0.25) 0.0 \"\"\" return", "line segment to check against. Examples -------- >>> ls = LineSegment(Point((5, 0)), Point((10,", "v = self.vertices self._perimeter = sum( [dist(v[i], v[i + 1]) for i in", "-> bool: \"\"\"Tests if the point is greater than or equal to another", "(other.__loc) except AttributeError: return False def __ne__(self, other) -> bool: \"\"\"Tests if the", "def bounding_box(self): \"\"\"Returns the minimum bounding box of a ``LineSegment`` object. Returns -------", "other : libpysal.cg.LineSegment Another line segment to check against. Examples -------- >>> ls", "float The ratio of the new scale to the old scale (e.g. 
1.0", "shift[1] self.upper = self.upper + shift[1] def set_scale(self, scale): \"\"\"Rescales the rectangle around", "_reset_props(self): \"\"\"**HELPER METHOD. DO NOT CALL.** Resets attributes which are functions of other", "LineSegment(Point((5, 1)), Point((10, 1))) >>> ls.intersect(ls2) False >>> ls2 = LineSegment(Point((7, -1)), Point((7,", "cw of a segment. Examples -------- >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))", "in self._part_rings: if ring.contains_point(point): contains = True searching = False break if searching:", "Point((10, 10)), Point((10, 0))], ... [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]", "of the rectangle. right : float Maximum x-value of the rectangle. upper :", "@property def len(self) -> int: \"\"\"Returns the geometric length of the chain. Examples", "lambda part_type: sum([part_perimeter(part) for part in part_type]) if self._perimeter is None: self._perimeter =", "self._p2: eq = True return eq def intersect(self, other) -> bool: \"\"\"Test whether", "range(len(self.vertices) - 1): yi = ys[i] yj = ys[i + 1] xi =", "3.0)' \"\"\" return str(self.__loc) # return \"POINT ({} {})\".format(*self.__loc) class LineSegment(Geometry): \"\"\"Geometric representation", "-------- >>> ls = LineSegment(Point((0, 0)), Point((5, 0))) >>> ls.is_ccw(Point((2, 2))) True >>>", "return cls(verts) @property def __geo_interface__(self) -> dict: if len(self.parts) == 1: return {\"type\":", "0))) >>> ls1 = LineSegment(Point((5, 0)), Point((10, 1))) >>> ls.intersect(ls1) True >>> ls2", "1]) * f cx = 1.0 / (6 * A) * cx cy", "None self._area = None self._centroid = None self._quad_tree_structure = None def __len__(self) ->", "return (self.__loc) < (other.__loc) def __le__(self, other) -> bool: \"\"\"Tests if the point", "else: m = dy / float(dx) # y - mx b = self._p1[1]", "0))]) >>> p1.len 4 >>> len(p1) 4 \"\"\" if self._len is None: self._len", "of the line segment. Parameters ---------- p2 : libpysal.cg.Point A point. Returns -------", "= ys[i] yj = ys[i + 1] xi = xs[i] xj = xs[i", "__from_geo_interface__(cls, geo: dict): \"\"\"While PySAL does not differentiate polygons and multipolygons GEOS, Shapely,", "the line. ``x`` is also an attribute. Examples -------- >>> ls = VerticalLine(0)", "closed, the first and last point must be the same. Open rings will", "lower, right, upper]``. area : float The area enclosed by the polygon. centroid", "def vertices(self) -> list: \"\"\"Returns the vertices of the polygon in clockwise order.", "i in range(-1, len(self) - 1)] ) return self._perimeter @property def bounding_box(self): \"\"\"Returns", "if standalone.is_clockwise(part): return part[:] else: return part[::-1] vl = list(vertices) if isinstance(vl[0], list):", "verts[1:]) @property def __geo_interface__(self) -> dict: \"\"\"Return ``__geo_interface__`` information lookup.\"\"\" if len(self.parts) >" ]
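The doctest fragments above already demonstrate the public API. Collected into one runnable sketch, assuming a current libpysal install that re-exports these classes from libpysal.cg, as the docstrings indicate:

from libpysal.cg import LineSegment, Point, Polygon, Rectangle

# 10x10 square with a 1x1 hole, exactly as in the recovered Polygon doctest.
p = Polygon(
    [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
    [Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))],
)
print(p.area)              # 99.0 -- exterior area minus the hole

# Segment intersection, from the recovered LineSegment doctest.
ls = LineSegment(Point((5, 0)), Point((10, 0)))
ls2 = LineSegment(Point((7, -1)), Point((7, 2)))
print(ls.intersect(ls2))   # True

# Rectangle supports slice access over (left, lower, right, upper).
r = Rectangle(-4, 3, 10, 17)
print(r[:])                # [-4.0, 3.0, 10.0, 17.0]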
# pronebo_dj/main/migrations/0004_faq.py
# Generated by Django 3.2.4 on 2021-06-07 14:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0003_item_price'),
    ]

    operations = [
        migrations.CreateModel(
            name='Faq',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255, verbose_name='Название')),    # "Title"
                ('question', models.CharField(max_length=255, verbose_name='Вопрос')),   # "Question"
                ('answer', models.CharField(max_length=1000, verbose_name='Ответ')),     # "Answer"
            ],
            options={
                'verbose_name': 'Вопрос/Ответ',                   # "Question/Answer"
                'verbose_name_plural': 'Вопрос(-ов)/Ответ(-ов)',  # "Question(s)/Answer(s)"
            },
        ),
    ]
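For context, a model definition that would generate this migration. This is a sketch inferred from the CreateModel operation above; the project's actual main/models.py is not part of this dump, and the __str__ method is an assumption:

from django.db import models


class Faq(models.Model):
    title = models.CharField(max_length=255, verbose_name='Название')
    question = models.CharField(max_length=255, verbose_name='Вопрос')
    answer = models.CharField(max_length=1000, verbose_name='Ответ')

    class Meta:
        verbose_name = 'Вопрос/Ответ'
        verbose_name_plural = 'Вопрос(-ов)/Ответ(-ов)'

    def __str__(self):  # assumption: not recorded in the migration
        return self.title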
from rest_framework import parsers, renderers
from rest_framework.authtoken.models import Token                    # present in the original module but unused below
from rest_framework.authtoken.serializers import AuthTokenSerializer  # present in the original module but unused below
from rest_framework.response import Response
from rest_framework.views import APIView

from .serializers import ClientTokenSerializer
from .models import ClientToken, App


class ObtainClientAuth(APIView):
    # Open endpoint: no throttling or permission checks; clients
    # authenticate with an app id and client secret instead.
    throttle_classes = ()
    permission_classes = ()
    parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
    renderer_classes = (renderers.JSONRenderer,)
    serializer_class = ClientTokenSerializer

    def post(self, request, *args, **kwargs):
        serializer = self.serializer_class(data=request.data)
        serializer.is_valid(raise_exception=True)
        client_secret = serializer.data['client_secret']
        app_id = serializer.data['app_id']
        app = App.objects.get(pk=app_id)
        # Reuse an existing token for this (client_secret, app) pair, or mint one.
        token, created = ClientToken.objects.get_or_create(client_secret=client_secret, app=app)
        return Response({'token': token.key})


obtain_client_auth = ObtainClientAuth.as_view()
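A minimal usage sketch. The host and route are assumptions for illustration; only the view itself and the client_secret/app_id fields it reads are confirmed by the code above:

# Hypothetical wiring in urls.py (route name is an assumption):
#     from django.urls import path
#     from .views import obtain_client_auth
#     urlpatterns = [path('api/client-auth/', obtain_client_auth)]

import requests

# The client exchanges its credentials for a reusable token.
resp = requests.post(
    "https://example.com/api/client-auth/",         # assumed host and route
    data={"client_secret": "abc123", "app_id": 1},  # fields read by the view
)
print(resp.json())  # {'token': '<token key>'}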
class Node:
    def __init__(self, value):
        self.value = value
        self.next = None
        self.prev = None


class DoublyLinkedList:
    def __init__(self, value):
        new_node = Node(value)
        self.head = new_node
        self.tail = new_node
        self.length = 1

    def append(self, value):
        new_node = Node(value)
        if self.length == 0:
            self.head = new_node
            self.tail = new_node
        else:
            # Point curr tail's next to new node
            self.tail.next = new_node
            # Point new node's prev back to curr tail
            new_node.prev = self.tail
            # Set the tail as the new node
            self.tail = new_node
        # Increment length
        self.length += 1
        return True

    def pop(self):
        # Empty list
        if self.length == 0:
            return None
        temp = self.tail
        # List with only one item
        if self.length == 1:
            self.head = None
            self.tail = None
        # Multiple items in list
        else:
            self.tail = self.tail.prev
            self.tail.next = None
            temp.prev = None
        self.length -= 1
        return temp

    def prepend(self, value):
        new_node = Node(value)
        # Empty list
        if self.length == 0:
            self.head = new_node
            self.tail = new_node
        else:
            # New node's next points to curr head
            new_node.next = self.head
            # Curr head's prev set to new node
            self.head.prev = new_node
            # Set the head to the new head
            self.head = new_node
        # Increment the length by 1
        self.length += 1
        return self.head

    def pop_first(self):
        # Empty list
        if self.length == 0:
            return None
        # Old head
        temp = self.head
        # One item in list
        if self.length == 1:
            self.head = None
            self.tail = None
        else:
            # Set the head to the new head
            self.head = self.head.next
            # Point the new head's prev to None instead of old head
            self.head.prev = None
            # Point old head's next to None
            temp.next = None
        self.length -= 1
        return temp.value

    def get(self, index):
        if index < 0 or index >= self.length:
            return None
        temp = self.head
        mid = self.length // 2
        # Walk from whichever end is closer to index
        if mid >= index:
            for _ in range(index):
                temp = temp.next
        else:
            temp = self.tail
            for _ in range(self.length - 1, index, -1):
                temp = temp.prev
        return temp

    def set_val(self, index, value):
        temp = self.get(index)
        if temp:
            temp.value = value
            return True
        return False

    def insert(self, index, value):
        if index < 0 or index > self.length:
            return None
        if index == 0:
            return self.prepend(value)
        if index == self.length:
            return self.append(value)
        new_node = Node(value)
        before = self.get(index - 1)
        after = before.next
        # Set the pointers for before
        before.next = new_node
        new_node.prev = before
        # Set the pointers for after
        after.prev = new_node
        new_node.next = after
        self.length += 1
        return True

    def remove(self, index):
        if index < 0 or index >= self.length:
            return None
        if index == 0:
            return self.pop_first()
        if index == self.length - 1:
            return self.pop()
        node_to_remove = self.get(index)
        before = node_to_remove.prev
        after = node_to_remove.next
        # Set pointers around the removed node
        before.next = after
        after.prev = before
        # Remove connections
        node_to_remove.next = None
        node_to_remove.prev = None
        self.length -= 1
        return node_to_remove

    def print_list(self):
        temp = self.head
        while temp:
            if not temp.next:
                print(temp.value, end=" <--> None \n")
            else:
                print(temp.value, end=" <--> ")
            temp = temp.next


my_dbl_list = DoublyLinkedList(1)
my_dbl_list.append(2)
my_dbl_list.append(5)
my_dbl_list.prepend(15)
# print(my_dbl_list.set_val(3, 8))
my_dbl_list.insert(1, 69)
print(my_dbl_list.remove(2).value)
my_dbl_list.print_list()
self.head = new_node self.tail", "node next point to curr head new_node.next = self.head #curr head prev set", "or index >= self.length: return None if index == 0: return self.prepend(value) if", "value return True return False def insert(self, index, value): if index < 0", "range(index): temp = temp.next else: temp = self.tail for _ in range(self.length -", "== 0: self.head = new_node self.tail = new_node else: # New node next", "_ in range(index): temp = temp.next else: temp = self.tail for _ in", "print_list(self): temp = self.head while temp: if not temp.next: print(temp.value, end=\" <--> None", "#Set the head to the new head self.head = self.head.next #Point the new", "self.length: return None if index == 0: return self.pop_first() if index == self.length", "0: self.head = new_node self.tail = new_node else: # New node next point", "in range(index): temp = temp.next else: temp = self.tail for _ in range(self.length", "the tail as the new node self.tail = new_node #increment length self.length +=", "by 1 self.length += 1 return self.head def pop_first(self): #Empty List if self.length", "after.prev = new_node new_node.next = after return True def remove(self, index): if index", "None self.length -=1 return node_to_remove def print_list(self): temp = self.head while temp: if", "node's prev back to curr tail new_node.prev = self.tail #Set the tail as", "curr tail new_node.prev = self.tail #Set the tail as the new node self.tail", "return temp.value def get(self, index): if index < 0 or index >= self.length:", "self.length == 0: self.head = new_node self.tail = new_node else: # New node", "point to curr head new_node.next = self.head #curr head prev set to new", "return None #Old head temp = self.head #One item in list if self.length", "if mid >= index: for _ in range(index): temp = temp.next else: temp", "node_to_remove def print_list(self): temp = self.head while temp: if not temp.next: print(temp.value, end=\"", "if index < 0 or index >= self.length: return None if index ==", "- 1: return self.pop() node_to_remove = self.get(index) before = node_to_remove.prev after = node_to_remove.next", "after return True def remove(self, index): if index < 0 or index >=", "#Empty List if self.length == 0: return None #List with only one item", "= None class DoublyLinkedList: def __init__(self, value): new_node = Node(value) self.head = new_node", "list else: temp = self.tail self.tail = self.tail.prev self.tail.next = None temp.prev =", "curr tail's next to new node self.tail.next = new_node #Point new node's prev", "before = node_to_remove.prev after = node_to_remove.next #Set pointers before.next = after after.prev =", "self.tail.next = None temp.prev = None self.length -= 1 return temp def prepend(self,", "item if self.length == 1: self.head = None self.tail = None self.length -=", "None if index == 0: return self.prepend(value) if index == self.length: return self.append(value)", "#Set the pointers for before before.next = new_node new_node.prev = before #Set the", "self.length == 1: self.head = None self.tail = None else: #Set the head", "self.head.next #Point the new head's prev to None instead of old head self.head.prev", "next to new node self.tail.next = new_node #Point new node's prev back to", "self.length += 1 return True def pop(self): #Empty List if self.length == 0:", "prepend(self, value): new_node = Node(value) #Empty list if self.length == 0: self.head =", "#Set the tail as the new node self.tail = new_node #increment length self.length", "= value self.next = None self.prev = 
None class DoublyLinkedList: def __init__(self, value):", "def pop_first(self): #Empty List if self.length == 0: return None #Old head temp", "after = before.next #Set the pointers for before before.next = new_node new_node.prev =", "temp.value = value return True return False def insert(self, index, value): if index", "or index >= self.length: return None else: temp = self.head mid = self.length", "= value return True return False def insert(self, index, value): if index <", "temp.next else: temp = self.tail for _ in range(self.length - 1, index, 1):", "#Point old head's next to None temp.next = None self.length -= 1 return", "new_node else: # New node next point to curr head new_node.next = self.head", "#Point the new head's prev to None instead of old head self.head.prev =", "temp = temp.next my_dbl_list = DoublyLinkedList(1) my_dbl_list.append(2) my_dbl_list.append(5) my_dbl_list.prepend(15) # print(my_dbl_list.set_val(3,8)) my_dbl_list.insert(1, 69)", "0: return self.pop_first() if index == self.length - 1: return self.pop() node_to_remove =", "if index == 0: return self.prepend(value) if index == self.length: return self.append(value) new_node", "index == self.length - 1: return self.pop() node_to_remove = self.get(index) before = node_to_remove.prev", "-= 1 return temp.value def get(self, index): if index < 0 or index", "def append(self, value): new_node = Node(value) if self.length == 0: self.head = new_node", "self.head #curr head prev set to new node self.head.prev = new_node #Set the", "-= 1 return temp def prepend(self, value): new_node = Node(value) #Empty list if", "the new head self.head = self.head.next #Point the new head's prev to None", "else: # New node next point to curr head new_node.next = self.head #curr", "value): new_node = Node(value) self.head = new_node self.tail = new_node self.length = 1", "value): self.value = value self.next = None self.prev = None class DoublyLinkedList: def", "None else: #Set the head to the new head self.head = self.head.next #Point", "self.get(index) before = node_to_remove.prev after = node_to_remove.next #Set pointers before.next = after after.prev", "= new_node #Set the head to the new head self.head = new_node #Increment", ">= self.length: return None if index == 0: return self.prepend(value) if index ==", "#One item in list if self.length == 1: self.head = None self.tail =", "the new node self.tail = new_node #increment length self.length += 1 return True", "remove(self, index): if index < 0 or index >= self.length: return None if", "prev set to new node self.head.prev = new_node #Set the head to the", "self.tail = None else: #Set the head to the new head self.head =", "def set_val(self, index, value): temp = self.get(index) if temp: temp.value = value return", "for _ in range(index): temp = temp.next else: temp = self.tail for _", "self.head.prev = None #Point old head's next to None temp.next = None self.length", "curr head new_node.next = self.head #curr head prev set to new node self.head.prev", "self.head = self.head.next #Point the new head's prev to None instead of old", "= new_node self.tail = new_node self.length = 1 else: #Point curr tail's next", "to curr tail new_node.prev = self.tail #Set the tail as the new node", "new_node self.tail = new_node self.length = 1 else: #Point curr tail's next to", "new head self.head = self.head.next #Point the new head's prev to None instead", "self.get(index) if temp: temp.value = value return True return False def insert(self, index,", "self.pop_first() if index == self.length - 1: 
return self.pop() node_to_remove = self.get(index) before", "return None if index == 0: return self.prepend(value) if index == self.length: return", "tail new_node.prev = self.tail #Set the tail as the new node self.tail =", "return False def insert(self, index, value): if index < 0 or index >=", "back to curr tail new_node.prev = self.tail #Set the tail as the new", "if not temp.next: print(temp.value, end=\" <--> None \\n\") else: print(temp.value, end=\" <--> \")", "= 1 def append(self, value): new_node = Node(value) if self.length == 0: self.head", "== 0: return None #Old head temp = self.head #One item in list", "None temp.prev = None self.length -= 1 return temp def prepend(self, value): new_node", "pointers for before before.next = new_node new_node.prev = before #Set the pointers for", "return True return False def insert(self, index, value): if index < 0 or", "1 def append(self, value): new_node = Node(value) if self.length == 0: self.head =", "new_node = Node(value) if self.length == 0: self.head = new_node self.tail = new_node", "self.head.prev = new_node #Set the head to the new head self.head = new_node", "new_node.prev = self.tail #Set the tail as the new node self.tail = new_node", "None \\n\") else: print(temp.value, end=\" <--> \") temp = temp.next my_dbl_list = DoublyLinkedList(1)", "the pointers for before before.next = new_node new_node.prev = before #Set the pointers", "#Set the head to the new head self.head = new_node #Increment the length", "0 or index >= self.length: return None if index == 0: return self.pop_first()", "0 or index >= self.length: return None if index == 0: return self.prepend(value)", "as the new node self.tail = new_node #increment length self.length += 1 return", "0 or index >= self.length: return None else: temp = self.head mid =", "= None else: #Set the head to the new head self.head = self.head.next", "before = self.get(index - 1) after = before.next #Set the pointers for before", "temp def set_val(self, index, value): temp = self.get(index) if temp: temp.value = value", "= Node(value) #Empty list if self.length == 0: self.head = new_node self.tail =", "new_node.next = after return True def remove(self, index): if index < 0 or", "True def pop(self): #Empty List if self.length == 0: return None #List with", "temp: temp.value = value return True return False def insert(self, index, value): if", "= 1 else: #Point curr tail's next to new node self.tail.next = new_node", "else: #Set the head to the new head self.head = self.head.next #Point the", "index): if index < 0 or index >= self.length: return None if index", "2 if mid >= index: for _ in range(index): temp = temp.next else:", "= None temp.prev = None self.length -= 1 return temp def prepend(self, value):", "if self.length == 0: self.head = new_node self.tail = new_node else: # New", "head to the new head self.head = self.head.next #Point the new head's prev", "def insert(self, index, value): if index < 0 or index >= self.length: return", "index == self.length: return self.append(value) new_node = Node(value) before = self.get(index - 1)", "= self.head #curr head prev set to new node self.head.prev = new_node #Set", "in list else: temp = self.tail self.tail = self.tail.prev self.tail.next = None temp.prev", "the head to the new head self.head = new_node #Increment the length by", "= new_node self.length = 1 else: #Point curr tail's next to new node", "== 0: return self.prepend(value) if index == self.length: return self.append(value) new_node = Node(value)", "for before before.next = new_node 
new_node.prev = before #Set the pointers for after", "pointers for after after.prev = new_node new_node.next = after return True def remove(self,", "= self.head mid = self.length // 2 if mid >= index: for _", "Node(value) #Empty list if self.length == 0: self.head = new_node self.tail = new_node", "= None self.length -= 1 return temp.value def get(self, index): if index <", "next point to curr head new_node.next = self.head #curr head prev set to", "return temp def set_val(self, index, value): temp = self.get(index) if temp: temp.value =", "= None #Point old head's next to None temp.next = None self.length -=", "_ in range(self.length - 1, index, 1): temp = temp.prev return temp def", "#Multiple items in list else: temp = self.tail self.tail = self.tail.prev self.tail.next =", "index >= self.length: return None else: temp = self.head mid = self.length //", "self.prepend(value) if index == self.length: return self.append(value) new_node = Node(value) before = self.get(index", "None #Old head temp = self.head #One item in list if self.length ==", "self.length == 0: return None #List with only one item if self.length ==", "return None if index == 0: return self.pop_first() if index == self.length -", "self.head def pop_first(self): #Empty List if self.length == 0: return None #Old head", "temp.prev return temp def set_val(self, index, value): temp = self.get(index) if temp: temp.value", "if self.length == 0: return None #Old head temp = self.head #One item", "self.length // 2 if mid >= index: for _ in range(index): temp =", "if self.length == 1: self.head = None self.tail = None self.length -= 1", "temp.prev = None self.length -= 1 return temp def prepend(self, value): new_node =", "None else: temp = self.head mid = self.length // 2 if mid >=", "before.next #Set the pointers for before before.next = new_node new_node.prev = before #Set", "head to the new head self.head = new_node #Increment the length by 1", "the new head self.head = new_node #Increment the length by 1 self.length +=", "// 2 if mid >= index: for _ in range(index): temp = temp.next", "self.tail = new_node else: # New node next point to curr head new_node.next" ]
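# A quick sanity-check sketch (not from the original file) exercising
# insert(), get() and remove(); the names dll/removed are local to this sketch.
dll = DoublyLinkedList(10)
dll.append(20)
dll.append(30)       # 10 <--> 20 <--> 30
dll.insert(1, 15)    # 10 <--> 15 <--> 20 <--> 30
assert dll.length == 4
assert dll.get(1).value == 15
assert dll.get(3).value == 30   # reached by walking back from the tail
removed = dll.remove(2)         # drops the 20
assert removed.value == 20
assert dll.get(2).value == 30
assert dll.length == 3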
[ "create_comment, create_staff, create_quest, create_quest_comment) from services import Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self):", "drawquest.tests.tests_helpers import (CanvasTestCase, create_content, create_user, create_group, create_comment, create_staff, create_quest, create_quest_comment) from services import", "(CanvasTestCase, create_content, create_user, create_group, create_comment, create_staff, create_quest, create_quest_comment) from services import Services, override_service", "Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that 1 + 1 always", "from drawquest.tests.tests_helpers import (CanvasTestCase, create_content, create_user, create_group, create_comment, create_staff, create_quest, create_quest_comment) from services", "override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that 1 + 1 always equals", "from services import Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that 1", "create_user, create_group, create_comment, create_staff, create_quest, create_quest_comment) from services import Services, override_service class TestSimpleThing(CanvasTestCase):", "import Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that 1 + 1", "import (CanvasTestCase, create_content, create_user, create_group, create_comment, create_staff, create_quest, create_quest_comment) from services import Services,", "TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that 1 + 1 always equals 2. \"\"\"", "create_content, create_user, create_group, create_comment, create_staff, create_quest, create_quest_comment) from services import Services, override_service class", "test_basic_addition(self): \"\"\" Tests that 1 + 1 always equals 2. \"\"\" self.failUnlessEqual(1 +", "create_group, create_comment, create_staff, create_quest, create_quest_comment) from services import Services, override_service class TestSimpleThing(CanvasTestCase): def", "create_quest, create_quest_comment) from services import Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests", "\"\"\" Tests that 1 + 1 always equals 2. \"\"\" self.failUnlessEqual(1 + 1,", "Tests that 1 + 1 always equals 2. \"\"\" self.failUnlessEqual(1 + 1, 2)", "create_staff, create_quest, create_quest_comment) from services import Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\"", "services import Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that 1 +", "class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that 1 + 1 always equals 2.", "<gh_stars>10-100 from drawquest.tests.tests_helpers import (CanvasTestCase, create_content, create_user, create_group, create_comment, create_staff, create_quest, create_quest_comment) from", "create_quest_comment) from services import Services, override_service class TestSimpleThing(CanvasTestCase): def test_basic_addition(self): \"\"\" Tests that", "def test_basic_addition(self): \"\"\" Tests that 1 + 1 always equals 2. \"\"\" self.failUnlessEqual(1" ]
[ "size for size, car in data) my_data = 'get uuuuuuuuuuuuuuuup' print(list(my_data)) compressed =", "group in groupby(data)) def decompress(data): return (car * size for size, car in", "name, group in groupby(data)) def decompress(data): return (car * size for size, car", "from itertools import groupby def compress(data): return ((len(list(group)), name) for name, group in", "def decompress(data): return (car * size for size, car in data) my_data =", "import groupby def compress(data): return ((len(list(group)), name) for name, group in groupby(data)) def", "def compress(data): return ((len(list(group)), name) for name, group in groupby(data)) def decompress(data): return", "in groupby(data)) def decompress(data): return (car * size for size, car in data)", "name) for name, group in groupby(data)) def decompress(data): return (car * size for", "groupby(data)) def decompress(data): return (car * size for size, car in data) my_data", "return (car * size for size, car in data) my_data = 'get uuuuuuuuuuuuuuuup'", "((len(list(group)), name) for name, group in groupby(data)) def decompress(data): return (car * size", "groupby def compress(data): return ((len(list(group)), name) for name, group in groupby(data)) def decompress(data):", "for size, car in data) my_data = 'get uuuuuuuuuuuuuuuup' print(list(my_data)) compressed = compress(my_data)", "# !/usr/bin/python from itertools import groupby def compress(data): return ((len(list(group)), name) for name,", "decompress(data): return (car * size for size, car in data) my_data = 'get", "!/usr/bin/python from itertools import groupby def compress(data): return ((len(list(group)), name) for name, group", "size, car in data) my_data = 'get uuuuuuuuuuuuuuuup' print(list(my_data)) compressed = compress(my_data) print(''.join(decompress(compressed)))", "(car * size for size, car in data) my_data = 'get uuuuuuuuuuuuuuuup' print(list(my_data))", "* size for size, car in data) my_data = 'get uuuuuuuuuuuuuuuup' print(list(my_data)) compressed", "compress(data): return ((len(list(group)), name) for name, group in groupby(data)) def decompress(data): return (car", "itertools import groupby def compress(data): return ((len(list(group)), name) for name, group in groupby(data))", "for name, group in groupby(data)) def decompress(data): return (car * size for size,", "return ((len(list(group)), name) for name, group in groupby(data)) def decompress(data): return (car *" ]
[ "TestDBConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request): db_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url( self, context, pyramid_request,", "BasicLTILaunchViews.url_configured_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.url_configured_basic_lti_launch(), and return whatever", "call BasicLTILaunchViews.configure_module_item(), and return whatever BasicLTILaunchViews.configure_module_item() returns. \"\"\" # The document_url, resource_link_id and", "\"\"\" # The file_id param is always present when canvas_file_basic_lti_launch() # is called.", "def db_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.db_configured_basic_lti_launch(). Set up the appropriate conditions and then", "BasicLTILaunchViews(context, pyramid_request) return views.db_configured_basic_lti_launch() def url_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.url_configured_basic_lti_launch(). Set up the", "a function that calls the view method to be tested. This is a", "is always present when # url_configured_basic_lti_launch() is called. The url_configured=True view # predicate", "patch(\"lms.views.basic_lti_launch.BearerTokenSchema\") @pytest.fixture(autouse=True) def LtiLaunches(patch): return patch(\"lms.views.basic_lti_launch.LtiLaunches\") @pytest.fixture(autouse=True) def ModuleItemConfiguration(patch): return patch(\"lms.views.basic_lti_launch.ModuleItemConfiguration\") @pytest.fixture def", "context.h_group.authority_provided_id ) @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, unconfigured_basic_lti_launch_caller, ] ) def view_caller(self, request):", "for each parametrized version of this fixture. See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return request.param class", "db_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url( self, context, pyramid_request, ModuleItemConfiguration ): db_configured_basic_lti_launch_caller(context, pyramid_request) ModuleItemConfiguration.get_document_url.assert_called_once_with(", "lti_user=pyramid_request.lti_user, ) def test_it_does_not_call_grading_info_upsert_if_instructor( self, context, pyramid_request, grading_info_service, view_caller ): pyramid_request.lti_user = factories.LTIUser(roles=\"instructor\")", "@pytest.fixture(autouse=True) def LtiLaunches(patch): return patch(\"lms.views.basic_lti_launch.LtiLaunches\") @pytest.fixture(autouse=True) def ModuleItemConfiguration(patch): return patch(\"lms.views.basic_lti_launch.ModuleItemConfiguration\") @pytest.fixture def bearer_token_schema(BearerTokenSchema):", "ensures this. 
pyramid_request.parsed_params = { \"document_url\": \"TEST_DOCUMENT_URL\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views", "Set up the appropriate conditions and then call BasicLTILaunchViews.configure_module_item(), and return whatever BasicLTILaunchViews.configure_module_item()", "oauth_nonce=\"TEST_OAUTH_NONCE\", oauth_timestamp=\"TEST_OAUTH_TIMESTAMP\", oauth_signature=\"TEST_OAUTH_SIGNATURE\", ) return pyramid_request class TestUnconfiguredBasicLTILaunchNotAuthorized: def test_it_returns_the_right_template_data(self, context, pyramid_request): data", "= mock.create_autospec(JSConfig, spec_set=True, instance=True) context.is_canvas = False return context @pytest.fixture def is_canvas(context): \"\"\"Set", "self, context, pyramid_request, ModuleItemConfiguration ): configure_module_item_caller(context, pyramid_request) ModuleItemConfiguration.set_document_url.assert_called_once_with( pyramid_request.db, \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"TEST_RESOURCE_LINK_ID\", \"TEST_DOCUMENT_URL\", )", "pyramid_request class TestUnconfiguredBasicLTILaunchNotAuthorized: def test_it_returns_the_right_template_data(self, context, pyramid_request): data = BasicLTILaunchViews( context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized()", "def is_canvas(context): \"\"\"Set the LMS that launched us to Canvas.\"\"\" context.is_canvas = True", "returns. \"\"\" # The file_id param is always present when canvas_file_basic_lti_launch() # is", "test_it_does_not_call_grading_info_upsert_if_instructor( self, context, pyramid_request, grading_info_service, view_caller ): pyramid_request.lti_user = factories.LTIUser(roles=\"instructor\") view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called()", "whatever BasicLTILaunchViews.db_configured_basic_lti_launch() returns. \"\"\" views = BasicLTILaunchViews(context, pyramid_request) return views.db_configured_basic_lti_launch() def url_configured_basic_lti_launch_caller(context, pyramid_request):", "uses this fixture will be run multiple times, once for each parametrized version", "fixture. See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return request.param @pytest.mark.usefixtures(\"is_canvas\") class TestCanvasFileBasicLTILaunch: @pytest.mark.usefixtures(\"is_canvas\") def test_it_adds_the_canvas_file_id(self, context,", "context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"url\"]", "@pytest.mark.usefixtures(\"user_is_learner\") def test_it_calls_grading_info_upsert( self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_called_once_with( pyramid_request,", "pyramid_request): \"\"\" Call BasicLTILaunchViews.db_configured_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.db_configured_basic_lti_launch(),", "# is called. The canvas_file=True view predicate ensures this. pyramid_request.params[\"file_id\"] = \"TEST_FILE_ID\" views", "always present when # url_configured_basic_lti_launch() is called. 
The url_configured=True view # predicate and", "call BasicLTILaunchViews.db_configured_basic_lti_launch(), and return whatever BasicLTILaunchViews.db_configured_basic_lti_launch() returns. \"\"\" views = BasicLTILaunchViews(context, pyramid_request) return", "up the appropriate conditions and then call BasicLTILaunchViews.url_configured_basic_lti_launch(), and return whatever BasicLTILaunchViews.url_configured_basic_lti_launch() returns.", "Tests common to multiple (but not all) BasicLTILaunchViews views. See the parametrized `view_caller`", "and return whatever BasicLTILaunchViews.url_configured_basic_lti_launch() returns. \"\"\" # The `url` parsed param is always", "} @pytest.fixture def pyramid_request(self, pyramid_request): pyramid_request.params = dict( self.form_fields(), oauth_nonce=\"TEST_OAUTH_NONCE\", oauth_timestamp=\"TEST_OAUTH_TIMESTAMP\", oauth_signature=\"TEST_OAUTH_SIGNATURE\", )", "self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller,", "returns. \"\"\" views = BasicLTILaunchViews(context, pyramid_request) return views.unconfigured_basic_lti_launch() def configure_module_item_caller(context, pyramid_request): \"\"\" Call", "course_service.get_or_create.assert_called_once_with( context.h_group.authority_provided_id ) @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, unconfigured_basic_lti_launch_caller, ] ) def view_caller(self,", "class TestCanvasFileBasicLTILaunch: @pytest.mark.usefixtures(\"is_canvas\") def test_it_adds_the_canvas_file_id(self, context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] ) class", "test_it_adds_the_canvas_file_id(self, context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] ) class TestDBConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context,", "assert data == {} pytestmark = pytest.mark.usefixtures( \"ai_getter\", \"course_service\", \"h_api\", \"grading_info_service\", \"lti_h_service\" )", "): pyramid_request.params.update( { \"context_id\": \"TEST_CONTEXT_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", } ) view_caller(context, pyramid_request) LtiLaunches.add.assert_called_once_with( pyramid_request.db,", "views = BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit tests for BasicLTILaunchViews.__init__().\"\"\" def", ") view_caller(context, pyramid_request) LtiLaunches.add.assert_called_once_with( pyramid_request.db, pyramid_request.params[\"context_id\"], pyramid_request.params[\"oauth_consumer_key\"], ) @pytest.mark.usefixtures(\"user_is_learner\") def test_it_calls_grading_info_upsert( self, context,", "oauth_signature=\"TEST_OAUTH_SIGNATURE\", ) return pyramid_request class TestUnconfiguredBasicLTILaunchNotAuthorized: def test_it_returns_the_right_template_data(self, context, pyramid_request): data = BasicLTILaunchViews(", "methods that these tests apply to. 
\"\"\" def test_it_reports_lti_launches( self, context, pyramid_request, LtiLaunches,", "url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"url\"] ) class TestConfigureModuleItem: def test_it_saves_the_assignments_document_url_to_the_db( self, context, pyramid_request, ModuleItemConfiguration", "BasicLTILaunchViews.configure_module_item() returns. \"\"\" # The document_url, resource_link_id and tool_consumer_instance_guid parsed # params are", "views. See the parametrized `view_caller` fixture below for the list of view methods", "TestCommon: \"\"\" Tests common to multiple (but not all) BasicLTILaunchViews views. See the", "\"\"\" def test_it_reports_lti_launches( self, context, pyramid_request, LtiLaunches, view_caller ): pyramid_request.params.update( { \"context_id\": \"TEST_CONTEXT_ID\",", ") def test_it_enables_frontend_grading(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request): configure_module_item_caller(context,", "BasicLTILaunchViews.canvas_file_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.canvas_file_basic_lti_launch(), and return whatever", "views = BasicLTILaunchViews(context, pyramid_request) return views.unconfigured_basic_lti_launch() def configure_module_item_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.configure_module_item(). Set", "See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return request.param class TestCourseRecording: def test_it_records_the_course_in_the_DB( self, context, pyramid_request, view_caller,", "document_url, resource_link_id and tool_consumer_instance_guid parsed # params are always present when configure_module_item() is", "pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, configure_module_item_caller,", "fixture. See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return request.param class TestCourseRecording: def test_it_records_the_course_in_the_DB( self, context, pyramid_request,", "ModuleItemConfiguration.get_document_url.return_value ) class TestURLConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self,", "to be tested. This is a parametrized fixture. A test that uses this", "the list of view methods that these tests apply to. \"\"\" def test_it_reports_lti_launches(", "pyramid_request): \"\"\" Call BasicLTILaunchViews.canvas_file_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.canvas_file_basic_lti_launch(),", "return whatever BasicLTILaunchViews.db_configured_basic_lti_launch() returns. \"\"\" views = BasicLTILaunchViews(context, pyramid_request) return views.db_configured_basic_lti_launch() def url_configured_basic_lti_launch_caller(context,", "and return whatever BasicLTILaunchViews.canvas_file_basic_lti_launch() returns. 
\"\"\" # The file_id param is always present", "BasicLTILaunchViews.url_configured_basic_lti_launch() returns. \"\"\" # The `url` parsed param is always present when #", "@pytest.mark.usefixtures(\"is_canvas\") def test_it_does_not_call_grading_info_upsert_if_canvas( self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.fixture(", "context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"document_url\"] ) class TestUnconfiguredBasicLTILaunch: def test_it_enables_content_item_selection_mode( self, BearerTokenSchema, bearer_token_schema, context, pyramid_request ):", "tests for BasicLTILaunchViews.__init__().\"\"\" def test_it_sets_the_focused_user(self, context, pyramid_request): BasicLTILaunchViews(context, pyramid_request) context.js_config.maybe_set_focused_user.assert_called_once_with() class TestCommon: \"\"\"", "# The document_url, resource_link_id and tool_consumer_instance_guid parsed # params are always present when", "the view method to be tested. This is a parametrized fixture. A test", "params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, unconfigured_basic_lti_launch_caller, ] ) def view_caller(self, request): \"\"\" Return a", "context.js_config.enable_content_item_selection_mode.assert_called_once_with( form_action=\"http://example.com/module_item_configurations\", form_fields=dict( self.form_fields(), authorization=bearer_token_schema.authorization_param.return_value, ), ) def form_fields(self): return { \"user_id\": \"TEST_USER_ID\",", "pyramid_request) return views.db_configured_basic_lti_launch() def url_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.url_configured_basic_lti_launch(). Set up the appropriate", "bearer_token_schema, context, pyramid_request ): unconfigured_basic_lti_launch_caller(context, pyramid_request) BearerTokenSchema.assert_called_once_with(pyramid_request) bearer_token_schema.authorization_param.assert_called_once_with( pyramid_request.lti_user ) context.js_config.enable_content_item_selection_mode.assert_called_once_with( form_action=\"http://example.com/module_item_configurations\", form_fields=dict(", "configure_module_item_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"document_url\"] ) class", "mock.create_autospec(LTILaunchResource, spec_set=True, instance=True) context.js_config = mock.create_autospec(JSConfig, spec_set=True, instance=True) context.is_canvas = False return context", "\"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"context_id\": \"TEST_CONTEXT_ID\", } @pytest.fixture def pyramid_request(self, pyramid_request): pyramid_request.params =", "): view_caller(context, pyramid_request) course_service.get_or_create.assert_called_once_with( context.h_group.authority_provided_id ) @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, unconfigured_basic_lti_launch_caller, ]", "is called. The canvas_file=True view predicate ensures this. 
pyramid_request.params[\"file_id\"] = \"TEST_FILE_ID\" views =", "\"TEST_RESOURCE_LINK_ID\", \"TEST_DOCUMENT_URL\", ) def test_it_enables_frontend_grading(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context,", "db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, configure_module_item_caller, ] ) def view_caller(self, request): \"\"\" Return a function that", "context.is_canvas = True @pytest.fixture def pyramid_request(pyramid_request): pyramid_request.params.update( { \"lis_result_sourcedid\": \"modelstudent-assignment1\", \"lis_outcome_service_url\": \"https://hypothesis.shinylms.com/outcomes\", }", "then call BasicLTILaunchViews.configure_module_item(), and return whatever BasicLTILaunchViews.configure_module_item() returns. \"\"\" # The document_url, resource_link_id", "url_configured_basic_lti_launch_caller, unconfigured_basic_lti_launch_caller, ] ) def view_caller(self, request): \"\"\" Return a function that calls", "Call BasicLTILaunchViews.configure_module_item(). Set up the appropriate conditions and then call BasicLTILaunchViews.configure_module_item(), and return", "canvas_file_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.canvas_file_basic_lti_launch(). Set up the appropriate conditions and then call", "@pytest.mark.usefixtures(\"is_canvas\") class TestCanvasFileBasicLTILaunch: @pytest.mark.usefixtures(\"is_canvas\") def test_it_adds_the_canvas_file_id(self, context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] )", "spec_set=True, instance=True) context.js_config = mock.create_autospec(JSConfig, spec_set=True, instance=True) context.is_canvas = False return context @pytest.fixture", "pyramid_request.params.update( { \"context_id\": \"TEST_CONTEXT_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", } ) view_caller(context, pyramid_request) LtiLaunches.add.assert_called_once_with( pyramid_request.db, pyramid_request.params[\"context_id\"],", "up the appropriate conditions and then call BasicLTILaunchViews.configure_module_item(), and return whatever BasicLTILaunchViews.configure_module_item() returns.", "\"h_api\", \"grading_info_service\", \"lti_h_service\" ) @pytest.fixture def context(): context = mock.create_autospec(LTILaunchResource, spec_set=True, instance=True) context.js_config", "return patch(\"lms.views.basic_lti_launch.BearerTokenSchema\") @pytest.fixture(autouse=True) def LtiLaunches(patch): return patch(\"lms.views.basic_lti_launch.LtiLaunches\") @pytest.fixture(autouse=True) def ModuleItemConfiguration(patch): return patch(\"lms.views.basic_lti_launch.ModuleItemConfiguration\") @pytest.fixture", "multiple times, once for each parametrized version of this fixture. 
See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\"", "\"ai_getter\", \"course_service\", \"h_api\", \"grading_info_service\", \"lti_h_service\" ) @pytest.fixture def context(): context = mock.create_autospec(LTILaunchResource, spec_set=True,", "context.js_config = mock.create_autospec(JSConfig, spec_set=True, instance=True) context.is_canvas = False return context @pytest.fixture def is_canvas(context):", "spec_set=True, instance=True) context.is_canvas = False return context @pytest.fixture def is_canvas(context): \"\"\"Set the LMS", "\"\"\" Call BasicLTILaunchViews.canvas_file_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.canvas_file_basic_lti_launch(), and", "grading_info_service, view_caller ): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_called_once_with( pyramid_request, h_user=pyramid_request.lti_user.h_user, lti_user=pyramid_request.lti_user, ) def test_it_does_not_call_grading_info_upsert_if_instructor( self,", "parametrized version of this fixture. See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return request.param class TestCourseRecording: def", "def test_it_adds_the_document_url( self, context, pyramid_request, ModuleItemConfiguration ): db_configured_basic_lti_launch_caller(context, pyramid_request) ModuleItemConfiguration.get_document_url.assert_called_once_with( pyramid_request.db, \"TEST_GUID\", \"TEST_RESOURCE_LINK_ID\"", "BearerTokenSchema, bearer_token_schema, context, pyramid_request ): unconfigured_basic_lti_launch_caller(context, pyramid_request) BearerTokenSchema.assert_called_once_with(pyramid_request) bearer_token_schema.authorization_param.assert_called_once_with( pyramid_request.lti_user ) context.js_config.enable_content_item_selection_mode.assert_called_once_with( form_action=\"http://example.com/module_item_configurations\",", "canvas_file=True view predicate ensures this. pyramid_request.params[\"file_id\"] = \"TEST_FILE_ID\" views = BasicLTILaunchViews(context, pyramid_request) return", "mock.create_autospec(JSConfig, spec_set=True, instance=True) context.is_canvas = False return context @pytest.fixture def is_canvas(context): \"\"\"Set the", "this. pyramid_request.params[\"file_id\"] = \"TEST_FILE_ID\" views = BasicLTILaunchViews(context, pyramid_request) return views.canvas_file_basic_lti_launch() def db_configured_basic_lti_launch_caller(context, pyramid_request):", "\"user_id\": \"TEST_USER_ID\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"context_id\": \"TEST_CONTEXT_ID\", } @pytest.fixture def", "configure_module_item_caller, ] ) def view_caller(self, request): \"\"\" Return a function that calls the", "See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return request.param @pytest.mark.usefixtures(\"is_canvas\") class TestCanvasFileBasicLTILaunch: @pytest.mark.usefixtures(\"is_canvas\") def test_it_adds_the_canvas_file_id(self, context, pyramid_request):", "TestCourseRecording: def test_it_records_the_course_in_the_DB( self, context, pyramid_request, view_caller, course_service ): view_caller(context, pyramid_request) course_service.get_or_create.assert_called_once_with( context.h_group.authority_provided_id", "param is always present when canvas_file_basic_lti_launch() # is called. 
The canvas_file=True view predicate", "\"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views = BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit tests for", "of view methods that these tests apply to. \"\"\" def test_it_reports_lti_launches( self, context,", "calls the view method to be tested. This is a parametrized fixture. A", ") context.js_config.enable_content_item_selection_mode.assert_called_once_with( form_action=\"http://example.com/module_item_configurations\", form_fields=dict( self.form_fields(), authorization=bearer_token_schema.authorization_param.return_value, ), ) def form_fields(self): return { \"user_id\":", "import pytest from lms.resources import LTILaunchResource from lms.resources._js_config import JSConfig from lms.views.basic_lti_launch import", "): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, configure_module_item_caller, ] ) def", "pyramid_request @pytest.fixture(autouse=True) def BearerTokenSchema(patch): return patch(\"lms.views.basic_lti_launch.BearerTokenSchema\") @pytest.fixture(autouse=True) def LtiLaunches(patch): return patch(\"lms.views.basic_lti_launch.LtiLaunches\") @pytest.fixture(autouse=True) def", "= BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit tests for BasicLTILaunchViews.__init__().\"\"\" def test_it_sets_the_focused_user(self,", "conditions and then call BasicLTILaunchViews.db_configured_basic_lti_launch(), and return whatever BasicLTILaunchViews.db_configured_basic_lti_launch() returns. \"\"\" views =", ").unconfigured_basic_lti_launch_not_authorized() assert data == {} pytestmark = pytest.mark.usefixtures( \"ai_getter\", \"course_service\", \"h_api\", \"grading_info_service\", \"lti_h_service\"", "TestConfigureModuleItem: def test_it_saves_the_assignments_document_url_to_the_db( self, context, pyramid_request, ModuleItemConfiguration ): configure_module_item_caller(context, pyramid_request) ModuleItemConfiguration.set_document_url.assert_called_once_with( pyramid_request.db, \"TEST_TOOL_CONSUMER_INSTANCE_GUID\",", "to multiple (but not all) BasicLTILaunchViews views. See the parametrized `view_caller` fixture below", "\"\"\" # The `url` parsed param is always present when # url_configured_basic_lti_launch() is", "self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_called_once_with( pyramid_request, h_user=pyramid_request.lti_user.h_user, lti_user=pyramid_request.lti_user, )", "and then call BasicLTILaunchViews.configure_module_item(), and return whatever BasicLTILaunchViews.configure_module_item() returns. \"\"\" # The document_url,", "context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"url\"] ) class TestConfigureModuleItem: def test_it_saves_the_assignments_document_url_to_the_db( self, context, pyramid_request, ModuleItemConfiguration ): configure_module_item_caller(context,", "of this fixture. See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return request.param class TestCourseRecording: def test_it_records_the_course_in_the_DB( self,", "and return whatever BasicLTILaunchViews.db_configured_basic_lti_launch() returns. 
\"\"\" views = BasicLTILaunchViews(context, pyramid_request) return views.db_configured_basic_lti_launch() def", "BasicLTILaunchViews.canvas_file_basic_lti_launch(), and return whatever BasicLTILaunchViews.canvas_file_basic_lti_launch() returns. \"\"\" # The file_id param is always", "pyramid_request): \"\"\" Call BasicLTILaunchViews.url_configured_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.url_configured_basic_lti_launch(),", "us to Canvas.\"\"\" context.is_canvas = True @pytest.fixture def pyramid_request(pyramid_request): pyramid_request.params.update( { \"lis_result_sourcedid\": \"modelstudent-assignment1\",", "canvas_file_basic_lti_launch() # is called. The canvas_file=True view predicate ensures this. pyramid_request.params[\"file_id\"] = \"TEST_FILE_ID\"", "return views.db_configured_basic_lti_launch() def url_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.url_configured_basic_lti_launch(). Set up the appropriate conditions", "appropriate conditions and then call BasicLTILaunchViews.unconfigured_basic_lti_launch(), and return whatever BasicLTILaunchViews.unconfigured_basic_lti_launch() returns. \"\"\" views", "\"document_url\": \"TEST_DOCUMENT_URL\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views = BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item()", "pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.mark.usefixtures(\"is_canvas\") def test_it_does_not_call_grading_info_upsert_if_canvas( self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request)", "are always present when configure_module_item() is called. # ConfigureModuleItemSchema ensures this. pyramid_request.parsed_params =", "data = BasicLTILaunchViews( context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized() assert data == {} pytestmark = pytest.mark.usefixtures(", "{ \"user_id\": \"TEST_USER_ID\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"context_id\": \"TEST_CONTEXT_ID\", } @pytest.fixture", "LtiLaunches, view_caller ): pyramid_request.params.update( { \"context_id\": \"TEST_CONTEXT_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", } ) view_caller(context, pyramid_request)", "return views.unconfigured_basic_lti_launch() def configure_module_item_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.configure_module_item(). Set up the appropriate conditions", "BasicLTILaunchViews.url_configured_basic_lti_launch(), and return whatever BasicLTILaunchViews.url_configured_basic_lti_launch() returns. \"\"\" # The `url` parsed param is", "\"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views = BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit:", "unittest import mock import pytest from lms.resources import LTILaunchResource from lms.resources._js_config import JSConfig", "call BasicLTILaunchViews.canvas_file_basic_lti_launch(), and return whatever BasicLTILaunchViews.canvas_file_basic_lti_launch() returns. 
\"\"\" # The file_id param is", "LtiLaunches(patch): return patch(\"lms.views.basic_lti_launch.LtiLaunches\") @pytest.fixture(autouse=True) def ModuleItemConfiguration(patch): return patch(\"lms.views.basic_lti_launch.ModuleItemConfiguration\") @pytest.fixture def bearer_token_schema(BearerTokenSchema): return BearerTokenSchema.return_value", "predicate and URLConfiguredBasicLTILaunchSchema ensure this. pyramid_request.parsed_params = {\"url\": \"TEST_URL\"} views = BasicLTILaunchViews(context, pyramid_request)", "class TestURLConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request):", "views = BasicLTILaunchViews(context, pyramid_request) return views.canvas_file_basic_lti_launch() def db_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.db_configured_basic_lti_launch(). Set", "this. pyramid_request.parsed_params = {\"url\": \"TEST_URL\"} views = BasicLTILaunchViews(context, pyramid_request) return views.url_configured_basic_lti_launch() def unconfigured_basic_lti_launch_caller(context,", "test_it_enables_frontend_grading(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with(", "views.url_configured_basic_lti_launch() def unconfigured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.unconfigured_basic_lti_launch(). Set up the appropriate conditions and", "Call BasicLTILaunchViews.url_configured_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.url_configured_basic_lti_launch(), and return", "context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] ) class TestDBConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request):", "times, once for each parametrized version of this fixture. See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures \"\"\" return", "BasicLTILaunchViews.unconfigured_basic_lti_launch(). 
Set up the appropriate conditions and then call BasicLTILaunchViews.unconfigured_basic_lti_launch(), and return whatever", "\"\"\" # The document_url, resource_link_id and tool_consumer_instance_guid parsed # params are always present", "pyramid_request.parsed_params = {\"url\": \"TEST_URL\"} views = BasicLTILaunchViews(context, pyramid_request) return views.url_configured_basic_lti_launch() def unconfigured_basic_lti_launch_caller(context, pyramid_request):", "pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"url\"] ) class TestConfigureModuleItem: def test_it_saves_the_assignments_document_url_to_the_db( self, context, pyramid_request,", "pytest from lms.resources import LTILaunchResource from lms.resources._js_config import JSConfig from lms.views.basic_lti_launch import BasicLTILaunchViews", "context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized() assert data == {} pytestmark = pytest.mark.usefixtures( \"ai_getter\", \"course_service\", \"h_api\",", "\"TEST_RESOURCE_LINK_ID\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views = BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit", "class TestUnconfiguredBasicLTILaunch: def test_it_enables_content_item_selection_mode( self, BearerTokenSchema, bearer_token_schema, context, pyramid_request ): unconfigured_basic_lti_launch_caller(context, pyramid_request) BearerTokenSchema.assert_called_once_with(pyramid_request)", "\"TEST_USER_ID\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"context_id\": \"TEST_CONTEXT_ID\", } @pytest.fixture def pyramid_request(self,", "method to be tested. This is a parametrized fixture. A test that uses", "URLConfiguredBasicLTILaunchSchema ensure this. pyramid_request.parsed_params = {\"url\": \"TEST_URL\"} views = BasicLTILaunchViews(context, pyramid_request) return views.url_configured_basic_lti_launch()", "import mock import pytest from lms.resources import LTILaunchResource from lms.resources._js_config import JSConfig from", "function that calls the view method to be tested. This is a parametrized", "A test that uses this fixture will be run multiple times, once for", "\"\"\" Call BasicLTILaunchViews.url_configured_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.url_configured_basic_lti_launch(), and", "when # url_configured_basic_lti_launch() is called. 
The url_configured=True view # predicate and URLConfiguredBasicLTILaunchSchema ensure", "def test_it_adds_the_canvas_file_id(self, context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] ) class TestDBConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self,", "context, pyramid_request ): unconfigured_basic_lti_launch_caller(context, pyramid_request) BearerTokenSchema.assert_called_once_with(pyramid_request) bearer_token_schema.authorization_param.assert_called_once_with( pyramid_request.lti_user ) context.js_config.enable_content_item_selection_mode.assert_called_once_with( form_action=\"http://example.com/module_item_configurations\", form_fields=dict( self.form_fields(),", "pyramid_request.lti_user ) context.js_config.enable_content_item_selection_mode.assert_called_once_with( form_action=\"http://example.com/module_item_configurations\", form_fields=dict( self.form_fields(), authorization=bearer_token_schema.authorization_param.return_value, ), ) def form_fields(self): return {", "return request.param @pytest.mark.usefixtures(\"is_canvas\") class TestCanvasFileBasicLTILaunch: @pytest.mark.usefixtures(\"is_canvas\") def test_it_adds_the_canvas_file_id(self, context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with(", "context.js_config.add_document_url.assert_called_once_with( ModuleItemConfiguration.get_document_url.return_value ) class TestURLConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def", "pyramid_request): data = BasicLTILaunchViews( context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized() assert data == {} pytestmark =", "view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, configure_module_item_caller, ] ) def view_caller(self,", "See the parametrized `view_caller` fixture below for the list of view methods that", "pyramid_request) ModuleItemConfiguration.get_document_url.assert_called_once_with( pyramid_request.db, \"TEST_GUID\", \"TEST_RESOURCE_LINK_ID\" ) context.js_config.add_document_url.assert_called_once_with( ModuleItemConfiguration.get_document_url.return_value ) class TestURLConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self,", "] ) def view_caller(self, request): \"\"\" Return a function that calls the view", "\"lis_outcome_service_url\": \"https://hypothesis.shinylms.com/outcomes\", } ) return pyramid_request @pytest.fixture(autouse=True) def BearerTokenSchema(patch): return patch(\"lms.views.basic_lti_launch.BearerTokenSchema\") @pytest.fixture(autouse=True) def", "lms.resources import LTILaunchResource from lms.resources._js_config import JSConfig from lms.views.basic_lti_launch import BasicLTILaunchViews from tests", "test that uses this fixture will be run multiple times, once for each", "pyramid_request) ModuleItemConfiguration.set_document_url.assert_called_once_with( pyramid_request.db, \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"TEST_RESOURCE_LINK_ID\", \"TEST_DOCUMENT_URL\", ) def test_it_enables_frontend_grading(self, context, 
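# The canvas_file=True and url_configured=True markers referenced above are
# custom Pyramid view predicates. As a rough, hypothetical sketch (an
# assumption for illustration, not this repo's actual registration code),
# such a view is wired up along these lines:
#
#     config.add_view(
#         BasicLTILaunchViews,
#         attr="canvas_file_basic_lti_launch",
#         route_name="lti_launches",
#         canvas_file=True,
#     )
#
# so the view body (and these test helpers) can assume the params that the
# predicate and schema guarantee are present.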
def unconfigured_basic_lti_launch_caller(context, pyramid_request):
    """
    Call BasicLTILaunchViews.unconfigured_basic_lti_launch().

    Set up the appropriate conditions, then call
    BasicLTILaunchViews.unconfigured_basic_lti_launch() and return whatever
    it returns.
    """
    views = BasicLTILaunchViews(context, pyramid_request)
    return views.unconfigured_basic_lti_launch()


def configure_module_item_caller(context, pyramid_request):
    """
    Call BasicLTILaunchViews.configure_module_item().

    Set up the appropriate conditions, then call
    BasicLTILaunchViews.configure_module_item() and return whatever it
    returns.
    """
    # The document_url, resource_link_id and tool_consumer_instance_guid
    # parsed params are always present when configure_module_item() is
    # called: ConfigureModuleItemSchema ensures this.
    pyramid_request.parsed_params = {
        "document_url": "TEST_DOCUMENT_URL",
        "resource_link_id": "TEST_RESOURCE_LINK_ID",
        "tool_consumer_instance_guid": "TEST_TOOL_CONSUMER_INSTANCE_GUID",
    }
    views = BasicLTILaunchViews(context, pyramid_request)
    return views.configure_module_item()
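# These module-level caller helpers serve double duty: the per-view test
# classes below call them directly, while the parametrized `view_caller`
# fixtures pass them through so that behavior shared by several views is
# asserted once against each view method.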
\"\"\" # The file_id param is always present when", "context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"url\"] ) class TestConfigureModuleItem: def test_it_saves_the_assignments_document_url_to_the_db( self, context,", "form_fields(self): return { \"user_id\": \"TEST_USER_ID\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"context_id\": \"TEST_CONTEXT_ID\",", "@pytest.fixture(autouse=True) def BearerTokenSchema(patch): return patch(\"lms.views.basic_lti_launch.BearerTokenSchema\") @pytest.fixture(autouse=True) def LtiLaunches(patch): return patch(\"lms.views.basic_lti_launch.LtiLaunches\") @pytest.fixture(autouse=True) def ModuleItemConfiguration(patch):", "def test_it_returns_the_right_template_data(self, context, pyramid_request): data = BasicLTILaunchViews( context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized() assert data ==", "conditions and then call BasicLTILaunchViews.unconfigured_basic_lti_launch(), and return whatever BasicLTILaunchViews.unconfigured_basic_lti_launch() returns. \"\"\" views =", "context, pyramid_request): BasicLTILaunchViews(context, pyramid_request) context.js_config.maybe_set_focused_user.assert_called_once_with() class TestCommon: \"\"\" Tests common to multiple (but", "from lms.resources._js_config import JSConfig from lms.views.basic_lti_launch import BasicLTILaunchViews from tests import factories def", "= { \"document_url\": \"TEST_DOCUMENT_URL\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views = BasicLTILaunchViews(context, pyramid_request)", "self.form_fields(), authorization=bearer_token_schema.authorization_param.return_value, ), ) def form_fields(self): return { \"user_id\": \"TEST_USER_ID\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"oauth_consumer_key\":", "be run multiple times, once for each parametrized version of this fixture. See", "): db_configured_basic_lti_launch_caller(context, pyramid_request) ModuleItemConfiguration.get_document_url.assert_called_once_with( pyramid_request.db, \"TEST_GUID\", \"TEST_RESOURCE_LINK_ID\" ) context.js_config.add_document_url.assert_called_once_with( ModuleItemConfiguration.get_document_url.return_value ) class TestURLConfiguredBasicLTILaunch:", "\"TEST_RESOURCE_LINK_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"context_id\": \"TEST_CONTEXT_ID\", } @pytest.fixture def pyramid_request(self, pyramid_request): pyramid_request.params", "pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] ) class TestDBConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request): db_configured_basic_lti_launch_caller(context,", "context, pyramid_request): db_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url( self, context, pyramid_request, ModuleItemConfiguration ): db_configured_basic_lti_launch_caller(context,", "is always present when canvas_file_basic_lti_launch() # is called. 
The canvas_file=True view predicate ensures", "from unittest import mock import pytest from lms.resources import LTILaunchResource from lms.resources._js_config import", "def configure_module_item_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.configure_module_item(). Set up the appropriate conditions and then", "tests import factories def canvas_file_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.canvas_file_basic_lti_launch(). Set up the appropriate", "ModuleItemConfiguration.get_document_url.assert_called_once_with( pyramid_request.db, \"TEST_GUID\", \"TEST_RESOURCE_LINK_ID\" ) context.js_config.add_document_url.assert_called_once_with( ModuleItemConfiguration.get_document_url.return_value ) class TestURLConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context,", "data == {} pytestmark = pytest.mark.usefixtures( \"ai_getter\", \"course_service\", \"h_api\", \"grading_info_service\", \"lti_h_service\" ) @pytest.fixture", "def pyramid_request(pyramid_request): pyramid_request.params.update( { \"lis_result_sourcedid\": \"modelstudent-assignment1\", \"lis_outcome_service_url\": \"https://hypothesis.shinylms.com/outcomes\", } ) return pyramid_request @pytest.fixture(autouse=True)", "pyramid_request.params[\"context_id\"], pyramid_request.params[\"oauth_consumer_key\"], ) @pytest.mark.usefixtures(\"user_is_learner\") def test_it_calls_grading_info_upsert( self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context,", "\"\"\" return request.param @pytest.mark.usefixtures(\"is_canvas\") class TestCanvasFileBasicLTILaunch: @pytest.mark.usefixtures(\"is_canvas\") def test_it_adds_the_canvas_file_id(self, context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request)", "LtiLaunches.add.assert_called_once_with( pyramid_request.db, pyramid_request.params[\"context_id\"], pyramid_request.params[\"oauth_consumer_key\"], ) @pytest.mark.usefixtures(\"user_is_learner\") def test_it_calls_grading_info_upsert( self, context, pyramid_request, grading_info_service, view_caller", "BasicLTILaunchViews(context, pyramid_request) context.js_config.maybe_set_focused_user.assert_called_once_with() class TestCommon: \"\"\" Tests common to multiple (but not all)", "the parametrized `view_caller` fixture below for the list of view methods that these", "pyramid_request) return views.canvas_file_basic_lti_launch() def db_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.db_configured_basic_lti_launch(). Set up the appropriate", "that these tests apply to. \"\"\" def test_it_reports_lti_launches( self, context, pyramid_request, LtiLaunches, view_caller", "pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"document_url\"] )", "def test_it_reports_lti_launches( self, context, pyramid_request, LtiLaunches, view_caller ): pyramid_request.params.update( { \"context_id\": \"TEST_CONTEXT_ID\", \"oauth_consumer_key\":", "BasicLTILaunchViews(context, pyramid_request) return views.url_configured_basic_lti_launch() def unconfigured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.unconfigured_basic_lti_launch(). 
Set up the", "present when canvas_file_basic_lti_launch() # is called. The canvas_file=True view predicate ensures this. pyramid_request.params[\"file_id\"]", "LMS that launched us to Canvas.\"\"\" context.is_canvas = True @pytest.fixture def pyramid_request(pyramid_request): pyramid_request.params.update(", "{ \"lis_result_sourcedid\": \"modelstudent-assignment1\", \"lis_outcome_service_url\": \"https://hypothesis.shinylms.com/outcomes\", } ) return pyramid_request @pytest.fixture(autouse=True) def BearerTokenSchema(patch): return", "The document_url, resource_link_id and tool_consumer_instance_guid parsed # params are always present when configure_module_item()", "Call BasicLTILaunchViews.canvas_file_basic_lti_launch(). Set up the appropriate conditions and then call BasicLTILaunchViews.canvas_file_basic_lti_launch(), and return", "request): \"\"\" Return a function that calls the view method to be tested.", "factories.LTIUser(roles=\"instructor\") view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.mark.usefixtures(\"is_canvas\") def test_it_does_not_call_grading_info_upsert_if_canvas( self, context, pyramid_request, grading_info_service, view_caller ):", "# ConfigureModuleItemSchema ensures this. pyramid_request.parsed_params = { \"document_url\": \"TEST_DOCUMENT_URL\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\",", "view_caller(context, pyramid_request) LtiLaunches.add.assert_called_once_with( pyramid_request.db, pyramid_request.params[\"context_id\"], pyramid_request.params[\"oauth_consumer_key\"], ) @pytest.mark.usefixtures(\"user_is_learner\") def test_it_calls_grading_info_upsert( self, context, pyramid_request,", "fixture will be run multiple times, once for each parametrized version of this", "LTILaunchResource from lms.resources._js_config import JSConfig from lms.views.basic_lti_launch import BasicLTILaunchViews from tests import factories", "\"\"\" return request.param class TestCourseRecording: def test_it_records_the_course_in_the_DB( self, context, pyramid_request, view_caller, course_service ):", "def test_it_adds_the_document_url(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"document_url\"] ) class TestUnconfiguredBasicLTILaunch: def test_it_enables_content_item_selection_mode(", "test_it_enables_frontend_grading(self, context, pyramid_request): db_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url( self, context, pyramid_request, ModuleItemConfiguration ):", "the appropriate conditions and then call BasicLTILaunchViews.canvas_file_basic_lti_launch(), and return whatever BasicLTILaunchViews.canvas_file_basic_lti_launch() returns. 
\"\"\"", "= dict( self.form_fields(), oauth_nonce=\"TEST_OAUTH_NONCE\", oauth_timestamp=\"TEST_OAUTH_TIMESTAMP\", oauth_signature=\"TEST_OAUTH_SIGNATURE\", ) return pyramid_request class TestUnconfiguredBasicLTILaunchNotAuthorized: def test_it_returns_the_right_template_data(self,", "test_it_returns_the_right_template_data(self, context, pyramid_request): data = BasicLTILaunchViews( context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized() assert data == {}", "): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_called_once_with( pyramid_request, h_user=pyramid_request.lti_user.h_user, lti_user=pyramid_request.lti_user, ) def test_it_does_not_call_grading_info_upsert_if_instructor( self, context, pyramid_request,", "pyramid_request, grading_info_service, view_caller ): pyramid_request.lti_user = factories.LTIUser(roles=\"instructor\") view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.mark.usefixtures(\"is_canvas\") def test_it_does_not_call_grading_info_upsert_if_canvas(", "): configure_module_item_caller(context, pyramid_request) ModuleItemConfiguration.set_document_url.assert_called_once_with( pyramid_request.db, \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"TEST_RESOURCE_LINK_ID\", \"TEST_DOCUMENT_URL\", ) def test_it_enables_frontend_grading(self, context, pyramid_request):", "test_it_does_not_call_grading_info_upsert_if_canvas( self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller,", "pyramid_request, ModuleItemConfiguration ): configure_module_item_caller(context, pyramid_request) ModuleItemConfiguration.set_document_url.assert_called_once_with( pyramid_request.db, \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"TEST_RESOURCE_LINK_ID\", \"TEST_DOCUMENT_URL\", ) def test_it_enables_frontend_grading(self,", "def test_it_enables_frontend_grading(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request)", "The url_configured=True view # predicate and URLConfiguredBasicLTILaunchSchema ensure this. pyramid_request.parsed_params = {\"url\": \"TEST_URL\"}", "BasicLTILaunchViews.db_configured_basic_lti_launch() returns. \"\"\" views = BasicLTILaunchViews(context, pyramid_request) return views.db_configured_basic_lti_launch() def url_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\"", "called. The canvas_file=True view predicate ensures this. pyramid_request.params[\"file_id\"] = \"TEST_FILE_ID\" views = BasicLTILaunchViews(context,", "parsed # params are always present when configure_module_item() is called. # ConfigureModuleItemSchema ensures", "TestCanvasFileBasicLTILaunch: @pytest.mark.usefixtures(\"is_canvas\") def test_it_adds_the_canvas_file_id(self, context, pyramid_request): canvas_file_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] ) class TestDBConfiguredBasicLTILaunch:", "conditions and then call BasicLTILaunchViews.canvas_file_basic_lti_launch(), and return whatever BasicLTILaunchViews.canvas_file_basic_lti_launch() returns. 
\"\"\" # The", ") @pytest.fixture def context(): context = mock.create_autospec(LTILaunchResource, spec_set=True, instance=True) context.js_config = mock.create_autospec(JSConfig, spec_set=True,", "\"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views = BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit tests", "view_caller, course_service ): view_caller(context, pyramid_request) course_service.get_or_create.assert_called_once_with( context.h_group.authority_provided_id ) @pytest.fixture( params=[ canvas_file_basic_lti_launch_caller, db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller,", "present when configure_module_item() is called. # ConfigureModuleItemSchema ensures this. pyramid_request.parsed_params = { \"document_url\":", "context, pyramid_request, grading_info_service, view_caller ): pyramid_request.lti_user = factories.LTIUser(roles=\"instructor\") view_caller(context, pyramid_request) grading_info_service.upsert_from_request.assert_not_called() @pytest.mark.usefixtures(\"is_canvas\") def", "be tested. This is a parametrized fixture. A test that uses this fixture", "test_it_adds_the_document_url( self, context, pyramid_request, ModuleItemConfiguration ): db_configured_basic_lti_launch_caller(context, pyramid_request) ModuleItemConfiguration.get_document_url.assert_called_once_with( pyramid_request.db, \"TEST_GUID\", \"TEST_RESOURCE_LINK_ID\" )", "instance=True) context.js_config = mock.create_autospec(JSConfig, spec_set=True, instance=True) context.is_canvas = False return context @pytest.fixture def", "BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit tests for BasicLTILaunchViews.__init__().\"\"\" def test_it_sets_the_focused_user(self, context,", "def form_fields(self): return { \"user_id\": \"TEST_USER_ID\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"context_id\":", "pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"url\"] ) class TestConfigureModuleItem: def test_it_saves_the_assignments_document_url_to_the_db( self, context, pyramid_request, ModuleItemConfiguration ):", "import LTILaunchResource from lms.resources._js_config import JSConfig from lms.views.basic_lti_launch import BasicLTILaunchViews from tests import", "self, context, pyramid_request, LtiLaunches, view_caller ): pyramid_request.params.update( { \"context_id\": \"TEST_CONTEXT_ID\", \"oauth_consumer_key\": \"TEST_OAUTH_CONSUMER_KEY\", }", "pyramid_request) return views.unconfigured_basic_lti_launch() def configure_module_item_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.configure_module_item(). Set up the appropriate", "that calls the view method to be tested. 
This is a parametrized fixture.", "pyramid_request, ModuleItemConfiguration ): db_configured_basic_lti_launch_caller(context, pyramid_request) ModuleItemConfiguration.get_document_url.assert_called_once_with( pyramid_request.db, \"TEST_GUID\", \"TEST_RESOURCE_LINK_ID\" ) context.js_config.add_document_url.assert_called_once_with( ModuleItemConfiguration.get_document_url.return_value )", "BasicLTILaunchViews( context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized() assert data == {} pytestmark = pytest.mark.usefixtures( \"ai_getter\", \"course_service\",", "db_configured_basic_lti_launch_caller, url_configured_basic_lti_launch_caller, unconfigured_basic_lti_launch_caller, ] ) def view_caller(self, request): \"\"\" Return a function that", "form_action=\"http://example.com/module_item_configurations\", form_fields=dict( self.form_fields(), authorization=bearer_token_schema.authorization_param.return_value, ), ) def form_fields(self): return { \"user_id\": \"TEST_USER_ID\", \"resource_link_id\":", "{ \"document_url\": \"TEST_DOCUMENT_URL\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\", \"tool_consumer_instance_guid\": \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", } views = BasicLTILaunchViews(context, pyramid_request) return", "= mock.create_autospec(LTILaunchResource, spec_set=True, instance=True) context.js_config = mock.create_autospec(JSConfig, spec_set=True, instance=True) context.is_canvas = False return", "class TestCommon: \"\"\" Tests common to multiple (but not all) BasicLTILaunchViews views. See", "def test_it_adds_the_document_url(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.add_document_url.assert_called_once_with( pyramid_request.parsed_params[\"url\"] ) class TestConfigureModuleItem: def test_it_saves_the_assignments_document_url_to_the_db(", "and return whatever BasicLTILaunchViews.configure_module_item() returns. \"\"\" # The document_url, resource_link_id and tool_consumer_instance_guid parsed", "} views = BasicLTILaunchViews(context, pyramid_request) return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit tests for BasicLTILaunchViews.__init__().\"\"\"", "def test_it_saves_the_assignments_document_url_to_the_db( self, context, pyramid_request, ModuleItemConfiguration ): configure_module_item_caller(context, pyramid_request) ModuleItemConfiguration.set_document_url.assert_called_once_with( pyramid_request.db, \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"TEST_RESOURCE_LINK_ID\",", "= BasicLTILaunchViews(context, pyramid_request) return views.unconfigured_basic_lti_launch() def configure_module_item_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.configure_module_item(). Set up", "TestUnconfiguredBasicLTILaunchNotAuthorized: def test_it_returns_the_right_template_data(self, context, pyramid_request): data = BasicLTILaunchViews( context, pyramid_request ).unconfigured_basic_lti_launch_not_authorized() assert data", "(but not all) BasicLTILaunchViews views. 
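# For readers new to parametrized fixtures, a minimal self-contained example
# (illustrative only, not part of this suite):
#
#     @pytest.fixture(params=[1, 2, 3])
#     def number(request):
#         return request.param  # the current parameter value
#
#     def test_number_is_positive(number):  # collected and run three times
#         assert number > 0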
class TestCourseRecording:
    def test_it_records_the_course_in_the_DB(
        self, context, pyramid_request, view_caller, course_service
    ):
        view_caller(context, pyramid_request)

        course_service.get_or_create.assert_called_once_with(
            context.h_group.authority_provided_id
        )

    @pytest.fixture(
        params=[
            canvas_file_basic_lti_launch_caller,
            db_configured_basic_lti_launch_caller,
            url_configured_basic_lti_launch_caller,
            unconfigured_basic_lti_launch_caller,
        ]
    )
    def view_caller(self, request):
        """
        Return a function that calls the view method to be tested.

        This is a parametrized fixture: a test that uses it runs multiple
        times, once for each parametrized version of the fixture.

        See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures
        """
        return request.param


@pytest.mark.usefixtures("is_canvas")
class TestCanvasFileBasicLTILaunch:
    def test_it_adds_the_canvas_file_id(self, context, pyramid_request):
        canvas_file_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.add_canvas_file_id.assert_called_once_with(
            pyramid_request.params["file_id"]
        )


class TestDBConfiguredBasicLTILaunch:
    def test_it_enables_frontend_grading(self, context, pyramid_request):
        db_configured_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.maybe_enable_grading.assert_called_once_with()

    def test_it_adds_the_document_url(
        self, context, pyramid_request, ModuleItemConfiguration
    ):
        db_configured_basic_lti_launch_caller(context, pyramid_request)

        ModuleItemConfiguration.get_document_url.assert_called_once_with(
            pyramid_request.db, "TEST_GUID", "TEST_RESOURCE_LINK_ID"
        )
        context.js_config.add_document_url.assert_called_once_with(
            ModuleItemConfiguration.get_document_url.return_value
        )


class TestURLConfiguredBasicLTILaunch:
    def test_it_enables_frontend_grading(self, context, pyramid_request):
        url_configured_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.maybe_enable_grading.assert_called_once_with()

    def test_it_adds_the_document_url(self, context, pyramid_request):
        url_configured_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.add_document_url.assert_called_once_with(
            pyramid_request.parsed_params["url"]
        )
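# In TestDBConfiguredBasicLTILaunch above, ModuleItemConfiguration is the
# autouse patch fixture defined at the bottom of this module, so its
# get_document_url attribute is a mock and the tests can assert that its
# return_value flows through to js_config unchanged.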
pyramid_request.parsed_params = { \"document_url\": \"TEST_DOCUMENT_URL\", \"resource_link_id\": \"TEST_RESOURCE_LINK_ID\",", "pyramid_request.params[\"oauth_consumer_key\"], ) @pytest.mark.usefixtures(\"user_is_learner\") def test_it_calls_grading_info_upsert( self, context, pyramid_request, grading_info_service, view_caller ): view_caller(context, pyramid_request)", "TestURLConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url(self, context, pyramid_request): url_configured_basic_lti_launch_caller(context,", "views = BasicLTILaunchViews(context, pyramid_request) return views.db_configured_basic_lti_launch() def url_configured_basic_lti_launch_caller(context, pyramid_request): \"\"\" Call BasicLTILaunchViews.url_configured_basic_lti_launch(). Set", "then call BasicLTILaunchViews.unconfigured_basic_lti_launch(), and return whatever BasicLTILaunchViews.unconfigured_basic_lti_launch() returns. \"\"\" views = BasicLTILaunchViews(context, pyramid_request)", "pyramid_request) context.js_config.add_canvas_file_id.assert_called_once_with( pyramid_request.params[\"file_id\"] ) class TestDBConfiguredBasicLTILaunch: def test_it_enables_frontend_grading(self, context, pyramid_request): db_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with()", "@pytest.fixture def pyramid_request(self, pyramid_request): pyramid_request.params = dict( self.form_fields(), oauth_nonce=\"TEST_OAUTH_NONCE\", oauth_timestamp=\"TEST_OAUTH_TIMESTAMP\", oauth_signature=\"TEST_OAUTH_SIGNATURE\", ) return", "pyramid_request.db, \"TEST_TOOL_CONSUMER_INSTANCE_GUID\", \"TEST_RESOURCE_LINK_ID\", \"TEST_DOCUMENT_URL\", ) def test_it_enables_frontend_grading(self, context, pyramid_request): configure_module_item_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def", "a parametrized fixture. A test that uses this fixture will be run multiple", "def test_it_enables_frontend_grading(self, context, pyramid_request): db_configured_basic_lti_launch_caller(context, pyramid_request) context.js_config.maybe_enable_grading.assert_called_once_with() def test_it_adds_the_document_url( self, context, pyramid_request, ModuleItemConfiguration", "return views.configure_module_item() class TestBasicLTILaunchViewsInit: \"\"\"Unit tests for BasicLTILaunchViews.__init__().\"\"\" def test_it_sets_the_focused_user(self, context, pyramid_request): BasicLTILaunchViews(context,", "called. The url_configured=True view # predicate and URLConfiguredBasicLTILaunchSchema ensure this. 
# robertknight/lms — unit tests for lms.views.basic_lti_launch

from unittest import mock

import pytest

from lms.resources import LTILaunchResource
from lms.resources._js_config import JSConfig
from lms.views.basic_lti_launch import BasicLTILaunchViews
from tests import factories


def canvas_file_basic_lti_launch_caller(context, pyramid_request):
    """
    Call BasicLTILaunchViews.canvas_file_basic_lti_launch().

    Set up the appropriate conditions and then call
    BasicLTILaunchViews.canvas_file_basic_lti_launch(), and return whatever
    BasicLTILaunchViews.canvas_file_basic_lti_launch() returns.
    """
    # The file_id param is always present when canvas_file_basic_lti_launch()
    # is called. The canvas_file=True view predicate ensures this.
    pyramid_request.params["file_id"] = "TEST_FILE_ID"

    views = BasicLTILaunchViews(context, pyramid_request)

    return views.canvas_file_basic_lti_launch()


def db_configured_basic_lti_launch_caller(context, pyramid_request):
    """
    Call BasicLTILaunchViews.db_configured_basic_lti_launch().

    Set up the appropriate conditions and then call
    BasicLTILaunchViews.db_configured_basic_lti_launch(), and return whatever
    BasicLTILaunchViews.db_configured_basic_lti_launch() returns.
    """
    views = BasicLTILaunchViews(context, pyramid_request)

    return views.db_configured_basic_lti_launch()


def url_configured_basic_lti_launch_caller(context, pyramid_request):
    """
    Call BasicLTILaunchViews.url_configured_basic_lti_launch().

    Set up the appropriate conditions and then call
    BasicLTILaunchViews.url_configured_basic_lti_launch(), and return whatever
    BasicLTILaunchViews.url_configured_basic_lti_launch() returns.
    """
    # The `url` parsed param is always present when
    # url_configured_basic_lti_launch() is called. The url_configured=True view
    # predicate and URLConfiguredBasicLTILaunchSchema ensure this.
    pyramid_request.parsed_params = {"url": "TEST_URL"}

    views = BasicLTILaunchViews(context, pyramid_request)

    return views.url_configured_basic_lti_launch()


def unconfigured_basic_lti_launch_caller(context, pyramid_request):
    """
    Call BasicLTILaunchViews.unconfigured_basic_lti_launch().

    Set up the appropriate conditions and then call
    BasicLTILaunchViews.unconfigured_basic_lti_launch(), and return whatever
    BasicLTILaunchViews.unconfigured_basic_lti_launch() returns.
    """
    views = BasicLTILaunchViews(context, pyramid_request)

    return views.unconfigured_basic_lti_launch()


def configure_module_item_caller(context, pyramid_request):
    """
    Call BasicLTILaunchViews.configure_module_item().

    Set up the appropriate conditions and then call
    BasicLTILaunchViews.configure_module_item(), and return whatever
    BasicLTILaunchViews.configure_module_item() returns.
    """
    # The document_url, resource_link_id and tool_consumer_instance_guid parsed
    # params are always present when configure_module_item() is called.
    # ConfigureModuleItemSchema ensures this.
    pyramid_request.parsed_params = {
        "document_url": "TEST_DOCUMENT_URL",
        "resource_link_id": "TEST_RESOURCE_LINK_ID",
        "tool_consumer_instance_guid": "TEST_TOOL_CONSUMER_INSTANCE_GUID",
    }

    views = BasicLTILaunchViews(context, pyramid_request)

    return views.configure_module_item()


class TestBasicLTILaunchViewsInit:
    """Unit tests for BasicLTILaunchViews.__init__()."""

    def test_it_sets_the_focused_user(self, context, pyramid_request):
        BasicLTILaunchViews(context, pyramid_request)

        context.js_config.maybe_set_focused_user.assert_called_once_with()


class TestCommon:
    """
    Tests common to multiple (but not all) BasicLTILaunchViews views.

    See the parametrized `view_caller` fixture below for the list of view
    methods that these tests apply to.
    """

    def test_it_reports_lti_launches(
        self, context, pyramid_request, LtiLaunches, view_caller
    ):
        pyramid_request.params.update(
            {
                "context_id": "TEST_CONTEXT_ID",
                "oauth_consumer_key": "TEST_OAUTH_CONSUMER_KEY",
            }
        )

        view_caller(context, pyramid_request)

        LtiLaunches.add.assert_called_once_with(
            pyramid_request.db,
            pyramid_request.params["context_id"],
            pyramid_request.params["oauth_consumer_key"],
        )

    @pytest.mark.usefixtures("user_is_learner")
    def test_it_calls_grading_info_upsert(
        self, context, pyramid_request, grading_info_service, view_caller
    ):
        view_caller(context, pyramid_request)

        grading_info_service.upsert_from_request.assert_called_once_with(
            pyramid_request,
            h_user=pyramid_request.lti_user.h_user,
            lti_user=pyramid_request.lti_user,
        )

    def test_it_does_not_call_grading_info_upsert_if_instructor(
        self, context, pyramid_request, grading_info_service, view_caller
    ):
        pyramid_request.lti_user = factories.LTIUser(roles="instructor")

        view_caller(context, pyramid_request)

        grading_info_service.upsert_from_request.assert_not_called()

    @pytest.mark.usefixtures("is_canvas")
    def test_it_does_not_call_grading_info_upsert_if_canvas(
        self, context, pyramid_request, grading_info_service, view_caller
    ):
        view_caller(context, pyramid_request)

        grading_info_service.upsert_from_request.assert_not_called()

    @pytest.fixture(
        params=[
            canvas_file_basic_lti_launch_caller,
            db_configured_basic_lti_launch_caller,
            url_configured_basic_lti_launch_caller,
            configure_module_item_caller,
        ]
    )
    def view_caller(self, request):
        """
        Return a function that calls the view method to be tested.

        This is a parametrized fixture. A test that uses this fixture will be
        run multiple times, once for each parametrized version of this
        fixture.

        See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures
        """
        return request.param


class TestCourseRecording:
    def test_it_records_the_course_in_the_DB(
        self, context, pyramid_request, view_caller, course_service
    ):
        view_caller(context, pyramid_request)

        course_service.get_or_create.assert_called_once_with(
            context.h_group.authority_provided_id
        )

    @pytest.fixture(
        params=[
            canvas_file_basic_lti_launch_caller,
            db_configured_basic_lti_launch_caller,
            url_configured_basic_lti_launch_caller,
            unconfigured_basic_lti_launch_caller,
        ]
    )
    def view_caller(self, request):
        """
        Return a function that calls the view method to be tested.

        This is a parametrized fixture. A test that uses this fixture will be
        run multiple times, once for each parametrized version of this
        fixture.

        See https://docs.pytest.org/en/latest/fixture.html#parametrizing-fixtures
        """
        return request.param


@pytest.mark.usefixtures("is_canvas")
class TestCanvasFileBasicLTILaunch:
    @pytest.mark.usefixtures("is_canvas")
    def test_it_adds_the_canvas_file_id(self, context, pyramid_request):
        canvas_file_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.add_canvas_file_id.assert_called_once_with(
            pyramid_request.params["file_id"]
        )


class TestDBConfiguredBasicLTILaunch:
    def test_it_enables_frontend_grading(self, context, pyramid_request):
        db_configured_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.maybe_enable_grading.assert_called_once_with()

    def test_it_adds_the_document_url(
        self, context, pyramid_request, ModuleItemConfiguration
    ):
        db_configured_basic_lti_launch_caller(context, pyramid_request)

        ModuleItemConfiguration.get_document_url.assert_called_once_with(
            pyramid_request.db, "TEST_GUID", "TEST_RESOURCE_LINK_ID"
        )
        context.js_config.add_document_url.assert_called_once_with(
            ModuleItemConfiguration.get_document_url.return_value
        )


class TestURLConfiguredBasicLTILaunch:
    def test_it_enables_frontend_grading(self, context, pyramid_request):
        url_configured_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.maybe_enable_grading.assert_called_once_with()

    def test_it_adds_the_document_url(self, context, pyramid_request):
        url_configured_basic_lti_launch_caller(context, pyramid_request)

        context.js_config.add_document_url.assert_called_once_with(
            pyramid_request.parsed_params["url"]
        )


class TestConfigureModuleItem:
    def test_it_saves_the_assignments_document_url_to_the_db(
        self, context, pyramid_request, ModuleItemConfiguration
    ):
        configure_module_item_caller(context, pyramid_request)

        ModuleItemConfiguration.set_document_url.assert_called_once_with(
            pyramid_request.db,
            "TEST_TOOL_CONSUMER_INSTANCE_GUID",
            "TEST_RESOURCE_LINK_ID",
            "TEST_DOCUMENT_URL",
        )

    def test_it_enables_frontend_grading(self, context, pyramid_request):
        configure_module_item_caller(context, pyramid_request)

        context.js_config.maybe_enable_grading.assert_called_once_with()

    def test_it_adds_the_document_url(self, context, pyramid_request):
        configure_module_item_caller(context, pyramid_request)

        context.js_config.add_document_url.assert_called_once_with(
            pyramid_request.parsed_params["document_url"]
        )


class TestUnconfiguredBasicLTILaunch:
    def test_it_enables_content_item_selection_mode(
        self, BearerTokenSchema, bearer_token_schema, context, pyramid_request
    ):
        unconfigured_basic_lti_launch_caller(context, pyramid_request)

        BearerTokenSchema.assert_called_once_with(pyramid_request)
        bearer_token_schema.authorization_param.assert_called_once_with(
            pyramid_request.lti_user
        )
        context.js_config.enable_content_item_selection_mode.assert_called_once_with(
            form_action="http://example.com/module_item_configurations",
            form_fields=dict(
                self.form_fields(),
                authorization=bearer_token_schema.authorization_param.return_value,
            ),
        )

    def form_fields(self):
        return {
            "user_id": "TEST_USER_ID",
            "resource_link_id": "TEST_RESOURCE_LINK_ID",
            "oauth_consumer_key": "TEST_OAUTH_CONSUMER_KEY",
            "tool_consumer_instance_guid": "TEST_TOOL_CONSUMER_INSTANCE_GUID",
            "context_id": "TEST_CONTEXT_ID",
        }

    @pytest.fixture
    def pyramid_request(self, pyramid_request):
        pyramid_request.params = dict(
            self.form_fields(),
            oauth_nonce="TEST_OAUTH_NONCE",
            oauth_timestamp="TEST_OAUTH_TIMESTAMP",
            oauth_signature="TEST_OAUTH_SIGNATURE",
        )
        return pyramid_request


class TestUnconfiguredBasicLTILaunchNotAuthorized:
    def test_it_returns_the_right_template_data(self, context, pyramid_request):
        data = BasicLTILaunchViews(
            context, pyramid_request
        ).unconfigured_basic_lti_launch_not_authorized()

        assert data == {}


pytestmark = pytest.mark.usefixtures(
    "ai_getter", "course_service", "h_api", "grading_info_service", "lti_h_service"
)


@pytest.fixture
def context():
    context = mock.create_autospec(LTILaunchResource, spec_set=True, instance=True)
    context.js_config = mock.create_autospec(JSConfig, spec_set=True, instance=True)
    context.is_canvas = False
    return context


@pytest.fixture
def is_canvas(context):
    """Set the LMS that launched us to Canvas."""
    context.is_canvas = True


@pytest.fixture
def pyramid_request(pyramid_request):
    pyramid_request.params.update(
        {
            "lis_result_sourcedid": "modelstudent-assignment1",
            "lis_outcome_service_url": "https://hypothesis.shinylms.com/outcomes",
        }
    )
    return pyramid_request


@pytest.fixture(autouse=True)
def BearerTokenSchema(patch):
    return patch("lms.views.basic_lti_launch.BearerTokenSchema")


@pytest.fixture(autouse=True)
def LtiLaunches(patch):
    return patch("lms.views.basic_lti_launch.LtiLaunches")


@pytest.fixture(autouse=True)
def ModuleItemConfiguration(patch):
    return patch("lms.views.basic_lti_launch.ModuleItemConfiguration")


@pytest.fixture
def bearer_token_schema(BearerTokenSchema):
    return BearerTokenSchema.return_value
[ "access admin page for unauthorized but authenticated users. \"\"\" def process_request(self, request): if", "<reponame>stfc/cvmfs-stratum-uploader from django.core.urlresolvers import reverse from django.http import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This", "reverse from django.http import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to access", "from django.core.urlresolvers import reverse from django.http import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware", "django.http import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to access admin page", "to access admin page for unauthorized but authenticated users. \"\"\" def process_request(self, request):", "middleware forbids to access admin page for unauthorized but authenticated users. \"\"\" def", "users. \"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if not request.user.is_authenticated() or not request.user.is_staff:", "from django.http import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to access admin", "\"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if not request.user.is_authenticated() or not request.user.is_staff: return", "authenticated users. \"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if not request.user.is_authenticated() or not", "for unauthorized but authenticated users. \"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if not", "This middleware forbids to access admin page for unauthorized but authenticated users. \"\"\"", "but authenticated users. \"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if not request.user.is_authenticated() or", "page for unauthorized but authenticated users. \"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if", "NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to access admin page for unauthorized but authenticated", "import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to access admin page for", "unauthorized but authenticated users. \"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if not request.user.is_authenticated()", "HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to access admin page for unauthorized", "import reverse from django.http import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to", "\"\"\" This middleware forbids to access admin page for unauthorized but authenticated users.", "forbids to access admin page for unauthorized but authenticated users. \"\"\" def process_request(self,", "django.core.urlresolvers import reverse from django.http import HttpResponsePermanentRedirect class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids", "admin page for unauthorized but authenticated users. 
\"\"\" def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'):", "class NoLoginAdminRedirectMiddleware: \"\"\" This middleware forbids to access admin page for unauthorized but", "def process_request(self, request): if request.META['PATH_INFO'].startswith('/admin'): if not request.user.is_authenticated() or not request.user.is_staff: return HttpResponsePermanentRedirect(reverse('index'))" ]
[ "= AudioAdapter.default() sample_rate = 22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ =", "#waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform", "sample_rate = 22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg',", "for loading audio waveform : from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile import write", "import write from pydub import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader", "#separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate = 22050 #waveform, _ = audio_loader.load('/path/to/audio/file',", "scipy.io.wavfile import write from pydub import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory')", "= audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform the separation", "from pydub import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default()", "pydub import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate", "<gh_stars>1-10 from spleeter.separator import Separator # Use audio loader explicitly for loading audio", "sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform the separation : prediction", "22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) #", "AudioAdapter from scipy.io.wavfile import write from pydub import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio',", "Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate = 22050 #waveform, _ =", "import AudioAdapter from scipy.io.wavfile import write from pydub import AudioSegment separator = Separator('spleeter:2stems')", "= Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate = 22050 #waveform, _", "spleeter.audio.adapter 
import AudioAdapter from scipy.io.wavfile import write from pydub import AudioSegment separator =", "loading audio waveform : from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile import write from", "AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate = 22050", "audio waveform : from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile import write from pydub", "waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform the separation : prediction =", "from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile import write from pydub import AudioSegment separator", "'/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate = 22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform,", "import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate =", "Separator # Use audio loader explicitly for loading audio waveform : from spleeter.audio.adapter", "waveform : from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile import write from pydub import", "audio_loader = AudioAdapter.default() sample_rate = 22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _", "AudioAdapter.default() sample_rate = 22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source", "# Use audio loader explicitly for loading audio waveform : from spleeter.audio.adapter import", "separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate = 22050 #waveform,", "audio loader explicitly for loading audio waveform : from spleeter.audio.adapter import AudioAdapter from", "Use audio loader explicitly for loading audio waveform : from spleeter.audio.adapter import AudioAdapter", "import Separator # Use audio loader explicitly for loading audio waveform : from", "spleeter.separator import Separator # Use audio loader explicitly for loading audio waveform :", "'2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader = AudioAdapter.default() sample_rate = 22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate)", "= audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform the separation : prediction = separator.separate(waveform) print(prediction)", "= 22050 #waveform, _ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate)", "_ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform the separation : prediction = separator.separate(waveform)", 
"loader explicitly for loading audio waveform : from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile", "from scipy.io.wavfile import write from pydub import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio',", "from spleeter.separator import Separator # Use audio loader explicitly for loading audio waveform", "_ = audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform the", "write from pydub import AudioSegment separator = Separator('spleeter:2stems') #separator.separate_to_file('/path/to/audio', '2stem_sep_audio')#separator.separate_to_file('/path/to/audio', '/path/to/output/directory') audio_loader =", "audio_loader.load('/path/to/audio/file', sample_rate=sample_rate) waveform, _ = audio_loader.load(r'C:\\Users\\ewais\\Documents\\GitHub\\tensor-hero\\Source Separation\\song.ogg', sample_rate=sample_rate) # Perform the separation :", ": from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile import write from pydub import AudioSegment", "explicitly for loading audio waveform : from spleeter.audio.adapter import AudioAdapter from scipy.io.wavfile import" ]
[ "with y2d.\" def test_grid3d(): x3d, y3d, z3d = magpie.grids.grid3d(10, 10) assert np.shape(x3d) ==", "z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], mins=[10., 20.,", "\"grid1d unexpected results.\" def test_grid2d(): x2d, y2d = magpie.grids.grid2d(10, 10) assert np.shape(x2d) ==", "== 0., \"pmid is inconsistent with p2d.\" def test_polarEA(): r, p = magpie.grids.polarEA_grid(10)", "\"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4)", "decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid),", "as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4)", "np.round(xedges[7], decimals=4) == 7., \"grid1d unexpected results.\" assert np.round(xmid[0], decimals=4) == 0.5 and", "results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y2d.\" x2d, y2d", "inconsistent with y2d.\" x2d, y2d = magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d) == (10,", "decimals=4) == 0.75, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is", "assert xedges[-1] - xedges[0] == 1., \"xedges range is incorrect.\" def test_xedges2mid(): xedges", "ymid = magpie.grids.grid2d([10, 20], [10, 20], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and", "assert xmid[0] == 0.05 and xmid[1] == 0.15 and xmid[5] == 0.55, \"xmid", "np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" # check polar def", "len(xmid)+1 == len(xedges), \"Length of xmid and xedges is not as expected.\" assert", "not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], return1d=True)", "== 0.05 and np.round(rmid[7], decimals=4) == 0.75, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4)", "xedges is not as expected.\" assert np.round(xedges[0], decimals=4) == 0. 
and np.round(xedges[7], decimals=4)", "z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True) assert np.round(xmid[0],", "decimals=2), \"Conversion from xmid to xedges is not consistent with input xedges.\" def", "0.05 and np.round(rmid[7], decimals=4) == 0.75, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) ==", "with xmin are not as expected.\" assert xedges[-1] - xedges[0] == 1., \"xedges", "0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2.,", "0., \"pmid is inconsistent with p2d.\" def test_polarEA(): r, p = magpie.grids.polarEA_grid(10) npix", "= magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area, decimals=4) == np.round(np.pi/4., decimals=4)), \"area calculation is incorrect.\"", "== (10, 20), \"shape is not as expected.\" x2d, y2d, xmid, ymid =", "(10, 10, 10), \"shape is not as expected.\" assert np.shape(z3d) == (10, 10,", "== 0., \"pmid is inconsistent with p2d.\" def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10,", "as np import magpie # check cartesian def test_get_xedges(): xedges = magpie.grids.get_xedges(1., 2)", "expected.\" assert np.shape(y3d) == (10, 10, 10), \"shape is not as expected.\" assert", "np.round(xmid[7], decimals=4) == 17.5, \"grid1d unexpected results.\" xmid, xedges = magpie.grids.grid1d(10., 10, return_edges=True)", "np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid2d unexpected results.\" assert", "ymid, zmid = magpie.grids.grid3d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7],", "zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], mins=[10., 20., 30], return1d=True) assert", "p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10, 20), \"shape is not as", "= np.round(xmid, decimals=2) assert len(xedges) == len(xmid) + 1, \"Length of xmid is", "\"shape is not as expected.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 20, return1d=True)", "xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], mins=[10., 20., 30],", "and p are not the same.\" assert len(r) == npix, \"Length of polarEA", "assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected results.\"", "x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], mins=[10., 20.], return1d=True) assert", "== np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\" assert", "xmid = magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) ==", "test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert np.round(np.sum(xedges-xedges2),", "== 0., \"pmid is inconsistent with p2d.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10,", "assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4) ==", "p = magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA grid size", "are not the same.\" assert len(r) == npix, \"Length of polarEA grid does", "assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y2d.\" x2d, y2d, xmid,", "assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid 
is inconsistent with p2d.\" r2d, p2d, rmid,", "as expected.\" assert xedges[-1] - xedges[0] == 1., \"xedges range is incorrect.\" def", "is inconsistent with y2d.\" def test_grid3d(): x3d, y3d, z3d = magpie.grids.grid3d(10, 10) assert", "xmid and xedges is not as expected.\" assert np.round(xedges[0], decimals=4) == 0. and", "\"ymid is inconsistent with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10,", "len(p), \"PolarEA grid size for r and p are not the same.\" assert", "np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid1d unexpected results.\" xmid,", "of polarEA grid does not match expectations.\" assert r[3*4**2] == 0.45, \"r values", "test_grid2d(): x2d, y2d = magpie.grids.grid2d(10, 10) assert np.shape(x2d) == (10, 10), \"shape is", "= magpie.grids.grid3d(10, 10) assert np.shape(x3d) == (10, 10, 10), \"shape is not as", "decimals=4) == 0., \"zmid is inconsistent with z3d.\" # check polar def test_polargrid():", "20), \"shape is not as expected.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 20,", "and xedges[-1]==0., \"xedges with xmin are not as expected.\" assert xedges[-1] - xedges[0]", "0., \"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7],", "[10, 20, 30], mins=[10., 20., 30], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and", "npix, \"Length of polarEA grid does not match expectations.\" r, p = magpie.grids.polarEA_grid(10,", "xedges[1]==-0.5 and xedges[-1]==0., \"xedges with xmin are not as expected.\" assert xedges[-1] -", "0., \"ymid is inconsistent with y2d.\" def test_grid3d(): x3d, y3d, z3d = magpie.grids.grid3d(10,", "z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20,", "0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and", "len(r) == npix, \"Length of polarEA grid does not match expectations.\" r, p", "xedges[-1]==0., \"xedges with xmin are not as expected.\" assert xedges[-1] - xedges[0] ==", "== 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent", "results.\" xmid, xedges = magpie.grids.grid1d(10., 10, return_edges=True) assert len(xmid)+1 == len(xedges), \"Length of", "and np.round(xmid[7], decimals=4) == 17.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0.,", "y2d, xmid, ymid = magpie.grids.grid2d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and", "is incorrect.\" xedges = magpie.grids.get_xedges(1., 2, xmin=-1.) xedges = np.round(xedges, decimals=2) assert xedges[0]==-1.", "20, 30), \"shape is not as expected.\" assert np.shape(y3d) == (10, 20, 30),", "assert np.shape(r2d) == (10, 20), \"shape is not as expected.\" assert np.shape(p2d) ==", "p = magpie.grids.polarEA_grid(10, base_nphi=3) npix = magpie.grids.polarEA_npix(10, base_nphi=3) assert len(r) == len(p), \"PolarEA", "assert np.shape(y2d) == (10, 20), \"shape is not as expected.\" x2d, y2d, xmid,", "== 0. 
and np.round(xedges[7], decimals=4) == 7., \"grid1d unexpected results.\" assert np.round(xmid[0], decimals=4)", "expected.\" assert np.shape(y2d) == (10, 10), \"shape is not as expected.\" x2d, y2d,", "with p2d.\" def test_polarEA(): r, p = magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert len(r)", "decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\"", "is inconsistent with p2d.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 10, rmin=10., rmax=20.,", "x3d, y3d, z3d = magpie.grids.grid3d(10, 10) assert np.shape(x3d) == (10, 10, 10), \"shape", "z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10., 20., 30.],", "inconsistent with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], mins=[10.,", "return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid2d unexpected", "xmin=10) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid1d unexpected", "0., \"ymid is inconsistent with y2d.\" x2d, y2d = magpie.grids.grid2d(10, [10, 20]) assert", "x3d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected", "== (10, 20, 30), \"shape is not as expected.\" assert np.shape(y3d) == (10,", "assert len(r) == npix, \"Length of polarEA grid does not match expectations.\" r,", "20, 30], mins=[10., 20., 30], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7],", "not consistent with input xedges.\" def test_grid1d(): xmid = magpie.grids.grid1d(10., 10) assert np.round(xmid[0],", "20, 30), \"shape is not as expected.\" assert np.shape(z3d) == (10, 20, 30),", "grid does not match expectations.\" assert r[3*4**2] == 0.45, \"r values are incorrect.\"", "\"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y2d.\"", "assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid3d unexpected results.\"", "== 37.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent", "and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected results.\" xmid = magpie.grids.grid1d(10., 10, xmin=10)", "rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True) assert np.round(rmid[0], decimals=4) == 10.5 and np.round(rmid[7], decimals=4) ==", "and np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0.,", "\"shape is not as expected.\" assert np.shape(p2d) == (10, 20), \"shape is not", "= magpie.grids.grid2d(10, 10, mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7],", "== 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent", "== 7.5, \"grid1d unexpected results.\" xmid = magpie.grids.grid1d(10., 10, xmin=10) assert np.round(xmid[0], decimals=4)", "30), \"shape is not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid =", "to xedges is not consistent with input xedges.\" def test_grid1d(): xmid = magpie.grids.grid1d(10.,", "\"shape is not as expected.\" assert np.shape(y2d) == (10, 10), \"shape is not", "7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), 
decimals=4) == 0., \"ymid is inconsistent with", "incorrect.\" assert np.round(p[3*7**2 + 7], decimals=4) == np.round(15*np.pi/(3*(2*7+1)), decimals=4), \"p values are incorrect.\"", "inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and", "def test_get_xedges(): xedges = magpie.grids.get_xedges(1., 2) xedges = np.round(xedges, decimals=2) assert len(xedges) ==", "10, xmin=10) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid1d", "27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with", "assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid", "10, 10), \"shape is not as expected.\" assert np.shape(z3d) == (10, 10, 10),", "10), \"shape is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10,", "is inconsistent with y2d.\" x2d, y2d = magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d) ==", "\"shape is not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10,", "np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d =", "np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) ==", "0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) ==", "# check polar def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) ==", "np.round(xedges, decimals=2) assert xedges[0]==-1. and xedges[1]==-0.5 and xedges[-1]==0., \"xedges with xmin are not", "and np.round(ymid[7], decimals=4) == 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0.,", "== (10, 10, 10), \"shape is not as expected.\" assert np.shape(y3d) == (10,", "= magpie.grids.grid1d(10., 10, xmin=10) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) ==", "np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d, xmid,", "np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid2d unexpected results.\" assert", "(10, 20, 30), \"shape is not as expected.\" assert np.shape(z3d) == (10, 20,", "unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d,", "np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y2d.\" x2d, y2d, xmid, ymid", "1., \"xedges range is incorrect.\" def test_xedges2mid(): xedges = magpie.grids.get_xedges(1., 10) xmid =", "20), \"shape is not as expected.\" assert np.shape(y2d) == (10, 20), \"shape is", "and np.round(xmid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0.,", "as expected.\" assert np.shape(y2d) == (10, 10), \"shape is not as expected.\" x2d,", "== (10, 20, 30), \"shape is not as expected.\" assert np.shape(z3d) == (10,", "assert np.round(np.sum(xedges-xedges2), decimals=2), \"Conversion from xmid to xedges is not consistent with input", "30.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid3d", "and xmid[5] 
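The asserted values fix the contract of these helpers: get_xedges(length, n) produces n + 1 uniform edges starting at xmin (default 0.), and xedges2mid / xmid2edges convert between edges and bin centres. Below is a hedged pure-numpy sketch of that contract, with hypothetical stand-in functions rather than magpie's own code:

import numpy as np

def get_xedges(length, n, xmin=0.):
    # n + 1 uniformly spaced edges covering [xmin, xmin + length]
    return np.linspace(xmin, xmin + length, n + 1)

def xedges2mid(xedges):
    # bin centres: average of adjacent edges
    return 0.5 * (xedges[:-1] + xedges[1:])

def xmid2edges(xmid):
    # inverse for uniform bins: step back half a spacing, then extend one step
    dx = xmid[1] - xmid[0]
    return np.concatenate([xmid - 0.5 * dx, [xmid[-1] + 0.5 * dx]])

# Reproduces the values the tests assert: xmid[0] == 0.05, xmid[5] == 0.55.
assert np.allclose(xedges2mid(get_xedges(1., 10))[[0, 5]], [0.05, 0.55])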
== 0.55, \"xmid is not as expected.\" def test_xmid2edges(): xedges =", "= magpie.grids.grid2d([10, 20], [10, 20], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7],", "== 1., \"xedges range is incorrect.\" def test_xedges2mid(): xedges = magpie.grids.get_xedges(1., 10) xmid", "is not as expected.\" assert np.shape(p2d) == (10, 20), \"shape is not as", "assert len(xmid)+1 == len(xedges), \"Length of xmid and xedges is not as expected.\"", "x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4)", "assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid2d unexpected results.\"", "xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4)", "np.shape(x2d) == (10, 10), \"shape is not as expected.\" assert np.shape(y2d) == (10,", "== (10, 20), \"shape is not as expected.\" assert np.shape(y2d) == (10, 20),", "decimals=4), \"p values are incorrect.\" assert np.round(p[3*7**2 + 7], decimals=4) == np.round(15*np.pi/(3*(2*7+1)), decimals=4),", "npix = magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA grid size for r and", "expectations.\" r, p = magpie.grids.polarEA_grid(6, base_nphi=3) npix = magpie.grids.polarEA_npix(6, base_nphi=3) assert len(r) ==", "0., \"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7],", "values are incorrect.\" assert np.round(p[3*7**2 + 7], decimals=4) == np.round(15*np.pi/(3*(2*7+1)), decimals=4), \"p values", "x2d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid2d unexpected", "magpie.grids.grid2d([10, 20], [10, 20], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4)", "results.\" assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected", "results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" def test_polarEA():", "expected.\" assert np.shape(y2d) == (10, 20), \"shape is not as expected.\" x2d, y2d,", "p = magpie.grids.polarEA_grid(6, base_nphi=3) npix = magpie.grids.polarEA_npix(6, base_nphi=3) assert len(r) == len(p), \"PolarEA", "magpie.grids.grid3d([10, 20, 30], [10, 20, 30], mins=[10., 20., 30], return1d=True) assert np.round(xmid[0], decimals=4)", "10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True) assert np.round(rmid[0], decimals=4) == 10.5 and np.round(rmid[7],", "10), \"shape is not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid =", "and np.round(xmid[7], decimals=4) == 17.5, \"grid1d unexpected results.\" xmid, xedges = magpie.grids.grid1d(10., 10,", "7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with", "magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True) assert np.round(rmid[0], decimals=4) == 10.5 and", "decimals=4) == 17.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0., \"xmid is", "- xedges[0] == 1., \"xedges range is incorrect.\" def test_xedges2mid(): xedges = magpie.grids.get_xedges(1.,", "with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10,", "np.round(rmid[0], decimals=4) == 0.05 and np.round(rmid[7], decimals=4) == 0.75, \"polargrid unexpected results.\" 
assert", "\"shape is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10,", "np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid2d unexpected results.\" assert", "expectations.\" assert r[3*4**2] == 0.45, \"r values are incorrect.\" assert r[3*7**2] == 0.75,", "10), \"shape is not as expected.\" assert np.shape(y3d) == (10, 10, 10), \"shape", "decimals=2) assert xedges[0]==-1. and xedges[1]==-0.5 and xedges[-1]==0., \"xedges with xmin are not as", "match expectations.\" r, p = magpie.grids.polarEA_grid(6, base_nphi=3) npix = magpie.grids.polarEA_npix(6, base_nphi=3) assert len(r)", "(10, 10, 10), \"shape is not as expected.\" x3d, y3d, z3d, xmid, ymid,", "20, 30), \"shape is not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid", "not the same.\" assert len(r) == npix, \"Length of polarEA grid does not", "== 0.15 and xmid[5] == 0.55, \"xmid is not as expected.\" def test_xmid2edges():", "expected.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, return1d=True) assert np.round(xmid[0],", "0., \"ymid is inconsistent with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20],", "xmid, ymid = magpie.grids.grid2d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7],", "with r2d.\" assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20,", "np.shape(y2d) == (10, 20), \"shape is not as expected.\" x2d, y2d, xmid, ymid", "== 30.5 and np.round(zmid[7], decimals=4) == 37.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4)", "== np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2.,", "is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) ==", "is not as expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges)", "np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are incorrect.\" assert np.round(p[3*7**2 + 7],", "results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y3d.\" assert np.round(zmid[0],", "decimals=4) == 30.5 and np.round(zmid[7], decimals=4) == 37.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid),", "decimals=4) == 0., \"ymid is inconsistent with y3d.\" assert np.round(zmid[0], decimals=4) == 0.5", "== 0.75, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent", "xmid = magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert np.round(np.sum(xedges-xedges2), decimals=2), \"Conversion from xmid to", "np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20", "with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, mins=[10., 20.], return1d=True) assert", "20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) ==", "= magpie.grids.polarEA_npix(10, base_nphi=3) assert len(r) == len(p), \"PolarEA grid size for r and", "2, xmin=-1.) xedges = np.round(xedges, decimals=2) assert xedges[0]==-1. 
and xedges[1]==-0.5 and xedges[-1]==0., \"xedges", "assert len(r) == len(p), \"PolarEA grid size for r and p are not", "np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), \"polargrid unexpected", "np.round(15*np.pi/(3*(2*7+1)), decimals=4), \"p values are incorrect.\" area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area, decimals=4)", "+ np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), \"polargrid", "\\ and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), \"polargrid unexpected results.\" assert", "xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5", "== 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected results.\" def test_grid2d(): x2d,", "magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA grid size for r and p are", "= magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert np.round(np.sum(xedges-xedges2), decimals=2), \"Conversion from xmid to xedges", "xedges is incorrect.\" assert xedges[-1] - xedges[0] == 1., \"xedges range is incorrect.\"", "assert len(r) == npix, \"Length of polarEA grid does not match expectations.\" assert", "== np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are incorrect.\" assert np.round(p[3*7**2 + 7], decimals=4) ==", "np import magpie # check cartesian def test_get_xedges(): xedges = magpie.grids.get_xedges(1., 2) xedges", "np.shape(r2d) == (10, 20), \"shape is not as expected.\" assert np.shape(p2d) == (10,", "y3d, z3d = magpie.grids.grid3d(10, [10, 20, 30]) assert np.shape(x3d) == (10, 20, 30),", "\"ymid is inconsistent with y2d.\" x2d, y2d = magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d)", "zmid = magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5", "10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) ==", "with x2d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid2d", "= magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert np.round(np.sum(xedges-xedges2), decimals=2), \"Conversion", "np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid", "0.75, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent with", "xmin=-1.) xedges = np.round(xedges, decimals=2) assert xedges[0]==-1. 
and xedges[1]==-0.5 and xedges[-1]==0., \"xedges with", "is inconsistent with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20],", "and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) ==", "decimals=4) == 0., \"ymid is inconsistent with y2d.\" def test_grid3d(): x3d, y3d, z3d", "\"shape is not as expected.\" assert np.shape(y2d) == (10, 20), \"shape is not", "xmid, ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True) assert np.round(xmid[0], decimals=4)", "def test_grid1d(): xmid = magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7],", "== 17.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent", "37.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with", "20, 30], [10, 20, 30], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7],", "x2d, y2d = magpie.grids.grid2d(10, 10) assert np.shape(x2d) == (10, 10), \"shape is not", "np.round(ymid[7], decimals=4) == 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid", "decimals=4) == 0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20", "z3d = magpie.grids.grid3d(10, [10, 20, 30]) assert np.shape(x3d) == (10, 20, 30), \"shape", "xmid[5] == 0.55, \"xmid is not as expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1.,", "xedges = np.round(xedges, decimals=2) assert len(xedges) == 3, \"Length of xedges is incorrect.\"", "decimals=4) == 0., \"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 20.5", "== 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent", "= magpie.grids.grid2d([10, 20], [10, 20], mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5", "x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4)", "with z3d.\" # check polar def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert", "np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid unexpected", "(10, 20, 30), \"shape is not as expected.\" x3d, y3d, z3d, xmid, ymid,", "results.\" xmid = magpie.grids.grid1d(10., 10, xmin=10) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7],", "np.shape(p2d) == (10, 20), \"shape is not as expected.\" r2d, p2d, rmid, pmid", "r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True) assert", "= magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True) assert np.round(rmid[0], decimals=4) == 10.5", "decimals=4) == 10.5 and np.round(rmid[7], decimals=4) == 17.5, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid),", "len(xedges) == 3, \"Length of xedges is incorrect.\" assert xedges[-1] - xedges[0] ==", "decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 +", "\"polargrid unexpected results.\" assert 
np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\"", "xedges[0] == 1., \"xedges range is incorrect.\" xedges = magpie.grids.get_xedges(1., 2, xmin=-1.) xedges", "+ 1, \"Length of xmid is incorrect.\" assert xmid[0] == 0.05 and xmid[1]", "assert np.shape(x2d) == (10, 20), \"shape is not as expected.\" assert np.shape(y2d) ==", "are not as expected.\" assert xedges[-1] - xedges[0] == 1., \"xedges range is", "assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid2d unexpected results.\"", "inconsistent with y3d.\" assert np.round(zmid[0], decimals=4) == 0.5 and np.round(zmid[7], decimals=4) == 7.5,", "z3d = magpie.grids.grid3d(10, 10) assert np.shape(x3d) == (10, 10, 10), \"shape is not", "\"shape is not as expected.\" assert np.shape(y3d) == (10, 20, 30), \"shape is", "0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected results.\" def test_grid2d(): x2d, y2d", "= magpie.grids.grid3d([10, 20, 30], [10, 20, 30], mins=[10., 20., 30], return1d=True) assert np.round(xmid[0],", "= magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10, 20), \"shape is not as expected.\"", "10) assert np.shape(x2d) == (10, 10), \"shape is not as expected.\" assert np.shape(y2d)", "expected.\" assert np.shape(y3d) == (10, 20, 30), \"shape is not as expected.\" assert", "np.round(xmid[7], decimals=4) == 17.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0., \"xmid", "r, p = magpie.grids.polarEA_grid(10, base_nphi=3) npix = magpie.grids.polarEA_npix(10, base_nphi=3) assert len(r) == len(p),", "\"shape is not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10,", "decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d = magpie.grids.grid3d(10,", "np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) ==", "== (10, 20, 30), \"shape is not as expected.\" x3d, y3d, z3d, xmid,", "y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, mins=[10., 20.], return1d=True) assert np.round(xmid[0],", "y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], mins=[10., 20.], return1d=True) assert np.round(xmid[0],", "r and p are not the same.\" assert len(r) == npix, \"Length of", "and xmid[1] == 0.15 and xmid[5] == 0.55, \"xmid is not as expected.\"", "area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area, decimals=4) == np.round(np.pi/4., decimals=4)), \"area calculation is", "0.55, \"xmid is not as expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid", "assert r[3*7**2] == 0.75, \"r values are incorrect.\" assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)),", "\"PolarEA grid size for r and p are not the same.\" assert len(r)", "np.round(xedges, decimals=2) assert len(xedges) == 3, \"Length of xedges is incorrect.\" assert xedges[-1]", "with x2d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid2d", "0.75, \"r values are incorrect.\" assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values", "10.5 and np.round(rmid[7], decimals=4) == 17.5, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) ==", "np.round(rmid[0], decimals=4) == 10.5 and np.round(rmid[7], decimals=4) == 17.5, \"polargrid unexpected results.\" assert", "== 10.5 and np.round(xmid[7], 
decimals=4) == 17.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4)", "0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid =", "polarEA grid does not match expectations.\" r, p = magpie.grids.polarEA_grid(10, base_nphi=3) npix =", "not as expected.\" assert np.shape(z3d) == (10, 10, 10), \"shape is not as", "decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is", "== (10, 20), \"shape is not as expected.\" assert np.shape(p2d) == (10, 20),", "not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, return1d=True) assert np.round(xmid[0],", "0., \"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7],", "10) assert np.shape(x3d) == (10, 10, 10), \"shape is not as expected.\" assert", "not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, return1d=True)", "(10, 20, 30), \"shape is not as expected.\" assert np.shape(y3d) == (10, 20,", "\"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4)", "y2d = magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d) == (10, 20), \"shape is not", "\"r values are incorrect.\" assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are", "\"grid1d unexpected results.\" xmid = magpie.grids.grid1d(10., 10, xmin=10) assert np.round(xmid[0], decimals=4) == 10.5", "decimals=4) == 27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is", "== 0., \"ymid is inconsistent with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10,", "== 3, \"Length of xedges is incorrect.\" assert xedges[-1] - xedges[0] == 1.,", "magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d) == (10, 20), \"shape is not as expected.\"", "== 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent", "and xedges is not as expected.\" assert np.round(xedges[0], decimals=4) == 0. 
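The flatten/unique checks in test_grid2d and test_grid3d only pass if each 2-D field is the corresponding 1-D midpoint array broadcast across the lattice. The following is a minimal illustration of that property in plain numpy; the equivalence to matrix-style 'ij' meshgrid indexing is an inference from the assertions, not magpie's implementation:

import numpy as np

# Assumed equivalence: grid2d(10, 10, return1d=True) behaves like a meshgrid
# of its own midpoints (10 unit cells spanning [0, 10] on each axis).
xmid = np.arange(10) + 0.5
ymid = np.arange(10) + 0.5
x2d, y2d = np.meshgrid(xmid, ymid, indexing='ij')
assert x2d.shape == (10, 10)
# Each row of x2d is constant, so its sorted unique values recover xmid exactly:
assert np.round(np.sum(np.unique(x2d.flatten()) - xmid), 4) == 0.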
# check polar

def test_polargrid():
    r2d, p2d = magpie.grids.polargrid(10, 20)
    assert np.shape(r2d) == (10, 20), "shape is not as expected."
    assert np.shape(p2d) == (10, 20), "shape is not as expected."
    r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 20, return1d=True)
    assert np.round(rmid[0], decimals=4) == 0.05 and np.round(rmid[7], decimals=4) == 0.75, "polargrid unexpected results."
    assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., "rmid is inconsistent with r2d."
    assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), "polargrid unexpected results."
    assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., "pmid is inconsistent with p2d."
    r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True)
    assert np.round(rmid[0], decimals=4) == 10.5 and np.round(rmid[7], decimals=4) == 17.5, "polargrid unexpected results."
    assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., "rmid is inconsistent with r2d."
    assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \
        and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), "polargrid unexpected results."
    assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., "pmid is inconsistent with p2d."

def test_polarEA():
    r, p = magpie.grids.polarEA_grid(10)
    npix = magpie.grids.polarEA_npix(10)
    assert len(r) == len(p), "PolarEA grid size for r and p are not the same."
    assert len(r) == npix, "Length of polarEA grid does not match expectations."
    r, p = magpie.grids.polarEA_grid(6, base_nphi=3)
    npix = magpie.grids.polarEA_npix(6, base_nphi=3)
    assert len(r) == len(p), "PolarEA grid size for r and p are not the same."
    assert len(r) == npix, "Length of polarEA grid does not match expectations."
    r, p = magpie.grids.polarEA_grid(10, base_nphi=3)
    npix = magpie.grids.polarEA_npix(10, base_nphi=3)
    assert len(r) == len(p), "PolarEA grid size for r and p are not the same."
    assert len(r) == npix, "Length of polarEA grid does not match expectations."
    assert r[3*4**2] == 0.45, "r values are incorrect."
    assert r[3*7**2] == 0.75, "r values are incorrect."
    assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), "p values are incorrect."
    assert np.round(p[3*7**2 + 7], decimals=4) == np.round(15*np.pi/(3*(2*7+1)), decimals=4), "p values are incorrect."
    area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4)
    assert np.round(area, decimals=4) == np.round(np.pi/4., decimals=4), "area calculation is incorrect."
10,", "== len(xedges), \"Length of xmid and xedges is not as expected.\" assert np.round(xedges[0],", "decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) ==", "as expected.\" assert np.round(xedges[0], decimals=4) == 0. and np.round(xedges[7], decimals=4) == 7., \"grid1d", "unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y2d.\" def", "== 27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent", "inconsistent with y2d.\" def test_grid3d(): x3d, y3d, z3d = magpie.grids.grid3d(10, 10) assert np.shape(x3d)", "ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True) assert np.round(xmid[0], decimals=4) ==", "values are incorrect.\" assert r[3*7**2] == 0.75, \"r values are incorrect.\" assert np.round(p[3*4**2],", "np.round(ymid[7], decimals=4) == 27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid", "assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid2d unexpected results.\"", "decimals=4) == 0., \"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 0.5", "7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with", "np.shape(x3d) == (10, 20, 30), \"shape is not as expected.\" assert np.shape(y3d) ==", "assert np.shape(y3d) == (10, 20, 30), \"shape is not as expected.\" assert np.shape(z3d)", "xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xmid = np.round(xmid, decimals=2) assert len(xedges)", "0., \"ymid is inconsistent with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10,", "range is incorrect.\" def test_xedges2mid(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xmid", "\"zmid is inconsistent with z3d.\" # check polar def test_polargrid(): r2d, p2d =", "assert np.shape(p2d) == (10, 20), \"shape is not as expected.\" r2d, p2d, rmid,", "np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" r2d, p2d, rmid, pmid", "y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], mins=[10., 20.], return1d=True)", "20), \"shape is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20],", "r2d.\" assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4),", "assert np.round(p[3*7**2 + 7], decimals=4) == np.round(15*np.pi/(3*(2*7+1)), decimals=4), \"p values are incorrect.\" area", "results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" # check", "== 17.5, \"grid1d unexpected results.\" xmid, xedges = magpie.grids.grid1d(10., 10, return_edges=True) assert len(xmid)+1", "np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4),", "are incorrect.\" assert r[3*7**2] == 0.75, \"r values are incorrect.\" assert np.round(p[3*4**2], decimals=4)", "magpie.grids.grid2d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5,", "assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., 
\"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4)", "does not match expectations.\" assert r[3*4**2] == 0.45, \"r values are incorrect.\" assert", "and np.round(rmid[7], decimals=4) == 17.5, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0.,", "1, \"Length of xmid is incorrect.\" assert xmid[0] == 0.05 and xmid[1] ==", "20, 30]) assert np.shape(x3d) == (10, 20, 30), \"shape is not as expected.\"", "\"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent with r2d.\"", "== 27.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent", "def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10, 20), \"shape", "p are not the same.\" assert len(r) == npix, \"Length of polarEA grid", "\"shape is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, return1d=True)", "np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid),", "= magpie.grids.xedges2mid(xedges) xmid = np.round(xmid, decimals=2) assert len(xedges) == len(xmid) + 1, \"Length", "== (10, 10, 10), \"shape is not as expected.\" x3d, y3d, z3d, xmid,", "grid size for r and p are not the same.\" assert len(r) ==", "and np.round(xedges[7], decimals=4) == 7., \"grid1d unexpected results.\" assert np.round(xmid[0], decimals=4) == 0.5", "with y3d.\" assert np.round(zmid[0], decimals=4) == 0.5 and np.round(zmid[7], decimals=4) == 7.5, \"grid3d", "test_grid1d(): xmid = magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4)", "+ np.pi/2., decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is", "as expected.\" assert np.shape(y2d) == (10, 20), \"shape is not as expected.\" x2d,", "np.shape(z3d) == (10, 20, 30), \"shape is not as expected.\" x3d, y3d, z3d,", "\"xedges with xmin are not as expected.\" assert xedges[-1] - xedges[0] == 1.,", "decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected results.\" xmid =", "is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, return1d=True) assert", "with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], mins=[10., 20.],", "not match expectations.\" r, p = magpie.grids.polarEA_grid(10, base_nphi=3) npix = magpie.grids.polarEA_npix(10, base_nphi=3) assert", "magpie.grids.polargrid(10, 20, return1d=True) assert np.round(rmid[0], decimals=4) == 0.05 and np.round(rmid[7], decimals=4) == 0.75,", "== len(xmid) + 1, \"Length of xmid is incorrect.\" assert xmid[0] == 0.05", "values are incorrect.\" assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are incorrect.\"", "inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7], decimals=4) ==", "= np.round(xedges, decimals=2) assert xedges[0]==-1. 
and xedges[1]==-0.5 and xedges[-1]==0., \"xedges with xmin are", "= magpie.grids.grid2d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) ==", "r[3*4**2] == 0.45, \"r values are incorrect.\" assert r[3*7**2] == 0.75, \"r values", "decimals=2) assert len(xedges) == len(xmid) + 1, \"Length of xmid is incorrect.\" assert", "magpie.grids.polarEA_grid(10, base_nphi=3) npix = magpie.grids.polarEA_npix(10, base_nphi=3) assert len(r) == len(p), \"PolarEA grid size", "are incorrect.\" area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area, decimals=4) == np.round(np.pi/4., decimals=4)), \"area", "with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10., 20.,", "== (10, 10), \"shape is not as expected.\" x2d, y2d, xmid, ymid =", "pmid = magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True) assert np.round(rmid[0], decimals=4) ==", "assert len(xedges) == len(xmid) + 1, \"Length of xmid is incorrect.\" assert xmid[0]", "20., 30.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5,", "= magpie.grids.polargrid(10, 20, return1d=True) assert np.round(rmid[0], decimals=4) == 0.05 and np.round(rmid[7], decimals=4) ==", "expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid)", "= magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5,", "== 0.5 and np.round(zmid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4)", "decimals=4) == 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid),", "== 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4)", "as expected.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10,", "r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10, 20), \"shape is not", "grid does not match expectations.\" r, p = magpie.grids.polarEA_grid(6, base_nphi=3) npix = magpie.grids.polarEA_npix(6,", "magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7],", "zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], return1d=True) assert np.round(xmid[0], decimals=4) ==", "phimin=np.pi/2., phimax=np.pi, return1d=True) assert np.round(rmid[0], decimals=4) == 10.5 and np.round(rmid[7], decimals=4) == 17.5,", "30], mins=[10., 20., 30], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4)", "expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, return1d=True) assert np.round(xmid[0], decimals=4) ==", "is inconsistent with z3d.\" x3d, y3d, z3d = magpie.grids.grid3d(10, [10, 20, 30]) assert", "\"Length of xedges is incorrect.\" assert xedges[-1] - xedges[0] == 1., \"xedges range", "decimals=4) == 7., \"grid1d unexpected results.\" assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7],", "of xedges is incorrect.\" assert xedges[-1] - xedges[0] == 1., \"xedges range is", "assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid3d unexpected results.\"", "== 0., \"ymid is inconsistent with y2d.\" x2d, y2d, xmid, ymid = 
magpie.grids.grid2d(10,", "== 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4)", "expected.\" assert np.shape(p2d) == (10, 20), \"shape is not as expected.\" r2d, p2d,", "decimals=4) == 0.05 and np.round(rmid[7], decimals=4) == 0.75, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid),", "assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4)", "decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d, xmid, ymid,", "import numpy as np import magpie # check cartesian def test_get_xedges(): xedges =", "20), \"shape is not as expected.\" assert np.shape(p2d) == (10, 20), \"shape is", "10, return_edges=True) assert len(xmid)+1 == len(xedges), \"Length of xmid and xedges is not", "decimals=2) assert len(xedges) == 3, \"Length of xedges is incorrect.\" assert xedges[-1] -", "is not as expected.\" assert np.shape(z3d) == (10, 20, 30), \"shape is not", "assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d,", "== 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid1d unexpected results.\" xmid, xedges =", "decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid is", "unexpected results.\" def test_grid2d(): x2d, y2d = magpie.grids.grid2d(10, 10) assert np.shape(x2d) == (10,", "results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" r2d, p2d,", "decimals=4) == 0., \"pmid is inconsistent with p2d.\" def test_polargrid(): r2d, p2d =", "decimals=4) == 17.5, \"grid1d unexpected results.\" xmid, xedges = magpie.grids.grid1d(10., 10, return_edges=True) assert", "== 0.55, \"xmid is not as expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10)", "== 0., \"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 0.5 and", "results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d, y3d,", "ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], return1d=True) assert np.round(xmid[0], decimals=4)", "np.shape(y3d) == (10, 10, 10), \"shape is not as expected.\" assert np.shape(z3d) ==", "and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected results.\" def test_grid2d(): x2d, y2d =", "assert len(xedges) == 3, \"Length of xedges is incorrect.\" assert xedges[-1] - xedges[0]", "is not as expected.\" assert np.round(xedges[0], decimals=4) == 0. 
and np.round(xedges[7], decimals=4) ==", "= magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and", "20], mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) ==", "check polar def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10,", "with y2d.\" x2d, y2d = magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d) == (10, 20),", "xedges.\" def test_grid1d(): xmid = magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4) == 0.5 and", "= magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d) == (10, 20), \"shape is not as", "0., \"ymid is inconsistent with y3d.\" assert np.round(zmid[0], decimals=4) == 0.5 and np.round(zmid[7],", "\"zmid is inconsistent with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10,", "\"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4) and np.round(pmid[7],", "magpie.grids.grid2d(10, 10) assert np.shape(x2d) == (10, 10), \"shape is not as expected.\" assert", "is not as expected.\" assert np.shape(y2d) == (10, 20), \"shape is not as", "\"xedges range is incorrect.\" xedges = magpie.grids.get_xedges(1., 2, xmin=-1.) xedges = np.round(xedges, decimals=2)", "magpie.grids.xedges2mid(xedges) xmid = np.round(xmid, decimals=2) assert len(xedges) == len(xmid) + 1, \"Length of", "== 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4)", "test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10, 20), \"shape is", "\"shape is not as expected.\" assert np.shape(y3d) == (10, 10, 10), \"shape is", "inconsistent with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30],", "assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d", "np.round(zmid[0], decimals=4) == 30.5 and np.round(zmid[7], decimals=4) == 37.5, \"grid3d unexpected results.\" assert", "decimals=4) == 0., \"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 0.5", "assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are incorrect.\" assert np.round(p[3*7**2 +", "is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) ==", "= np.round(xedges, decimals=2) assert len(xedges) == 3, \"Length of xedges is incorrect.\" assert", "is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4) ==", "not as expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xedges2", "with y3d.\" assert np.round(zmid[0], decimals=4) == 30.5 and np.round(zmid[7], decimals=4) == 37.5, \"grid3d", "== 0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round(np.pi/20, decimals=4)", "== 0., \"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 20.5 and", "magpie.grids.polarEA_grid(6, base_nphi=3) npix = magpie.grids.polarEA_npix(6, base_nphi=3) assert len(r) == len(p), \"PolarEA grid size", "x2d, y2d = magpie.grids.grid2d(10, [10, 20]) assert np.shape(x2d) == (10, 20), \"shape is", "decimals=4) == 0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round(np.pi/20,", "xmid is incorrect.\" assert xmid[0] == 0.05 and 
xmid[1] == 0.15 and xmid[5]", "decimals=4) == 0., \"ymid is inconsistent with y2d.\" x2d, y2d, xmid, ymid =", "30], [10, 20, 30], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4)", "\"Length of polarEA grid does not match expectations.\" r, p = magpie.grids.polarEA_grid(6, base_nphi=3)", "(10, 10, 10), \"shape is not as expected.\" assert np.shape(y3d) == (10, 10,", "cartesian def test_get_xedges(): xedges = magpie.grids.get_xedges(1., 2) xedges = np.round(xedges, decimals=2) assert len(xedges)", "np.round(zmid[0], decimals=4) == 0.5 and np.round(zmid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert", "10, mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) ==", "expected.\" assert xedges[-1] - xedges[0] == 1., \"xedges range is incorrect.\" def test_xedges2mid():", "assert r[3*4**2] == 0.45, \"r values are incorrect.\" assert r[3*7**2] == 0.75, \"r", "is incorrect.\" assert xedges[-1] - xedges[0] == 1., \"xedges range is incorrect.\" xedges", "polar def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10, 20),", "len(xedges) == len(xmid) + 1, \"Length of xmid is incorrect.\" assert xmid[0] ==", "\"xedges range is incorrect.\" def test_xedges2mid(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges)", "rmid, pmid = magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True) assert np.round(rmid[0], decimals=4)", "decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid),", "20], [10, 20], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) ==", "np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid", "results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0],", "is not as expected.\" assert np.shape(y3d) == (10, 10, 10), \"shape is not", "np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are incorrect.\" assert np.round(p[3*7**2 + 7], decimals=4) == np.round(15*np.pi/(3*(2*7+1)),", "10), \"shape is not as expected.\" assert np.shape(z3d) == (10, 10, 10), \"shape", "ymid = magpie.grids.grid2d(10, 10, mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and", "xedges[0]==-1. 
and xedges[1]==-0.5 and xedges[-1]==0., \"xedges with xmin are not as expected.\" assert", "0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) ==", "def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert", "== 17.5, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent", "not as expected.\" assert np.shape(y3d) == (10, 10, 10), \"shape is not as", "mins=[10., 20., 30], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) ==", "check cartesian def test_get_xedges(): xedges = magpie.grids.get_xedges(1., 2) xedges = np.round(xedges, decimals=2) assert", "np.round(rmid[7], decimals=4) == 0.75, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid", "0., \"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7],", "decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid),", "as expected.\" assert np.shape(z3d) == (10, 20, 30), \"shape is not as expected.\"", "\"zmid is inconsistent with z3d.\" x3d, y3d, z3d = magpie.grids.grid3d(10, [10, 20, 30])", "assert np.round(rmid[0], decimals=4) == 0.05 and np.round(rmid[7], decimals=4) == 0.75, \"polargrid unexpected results.\"", "assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" def test_polargrid(): r2d,", "= magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA grid size for", "test_get_xedges(): xedges = magpie.grids.get_xedges(1., 2) xedges = np.round(xedges, decimals=2) assert len(xedges) == 3,", "inconsistent with p2d.\" def test_polarEA(): r, p = magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert", "not as expected.\" assert np.shape(z3d) == (10, 20, 30), \"shape is not as", "expectations.\" r, p = magpie.grids.polarEA_grid(10, base_nphi=3) npix = magpie.grids.polarEA_npix(10, base_nphi=3) assert len(r) ==", "= magpie.grids.grid3d([10, 20, 30], [10, 20, 30], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5", "of xmid is incorrect.\" assert xmid[0] == 0.05 and xmid[1] == 0.15 and", "as expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xedges2 =", "np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4)", "np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert", "np.shape(z3d) == (10, 10, 10), \"shape is not as expected.\" x3d, y3d, z3d,", "np.round(pmid[7], decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0.,", "for r and p are not the same.\" assert len(r) == npix, \"Length", "np.round(ymid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid", "range is incorrect.\" xedges = magpie.grids.get_xedges(1., 2, xmin=-1.) 
xedges = np.round(xedges, decimals=2) assert", "0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) ==", "assert np.shape(z3d) == (10, 20, 30), \"shape is not as expected.\" x3d, y3d,", "len(r) == len(p), \"PolarEA grid size for r and p are not the", "assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid3d unexpected results.\"", "np.round(rmid[7], decimals=4) == 17.5, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid", "\"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent with x2d.\"", "decimals=4) == 0. and np.round(xedges[7], decimals=4) == 7., \"grid1d unexpected results.\" assert np.round(xmid[0],", "[10, 20]) assert np.shape(x2d) == (10, 20), \"shape is not as expected.\" assert", "== 0.45, \"r values are incorrect.\" assert r[3*7**2] == 0.75, \"r values are", "as expected.\" assert np.shape(z3d) == (10, 10, 10), \"shape is not as expected.\"", "and np.round(ymid[7], decimals=4) == 27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0.,", "xmid[0] == 0.05 and xmid[1] == 0.15 and xmid[5] == 0.55, \"xmid is", "r2d.\" assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and np.round(pmid[7], decimals=4)", "not as expected.\" assert np.shape(y2d) == (10, 10), \"shape is not as expected.\"", "values are incorrect.\" area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area, decimals=4) == np.round(np.pi/4., decimals=4)),", "2) xedges = np.round(xedges, decimals=2) assert len(xedges) == 3, \"Length of xedges is", "xmid = magpie.grids.xedges2mid(xedges) xmid = np.round(xmid, decimals=2) assert len(xedges) == len(xmid) + 1,", "(10, 20), \"shape is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10,", "[10, 20], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5,", "decimals=4) == 37.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is", "test_polarEA(): r, p = magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA", "\"Length of xmid and xedges is not as expected.\" assert np.round(xedges[0], decimals=4) ==", "== 0.5 and np.round(ymid[7], decimals=4) == 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4)", "magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d", "assert np.round(zmid[0], decimals=4) == 30.5 and np.round(zmid[7], decimals=4) == 37.5, \"grid3d unexpected results.\"", "ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], mins=[10., 20., 30], return1d=True)", "not match expectations.\" r, p = magpie.grids.polarEA_grid(6, base_nphi=3) npix = magpie.grids.polarEA_npix(6, base_nphi=3) assert", "10) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected", "with r2d.\" assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\ and np.round(pmid[7],", "7., \"grid1d unexpected results.\" assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) ==", "inconsistent with p2d.\" def test_polargrid(): r2d, p2d 
= magpie.grids.polargrid(10, 20) assert np.shape(r2d) ==", "expected.\" assert np.round(xedges[0], decimals=4) == 0. and np.round(xedges[7], decimals=4) == 7., \"grid1d unexpected", "assert np.round(zmid[0], decimals=4) == 0.5 and np.round(zmid[7], decimals=4) == 7.5, \"grid3d unexpected results.\"", "is inconsistent with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10,", "30), \"shape is not as expected.\" assert np.shape(y3d) == (10, 20, 30), \"shape", "with input xedges.\" def test_grid1d(): xmid = magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4) ==", "assert np.shape(x3d) == (10, 20, 30), \"shape is not as expected.\" assert np.shape(y3d)", "np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent", "magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert np.round(np.sum(xedges-xedges2), decimals=2), \"Conversion from xmid to xedges is", "\"p values are incorrect.\" area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area, decimals=4) == np.round(np.pi/4.,", "== np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is", "inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4) == 7.5,", "expected.\" assert np.shape(z3d) == (10, 10, 10), \"shape is not as expected.\" x3d,", "inconsistent with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10.,", "inconsistent with p2d.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2.,", "+ 7], decimals=4) == np.round(15*np.pi/(3*(2*7+1)), decimals=4), \"p values are incorrect.\" area = magpie.grids.polarEA_area(10,", "with z3d.\" x3d, y3d, z3d = magpie.grids.grid3d(10, [10, 20, 30]) assert np.shape(x3d) ==", "and np.round(xmid[7], decimals=4) == 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0.,", "== 7., \"grid1d unexpected results.\" assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4)", "is inconsistent with p2d.\" def test_polarEA(): r, p = magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10)", "expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20], return1d=True) assert np.round(xmid[0],", "z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], return1d=True) assert", "of xmid and xedges is not as expected.\" assert np.round(xedges[0], decimals=4) == 0.", "== 0., \"ymid is inconsistent with y2d.\" def test_grid3d(): x3d, y3d, z3d =", "decimals=4) == np.round(15*np.pi/20, decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid", "10, mins=[10., 20., 30.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4)", "\"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent with x3d.\"", "not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30],", "magpie.grids.grid1d(10., 10, return_edges=True) assert len(xmid)+1 == len(xedges), \"Length of xmid and xedges is", "not as expected.\" assert xedges[-1] - xedges[0] == 1., \"xedges range is incorrect.\"", "is inconsistent with z3d.\" # check polar def test_polargrid(): 
r2d, p2d = magpie.grids.polargrid(10,", "0., \"zmid is inconsistent with z3d.\" # check polar def test_polargrid(): r2d, p2d", "xedges[-1] - xedges[0] == 1., \"xedges range is incorrect.\" def test_xedges2mid(): xedges =", "consistent with input xedges.\" def test_grid1d(): xmid = magpie.grids.grid1d(10., 10) assert np.round(xmid[0], decimals=4)", "\"xmid is inconsistent with x2d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4)", "\"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y3d.\"", "decimals=4) == 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., \"xmid is", "p2d.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 10, rmin=10., rmax=20., phimin=np.pi/2., phimax=np.pi, return1d=True)", "results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y2d.\" x2d, y2d,", "7.5, \"grid1d unexpected results.\" def test_grid2d(): x2d, y2d = magpie.grids.grid2d(10, 10) assert np.shape(x2d)", "(10, 10), \"shape is not as expected.\" assert np.shape(y2d) == (10, 10), \"shape", "y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True) assert", "\"Conversion from xmid to xedges is not consistent with input xedges.\" def test_grid1d():", "\"Length of polarEA grid does not match expectations.\" assert r[3*4**2] == 0.45, \"r", "is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) == np.round((np.pi/2.)/20 + np.pi/2., decimals=4) \\", "np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" def test_polarEA(): r, p", "== 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4)", "np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y3d.\" assert np.round(zmid[0], decimals=4) ==", "\"pmid is inconsistent with p2d.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 10, rmin=10.,", "len(xmid) + 1, \"Length of xmid is incorrect.\" assert xmid[0] == 0.05 and", "x2d.\" assert np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid2d unexpected", "np.round(ymid[0], decimals=4) == 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected results.\" assert", "p2d.\" def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d) == (10, 20),", "r, p = magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA grid", "xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], return1d=True) assert np.round(xmid[0],", "match expectations.\" assert r[3*4**2] == 0.45, \"r values are incorrect.\" assert r[3*7**2] ==", "len(r) == npix, \"Length of polarEA grid does not match expectations.\" assert r[3*4**2]", "1., \"xedges range is incorrect.\" xedges = magpie.grids.get_xedges(1., 2, xmin=-1.) 
xedges = np.round(xedges,", "results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" def test_polargrid():", "magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xmid = np.round(xmid, decimals=2) assert len(xedges) == len(xmid)", "== 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected results.\" xmid = magpie.grids.grid1d(10.,", "np.shape(y2d) == (10, 10), \"shape is not as expected.\" x2d, y2d, xmid, ymid", "\"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4) == 0.5 and np.round(ymid[7], decimals=4)", "and np.round(zmid[7], decimals=4) == 37.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0.,", "\"r values are incorrect.\" assert r[3*7**2] == 0.75, \"r values are incorrect.\" assert", "incorrect.\" assert r[3*7**2] == 0.75, \"r values are incorrect.\" assert np.round(p[3*4**2], decimals=4) ==", "are incorrect.\" assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are incorrect.\" assert", "def test_grid3d(): x3d, y3d, z3d = magpie.grids.grid3d(10, 10) assert np.shape(x3d) == (10, 10,", "assert np.shape(x3d) == (10, 10, 10), \"shape is not as expected.\" assert np.shape(y3d)", "incorrect.\" xedges = magpie.grids.get_xedges(1., 2, xmin=-1.) xedges = np.round(xedges, decimals=2) assert xedges[0]==-1. and", "== 20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4)", "is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d([10, 20], [10, 20],", "and xedges[1]==-0.5 and xedges[-1]==0., \"xedges with xmin are not as expected.\" assert xedges[-1]", "30), \"shape is not as expected.\" assert np.shape(z3d) == (10, 20, 30), \"shape", "10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) ==", "20, 30], [10, 20, 30], mins=[10., 20., 30], return1d=True) assert np.round(xmid[0], decimals=4) ==", "x3d, y3d, z3d = magpie.grids.grid3d(10, [10, 20, 30]) assert np.shape(x3d) == (10, 20,", "20, 30], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5,", "\"pmid is inconsistent with p2d.\" def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert", "size for r and p are not the same.\" assert len(r) == npix,", "assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent with y2d.\" x2d, y2d =", "numpy as np import magpie # check cartesian def test_get_xedges(): xedges = magpie.grids.get_xedges(1.,", "z3d.\" x3d, y3d, z3d = magpie.grids.grid3d(10, [10, 20, 30]) assert np.shape(x3d) == (10,", "npix, \"Length of polarEA grid does not match expectations.\" r, p = magpie.grids.polarEA_grid(6,", "xedges = magpie.grids.get_xedges(1., 2, xmin=-1.) xedges = np.round(xedges, decimals=2) assert xedges[0]==-1. 
and xedges[1]==-0.5", "assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent with x3d.\" assert np.round(ymid[0], decimals=4)", "decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid1d unexpected results.\" xmid, xedges", "return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid3d unexpected", "= magpie.grids.grid3d(10, [10, 20, 30]) assert np.shape(x3d) == (10, 20, 30), \"shape is", "magpie.grids.polarEA_npix(6, base_nphi=3) assert len(r) == len(p), \"PolarEA grid size for r and p", "xedges is not consistent with input xedges.\" def test_grid1d(): xmid = magpie.grids.grid1d(10., 10)", "xmid = magpie.grids.grid1d(10., 10, xmin=10) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4)", "= magpie.grids.grid1d(10., 10, return_edges=True) assert len(xmid)+1 == len(xedges), \"Length of xmid and xedges", "(10, 10), \"shape is not as expected.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10,", "= magpie.grids.grid2d(10, 10) assert np.shape(x2d) == (10, 10), \"shape is not as expected.\"", "magpie.grids.grid2d([10, 20], [10, 20], mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and", "== 0., \"zmid is inconsistent with z3d.\" x3d, y3d, z3d, xmid, ymid, zmid", "y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], mins=[10.,", "not as expected.\" assert np.shape(p2d) == (10, 20), \"shape is not as expected.\"", "expected.\" assert np.shape(z3d) == (10, 20, 30), \"shape is not as expected.\" x3d,", "decimals=4), \"p values are incorrect.\" area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area, decimals=4) ==", "xedges = magpie.grids.get_xedges(1., 10) xmid = magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert np.round(np.sum(xedges-xedges2), decimals=2),", "[10, 20, 30], return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) ==", "decimals=4) == 0., \"ymid is inconsistent with y3d.\" assert np.round(zmid[0], decimals=4) == 30.5", "= magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA grid size for r and p", "len(xedges), \"Length of xmid and xedges is not as expected.\" assert np.round(xedges[0], decimals=4)", "assert np.shape(x2d) == (10, 10), \"shape is not as expected.\" assert np.shape(y2d) ==", "30], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid3d", "as expected.\" assert np.shape(y3d) == (10, 10, 10), \"shape is not as expected.\"", "\"xmid is not as expected.\" def test_xmid2edges(): xedges = magpie.grids.get_xedges(1., 10) xmid =", "is inconsistent with y2d.\" x2d, y2d, xmid, ymid = magpie.grids.grid2d(10, 10, mins=[10., 20.],", "results.\" def test_grid2d(): x2d, y2d = magpie.grids.grid2d(10, 10) assert np.shape(x2d) == (10, 10),", "polarEA grid does not match expectations.\" r, p = magpie.grids.polarEA_grid(6, base_nphi=3) npix =", "decimals=4) == 0.5 and np.round(zmid[7], decimals=4) == 7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(z3d.flatten())-zmid),", "return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid2d unexpected", "unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent with x2d.\" assert", "10) xmid = magpie.grids.xedges2mid(xedges) xmid = np.round(xmid, decimals=2) assert len(xedges) == 
len(xmid) +", "20, return1d=True) assert np.round(rmid[0], decimals=4) == 0.05 and np.round(rmid[7], decimals=4) == 0.75, \"polargrid", "x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30],", "is inconsistent with p2d.\" def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d)", "10), \"shape is not as expected.\" assert np.shape(y2d) == (10, 10), \"shape is", "assert np.round(np.sum(np.unique(z3d.flatten())-zmid), decimals=4) == 0., \"zmid is inconsistent with z3d.\" # check polar", "(10, 20), \"shape is not as expected.\" assert np.shape(p2d) == (10, 20), \"shape", "20.5 and np.round(ymid[7], decimals=4) == 27.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(y3d.flatten())-ymid), decimals=4) ==", "x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d(10, 10, mins=[10., 20., 30.], return1d=True)", "is not as expected.\" x3d, y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20,", "10.5 and np.round(xmid[7], decimals=4) == 17.5, \"grid1d unexpected results.\" xmid, xedges = magpie.grids.grid1d(10.,", "7.5, \"grid3d unexpected results.\" assert np.round(np.sum(np.unique(x3d.flatten())-xmid), decimals=4) == 0., \"xmid is inconsistent with", "unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent with r2d.\" assert", "is not as expected.\" assert np.shape(y3d) == (10, 20, 30), \"shape is not", "== np.round(15*np.pi/(3*(2*7+1)), decimals=4), \"p values are incorrect.\" area = magpie.grids.polarEA_area(10, rmax=10., base_nphi=4) assert(np.round(area,", "incorrect.\" assert xmid[0] == 0.05 and xmid[1] == 0.15 and xmid[5] == 0.55,", "== (10, 10), \"shape is not as expected.\" assert np.shape(y2d) == (10, 10),", "np.round(xmid[7], decimals=4) == 17.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(x2d.flatten())-xmid), decimals=4) == 0., \"xmid", "20]) assert np.shape(x2d) == (10, 20), \"shape is not as expected.\" assert np.shape(y2d)", "np.round(xedges[0], decimals=4) == 0. 
and np.round(xedges[7], decimals=4) == 7., \"grid1d unexpected results.\" assert", "is not as expected.\" assert np.shape(y2d) == (10, 10), \"shape is not as", "[10, 20], mins=[10., 20.], return1d=True) assert np.round(xmid[0], decimals=4) == 10.5 and np.round(xmid[7], decimals=4)", "0.45, \"r values are incorrect.\" assert r[3*7**2] == 0.75, \"r values are incorrect.\"", "y3d, z3d, xmid, ymid, zmid = magpie.grids.grid3d([10, 20, 30], [10, 20, 30], return1d=True)", "np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" def test_polargrid(): r2d, p2d", "incorrect.\" assert np.round(p[3*4**2], decimals=4) == np.round(np.pi/(3*(2*4+1)), decimals=4), \"p values are incorrect.\" assert np.round(p[3*7**2", "xedges[0] == 1., \"xedges range is incorrect.\" def test_xedges2mid(): xedges = magpie.grids.get_xedges(1., 10)", "return1d=True) assert np.round(rmid[0], decimals=4) == 0.05 and np.round(rmid[7], decimals=4) == 0.75, \"polargrid unexpected", "and np.round(pmid[7], decimals=4) == np.round(15*(np.pi/2.)/20 + np.pi/2., decimals=4), \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid),", "10) xmid = magpie.grids.xedges2mid(xedges) xedges2 = magpie.grids.xmid2edges(xmid) assert np.round(np.sum(xedges-xedges2), decimals=2), \"Conversion from xmid", "return_edges=True) assert len(xmid)+1 == len(xedges), \"Length of xmid and xedges is not as", "== 7.5, \"grid2d unexpected results.\" assert np.round(np.sum(np.unique(y2d.flatten())-ymid), decimals=4) == 0., \"ymid is inconsistent", "assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid1d unexpected results.\"", "expected.\" r2d, p2d, rmid, pmid = magpie.grids.polargrid(10, 20, return1d=True) assert np.round(rmid[0], decimals=4) ==", "np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4) == 0., \"rmid is inconsistent with r2d.\" assert np.round(pmid[0], decimals=4) ==", "p2d.\" def test_polarEA(): r, p = magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert len(r) ==", "magpie.grids.polarEA_grid(10) npix = magpie.grids.polarEA_npix(10) assert len(r) == len(p), \"PolarEA grid size for r", "polarEA grid does not match expectations.\" assert r[3*4**2] == 0.45, \"r values are", "0.15 and xmid[5] == 0.55, \"xmid is not as expected.\" def test_xmid2edges(): xedges", "10, return1d=True) assert np.round(xmid[0], decimals=4) == 0.5 and np.round(xmid[7], decimals=4) == 7.5, \"grid3d", "is not as expected.\" assert np.shape(z3d) == (10, 10, 10), \"shape is not", "20) assert np.shape(r2d) == (10, 20), \"shape is not as expected.\" assert np.shape(p2d)", "z3d.\" # check polar def test_polargrid(): r2d, p2d = magpie.grids.polargrid(10, 20) assert np.shape(r2d)", "np.shape(x3d) == (10, 10, 10), \"shape is not as expected.\" assert np.shape(y3d) ==", "magpie.grids.get_xedges(1., 2) xedges = np.round(xedges, decimals=2) assert len(xedges) == 3, \"Length of xedges", "== 10.5 and np.round(rmid[7], decimals=4) == 17.5, \"polargrid unexpected results.\" assert np.round(np.sum(np.unique(r2d.flatten())-rmid), decimals=4)", "unexpected results.\" assert np.round(np.sum(np.unique(p2d.flatten())-pmid), decimals=4) == 0., \"pmid is inconsistent with p2d.\" r2d,", "y3d, z3d = magpie.grids.grid3d(10, 10) assert np.shape(x3d) == (10, 10, 10), \"shape is", "xmid, xedges = magpie.grids.grid1d(10., 10, return_edges=True) assert len(xmid)+1 == len(xedges), \"Length of xmid", "inconsistent with y2d.\" x2d, y2d, xmid, ymid = 
magpie.grids.grid2d(10, 10, mins=[10., 20.], return1d=True)" ]
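# Illustration (not part of the original test file): the polarEA index
# arithmetic the assertions in test_polarEA rely on. For base_nphi = b, ring i
# of an equal-area polar grid holds b*(2*i + 1) pixels, so the flat index of
# the first pixel in ring i is b*i**2 (the sum of b*(2*k + 1) for k < i).
# This is an inferred reading of the tested values, not magpie's own source.

def polarEA_ring_start(i, base_nphi=3):
    """Flat index of the first pixel in ring i: b*(1 + 3 + ... + (2i-1)) = b*i**2."""
    return base_nphi * i**2

# Ring 4 of a 10-ring, base_nphi=3 grid starts at flat index 48, and its
# radial midpoint is (4 + 0.5)/10 = 0.45 -- matching `r[3*4**2] == 0.45` above.
assert polarEA_ring_start(4) == 3 * 4**2 == 48
assert (4 + 0.5) / 10 == 0.45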
[ "while True: m = receive_message(s) if m: print(m, \"\\n\") ping(s) print(s.getsockname()) print(socket.gethostbyname(socket.gethostname())) print(socket.get)", "communication import * import socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True:", "import socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m = receive_message(s)", "s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m = receive_message(s) if m:", "* import socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m =", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m = receive_message(s) if m: print(m, \"\\n\")", "import * import socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m", "socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m = receive_message(s) if m: print(m, \"\\n\") ping(s)", "1123)) while True: m = receive_message(s) if m: print(m, \"\\n\") ping(s) print(s.getsockname()) print(socket.gethostbyname(socket.gethostname()))", "socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m = receive_message(s) if", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while True: m = receive_message(s) if m: print(m,", "from communication import * import socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((socket.gethostname(), 1123)) while", "s.connect((socket.gethostname(), 1123)) while True: m = receive_message(s) if m: print(m, \"\\n\") ping(s) print(s.getsockname())" ]
[ "tw import time w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook() for i in", "import tensorwatch as tw import time w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook()", "<reponame>lesteve/tensorwatch<gh_stars>1000+ import tensorwatch as tw import time w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric')", "= tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook() for i in range(1000): s.write((i, i*i)) time.sleep(1)", "as tw import time w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook() for i", "w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook() for i in range(1000): s.write((i, i*i))", "tensorwatch as tw import time w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook() for", "time w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook() for i in range(1000): s.write((i,", "import time w = tw.Watcher(filename='test.log') s = w.create_stream(name='my_metric') #w.make_notebook() for i in range(1000):" ]
[ "for dm in self.data: yield dm def __getitem__(self, idx): \"\"\" Fetch a device", "253 5 L--w 1 1 0 LVM-<KEY> Example data structure produced:: data =", "dm def __getitem__(self, idx): \"\"\" Fetch a device by index in devices list", "====================================== Parsers for parsing and extracting data from output of commands related to", "to each device by uuid Example: >>> len(info) 6 >>> info.names[0] 'VG00-tmp' >>>", "0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0", "if 'UUID' in dm] self.by_uuid = dict((dm['UUID'], dm) for dm in self.data if", "from output of commands related to ``dmsetup``. Parsers contained in this module are:", "L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG", "dm in self.data if 'Name' in dm] self.by_name = dict((dm['Name'], dm) for dm", "1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1 1 0 LVM-<KEY> Example", "self.by_name = dict((dm['Name'], dm) for dm in self.data if 'Name' in dm) self.uuids", ">>> len(info) 6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\"", "for dm in self.data if 'UUID' in dm) def __len__(self): \"\"\" The length", "__iter__(self): \"\"\" Iterate through the devices list \"\"\" for dm in self.data: yield", "[dm['Name'] for dm in self.data if 'Name' in dm] self.by_name = dict((dm['Name'], dm)", "'Name' in dm] self.by_name = dict((dm['Name'], dm) for dm in self.data if 'Name'", "related to ``dmsetup``. Parsers contained in this module are: DmsetupInfo - command ``dmsetup", "in dm] self.by_name = dict((dm['Name'], dm) for dm in self.data if 'Name' in", "module are: DmsetupInfo - command ``dmsetup info -C`` ----------------------------------------- \"\"\" from insights import", "253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1 1", "by_name (dict): Access to each device by devicename by_uuid (dict): Access to each", ">>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content): self.data = parse_delimited_table(content) self.names = [dm['Name']", "found, in order names (list): Device names, in order found uuids (list): UUID", "in self.data if 'Name' in dm] self.by_name = dict((dm['Name'], dm) for dm in", "1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253", "1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root", "'L--w' \"\"\" def parse_content(self, content): self.data = parse_delimited_table(content) self.names = [dm['Name'] for dm", "extracting data from output of commands related to ``dmsetup``. 
Parsers contained in this", "are: DmsetupInfo - command ``dmsetup info -C`` ----------------------------------------- \"\"\" from insights import parser,", "1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253", "1 0 LVM-<KEY> Example data structure produced:: data = [ { 'Stat': 'L--w',", "produced:: data = [ { 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1',", "253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1 1", "\"\"\" ``dmsetup info -C`` command output Example input:: Name Maj Min Stat Open", "0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1", "command ``dmsetup info -C`` ----------------------------------------- \"\"\" from insights import parser, CommandParser from insights.parsers", "8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1 1 0", "input:: Name Maj Min Stat Open Targ Event UUID VG00-tmp 253 8 L--w", "parse_delimited_table(content) self.names = [dm['Name'] for dm in self.data if 'Name' in dm] self.by_name", "command output Example input:: Name Maj Min Stat Open Targ Event UUID VG00-tmp", "list \"\"\" return len(self.data) def __iter__(self): \"\"\" Iterate through the devices list \"\"\"", "{ 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj': '253', 'Open': '1',", "'Targ': '1', 'Maj': '253', 'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes:", "\"\"\" Iterate through the devices list \"\"\" for dm in self.data: yield dm", "info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content): self.data = parse_delimited_table(content) self.names = [dm['Name'] for", "yield dm def __getitem__(self, idx): \"\"\" Fetch a device by index in devices", "parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command", "names, in order found uuids (list): UUID by_name (dict): Access to each device", "self.data if 'Name' in dm] self.by_name = dict((dm['Name'], dm) for dm in self.data", "2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit", "data structure produced:: data = [ { 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8',", "self.names = [dm['Name'] for dm in self.data if 'Name' in dm] self.by_name =", "= dict((dm['UUID'], dm) for dm in self.data if 'UUID' in dm) def __len__(self):", "\"\"\" The length of the devices list \"\"\" return len(self.data) def __iter__(self): \"\"\"", "for dm in self.data if 'Name' in dm) self.uuids = [dm['UUID'] for dm", "for dm in self.data if 'Name' in dm] self.by_name = dict((dm['Name'], dm) for", "list \"\"\" for dm in self.data: yield dm def __getitem__(self, idx): \"\"\" Fetch", "through the devices list \"\"\" for dm in self.data: yield dm def __getitem__(self,", "dm] self.by_name = dict((dm['Name'], dm) for dm in self.data if 'Name' in dm)", "devices list \"\"\" return len(self.data) def __iter__(self): \"\"\" Iterate through the devices list", "the devices list \"\"\" for dm in self.data: yield dm def __getitem__(self, idx):", "0 L--w 1 1 0 
LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1 1 0", "devices found, in order names (list): Device names, in order found uuids (list):", "in this module are: DmsetupInfo - command ``dmsetup info -C`` ----------------------------------------- \"\"\" from", "def __iter__(self): \"\"\" Iterate through the devices list \"\"\" for dm in self.data:", "by devicename by_uuid (dict): Access to each device by uuid Example: >>> len(info)", "from insights import parser, CommandParser from insights.parsers import parse_delimited_table from insights.specs import Specs", "names (list): Device names, in order found uuids (list): UUID by_name (dict): Access", "},... ] Attributes: data (list): List of devices found, in order names (list):", "Access to each device by uuid Example: >>> len(info) 6 >>> info.names[0] 'VG00-tmp'", "L--w 1 1 0 LVM-<KEY> Example data structure produced:: data = [ {", "self.uuids = [dm['UUID'] for dm in self.data if 'UUID' in dm] self.by_uuid =", "List of devices found, in order names (list): Device names, in order found", "(list): Device names, in order found uuids (list): UUID by_name (dict): Access to", "in self.data: yield dm def __getitem__(self, idx): \"\"\" Fetch a device by index", "device by devicename by_uuid (dict): Access to each device by uuid Example: >>>", "dm] self.by_uuid = dict((dm['UUID'], dm) for dm in self.data if 'UUID' in dm)", "self.data if 'Name' in dm) self.uuids = [dm['UUID'] for dm in self.data if", "Parsers for parsing and extracting data from output of commands related to ``dmsetup``.", "in order found uuids (list): UUID by_name (dict): Access to each device by", "1 1 0 LVM-<KEY> Example data structure produced:: data = [ { 'Stat':", "'8', 'Targ': '1', 'Maj': '253', 'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ]", "'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data (list): List of devices found, in order", "of devices found, in order names (list): Device names, in order found uuids", "to ``dmsetup``. 
Parsers contained in this module are: DmsetupInfo - command ``dmsetup info", "1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1 1 0 LVM-<KEY> Example data", "dm in self.data if 'Name' in dm) self.uuids = [dm['UUID'] for dm in", "0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1 1 0 LVM-<KEY> Example data structure", "\"\"\" return len(self.data) def __iter__(self): \"\"\" Iterate through the devices list \"\"\" for", "\"\"\" def parse_content(self, content): self.data = parse_delimited_table(content) self.names = [dm['Name'] for dm in", "Parsers contained in this module are: DmsetupInfo - command ``dmsetup info -C`` -----------------------------------------", "dmsetup commands - Command ``dmsetup`` ====================================== Parsers for parsing and extracting data from", "LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w", "class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command output Example input:: Name Maj Min", "'Name' in dm) self.uuids = [dm['UUID'] for dm in self.data if 'UUID' in", "``dmsetup info -C`` command output Example input:: Name Maj Min Stat Open Targ", "1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1 1 0", "Open Targ Event UUID VG00-tmp 253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home", "The length of the devices list \"\"\" return len(self.data) def __iter__(self): \"\"\" Iterate", "Maj Min Stat Open Targ Event UUID VG00-tmp 253 8 L--w 1 1", "UUID VG00-tmp 253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w", "def __getitem__(self, idx): \"\"\" Fetch a device by index in devices list \"\"\"", "info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content): self.data = parse_delimited_table(content) self.names", "Access to each device by devicename by_uuid (dict): Access to each device by", "each device by devicename by_uuid (dict): Access to each device by uuid Example:", "'UUID' in dm] self.by_uuid = dict((dm['UUID'], dm) for dm in self.data if 'UUID'", "length of the devices list \"\"\" return len(self.data) def __iter__(self): \"\"\" Iterate through", ">>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content):", "0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6", "VG00-var_log_audit 253 5 L--w 1 1 0 LVM-<KEY> Example data structure produced:: data", "by uuid Example: >>> len(info) 6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>>", "``dmsetup`` ====================================== Parsers for parsing and extracting data from output of commands related", "LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w", "\"\"\" from insights import parser, CommandParser from insights.parsers import parse_delimited_table from insights.specs import", "info -C`` ----------------------------------------- \"\"\" from insights import parser, CommandParser from insights.parsers import parse_delimited_table", 
"info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content): self.data", "dm in self.data if 'UUID' in dm] self.by_uuid = dict((dm['UUID'], dm) for dm", "len(info) 6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\" def", "-C`` command output Example input:: Name Maj Min Stat Open Targ Event UUID", "this module are: DmsetupInfo - command ``dmsetup info -C`` ----------------------------------------- \"\"\" from insights", "253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1 2", "LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w", "= dict((dm['Name'], dm) for dm in self.data if 'Name' in dm) self.uuids =", "of the devices list \"\"\" return len(self.data) def __iter__(self): \"\"\" Iterate through the", "Iterate through the devices list \"\"\" for dm in self.data: yield dm def", "uuid Example: >>> len(info) 6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat']", "\"\"\" dmsetup commands - Command ``dmsetup`` ====================================== Parsers for parsing and extracting data", "CommandParser from insights.parsers import parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\"", "import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command output Example input::", "VG00-root 253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1", "in self.data if 'UUID' in dm) def __len__(self): \"\"\" The length of the", "'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj': '253', 'Open': '1', 'Event': '0',", "(list): List of devices found, in order names (list): Device names, in order", "-C`` ----------------------------------------- \"\"\" from insights import parser, CommandParser from insights.parsers import parse_delimited_table from", "insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command output Example", "in order names (list): Device names, in order found uuids (list): UUID by_name", "1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap", "Example input:: Name Maj Min Stat Open Targ Event UUID VG00-tmp 253 8", "Command ``dmsetup`` ====================================== Parsers for parsing and extracting data from output of commands", "insights.parsers import parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info", "1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253", "in dm] self.by_uuid = dict((dm['UUID'], dm) for dm in self.data if 'UUID' in", "dm in self.data if 'UUID' in dm) def __len__(self): \"\"\" The length of", "data = [ { 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj':", "def parse_content(self, content): self.data = parse_delimited_table(content) self.names = [dm['Name'] for dm in self.data", "5 L--w 1 1 0 LVM-<KEY> Example data structure produced:: data = 
[", "[ { 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj': '253', 'Open':", "'1', 'Maj': '253', 'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data", "Targ Event UUID VG00-tmp 253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253", "UUID by_name (dict): Access to each device by devicename by_uuid (dict): Access to", "self.data = parse_delimited_table(content) self.names = [dm['Name'] for dm in self.data if 'Name' in", "the devices list \"\"\" return len(self.data) def __iter__(self): \"\"\" Iterate through the devices", "Example data structure produced:: data = [ { 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min':", "and extracting data from output of commands related to ``dmsetup``. Parsers contained in", "'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj': '253', 'Open': '1', 'Event':", "= parse_delimited_table(content) self.names = [dm['Name'] for dm in self.data if 'Name' in dm]", "Name Maj Min Stat Open Targ Event UUID VG00-tmp 253 8 L--w 1", "Example: >>> len(info) 6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w'", "'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj': '253', 'Open': '1', 'Event': '0', 'UUID':", "output Example input:: Name Maj Min Stat Open Targ Event UUID VG00-tmp 253", "order names (list): Device names, in order found uuids (list): UUID by_name (dict):", "self.data if 'UUID' in dm] self.by_uuid = dict((dm['UUID'], dm) for dm in self.data", ">>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content): self.data = parse_delimited_table(content)", "len(self.data) def __iter__(self): \"\"\" Iterate through the devices list \"\"\" for dm in", "in dm) self.uuids = [dm['UUID'] for dm in self.data if 'UUID' in dm]", "L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp", "each device by uuid Example: >>> len(info) 6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj']", "self.data: yield dm def __getitem__(self, idx): \"\"\" Fetch a device by index in", "for dm in self.data if 'UUID' in dm] self.by_uuid = dict((dm['UUID'], dm) for", "2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253", "Stat Open Targ Event UUID VG00-tmp 253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4", "order found uuids (list): UUID by_name (dict): Access to each device by devicename", "0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5", "'Maj': '253', 'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data (list):", "parser, CommandParser from insights.parsers import parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser):", "VG00-home 253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1", "- Command ``dmsetup`` ====================================== Parsers for parsing and extracting data from output of", "of commands related to ``dmsetup``. 
Parsers contained in this module are: DmsetupInfo -", "@parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command output Example input:: Name Maj", "parse_content(self, content): self.data = parse_delimited_table(content) self.names = [dm['Name'] for dm in self.data if", "if 'Name' in dm] self.by_name = dict((dm['Name'], dm) for dm in self.data if", "'UUID' in dm) def __len__(self): \"\"\" The length of the devices list \"\"\"", "'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data (list): List of", "LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w", "dm) for dm in self.data if 'Name' in dm) self.uuids = [dm['UUID'] for", "if 'Name' in dm) self.uuids = [dm['UUID'] for dm in self.data if 'UUID'", "- command ``dmsetup info -C`` ----------------------------------------- \"\"\" from insights import parser, CommandParser from", "VG00-var 253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2", "Event UUID VG00-tmp 253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3", "import parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C``", "L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY", "'Min': '8', 'Targ': '1', 'Maj': '253', 'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },...", "commands - Command ``dmsetup`` ====================================== Parsers for parsing and extracting data from output", "'0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data (list): List of devices found, in", "in dm) def __len__(self): \"\"\" The length of the devices list \"\"\" return", "from insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command output", "'253', 'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data (list): List", "data from output of commands related to ``dmsetup``. Parsers contained in this module", "= [ { 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj': '253',", "'VG00-tmp', 'Min': '8', 'Targ': '1', 'Maj': '253', 'Open': '1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4'", "DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command output Example input:: Name Maj Min Stat", "'1', 'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... 
] Attributes: data (list): List of devices", "devices list \"\"\" for dm in self.data: yield dm def __getitem__(self, idx): \"\"\"", "\"\"\" for dm in self.data: yield dm def __getitem__(self, idx): \"\"\" Fetch a", "from insights.parsers import parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup", "dm in self.data: yield dm def __getitem__(self, idx): \"\"\" Fetch a device by", "= [dm['Name'] for dm in self.data if 'Name' in dm] self.by_name = dict((dm['Name'],", "in self.data if 'UUID' in dm] self.by_uuid = dict((dm['UUID'], dm) for dm in", "'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content): self.data =", "for parsing and extracting data from output of commands related to ``dmsetup``. Parsers", "L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O", "Min Stat Open Targ Event UUID VG00-tmp 253 8 L--w 1 1 0", "VG00-swap 253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG VG00-root 253 0 L--w 1", "Specs @parser(Specs.dmsetup_info) class DmsetupInfo(CommandParser): \"\"\" ``dmsetup info -C`` command output Example input:: Name", "__len__(self): \"\"\" The length of the devices list \"\"\" return len(self.data) def __iter__(self):", "found uuids (list): UUID by_name (dict): Access to each device by devicename by_uuid", "(dict): Access to each device by uuid Example: >>> len(info) 6 >>> info.names[0]", "content): self.data = parse_delimited_table(content) self.names = [dm['Name'] for dm in self.data if 'Name'", "(dict): Access to each device by devicename by_uuid (dict): Access to each device", "dm) self.uuids = [dm['UUID'] for dm in self.data if 'UUID' in dm] self.by_uuid", "``dmsetup info -C`` ----------------------------------------- \"\"\" from insights import parser, CommandParser from insights.parsers import", "L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1 1 0 LVM-<KEY>", "dm) def __len__(self): \"\"\" The length of the devices list \"\"\" return len(self.data)", "LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1 1 0 LVM-<KEY> Example data structure produced::", "VG00-tmp 253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1", "__getitem__(self, idx): \"\"\" Fetch a device by index in devices list \"\"\" return", "3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var 253 6 L--w 1 2 0", "'253' >>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self, content): self.data = parse_delimited_table(content) self.names =", "devicename by_uuid (dict): Access to each device by uuid Example: >>> len(info) 6", "in self.data if 'Name' in dm) self.uuids = [dm['UUID'] for dm in self.data", "if 'UUID' in dm) def __len__(self): \"\"\" The length of the devices list", "return len(self.data) def __iter__(self): \"\"\" Iterate through the devices list \"\"\" for dm", "import parser, CommandParser from insights.parsers import parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info) class", "by_uuid (dict): Access to each device by uuid Example: >>> len(info) 6 >>>", "253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY VG00-var_log_audit 253 5 L--w 1 1", 
"'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data (list): List of devices found, in order names", "] Attributes: data (list): List of devices found, in order names (list): Device", "= [dm['UUID'] for dm in self.data if 'UUID' in dm] self.by_uuid = dict((dm['UUID'],", "device by uuid Example: >>> len(info) 6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253'", "uuids (list): UUID by_name (dict): Access to each device by devicename by_uuid (dict):", "``dmsetup``. Parsers contained in this module are: DmsetupInfo - command ``dmsetup info -C``", "info -C`` command output Example input:: Name Maj Min Stat Open Targ Event", "parsing and extracting data from output of commands related to ``dmsetup``. Parsers contained", "----------------------------------------- \"\"\" from insights import parser, CommandParser from insights.parsers import parse_delimited_table from insights.specs", "253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2 1", "Attributes: data (list): List of devices found, in order names (list): Device names,", "contained in this module are: DmsetupInfo - command ``dmsetup info -C`` ----------------------------------------- \"\"\"", "1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4 VG00-home 253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp VG00-var", "idx): \"\"\" Fetch a device by index in devices list \"\"\" return self.data[idx]", "dict((dm['UUID'], dm) for dm in self.data if 'UUID' in dm) def __len__(self): \"\"\"", "[dm['UUID'] for dm in self.data if 'UUID' in dm] self.by_uuid = dict((dm['UUID'], dm)", "6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O VG00-swap 253 1 L--w 2 1 0", "0 LVM-<KEY> Example data structure produced:: data = [ { 'Stat': 'L--w', 'Name':", "data (list): List of devices found, in order names (list): Device names, in", "self.data if 'UUID' in dm) def __len__(self): \"\"\" The length of the devices", "LVM-<KEY> Example data structure produced:: data = [ { 'Stat': 'L--w', 'Name': 'VG00-tmp',", "structure produced:: data = [ { 'Stat': 'L--w', 'Name': 'VG00-tmp', 'Min': '8', 'Targ':", "(list): UUID by_name (dict): Access to each device by devicename by_uuid (dict): Access", "'Event': '0', 'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4' },... ] Attributes: data (list): List of devices found,", "6 >>> info.names[0] 'VG00-tmp' >>> info[1]['Maj'] '253' >>> info[1]['Stat'] 'L--w' \"\"\" def parse_content(self,", "to each device by devicename by_uuid (dict): Access to each device by uuid", "dm) for dm in self.data if 'UUID' in dm) def __len__(self): \"\"\" The", "dict((dm['Name'], dm) for dm in self.data if 'Name' in dm) self.uuids = [dm['UUID']", "DmsetupInfo - command ``dmsetup info -C`` ----------------------------------------- \"\"\" from insights import parser, CommandParser", "Device names, in order found uuids (list): UUID by_name (dict): Access to each", "def __len__(self): \"\"\" The length of the devices list \"\"\" return len(self.data) def", "commands related to ``dmsetup``. Parsers contained in this module are: DmsetupInfo - command", "output of commands related to ``dmsetup``. 
Parsers contained in this module are: DmsetupInfo", "insights import parser, CommandParser from insights.parsers import parse_delimited_table from insights.specs import Specs @parser(Specs.dmsetup_info)", "self.by_uuid = dict((dm['UUID'], dm) for dm in self.data if 'UUID' in dm) def" ]
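# A short usage sketch for the parser above (an addition). It assumes
# insights-core's context_wrap test helper, which wraps raw command output
# into a parseable context the way the project's own test suite does; the
# SAMPLE string is taken from the docstring's example input.
from insights.tests import context_wrap

SAMPLE_DMSETUP_INFO = """
Name               Maj Min Stat Open Targ Event  UUID
VG00-tmp           253   8 L--w    1    1      0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4
VG00-root          253   0 L--w    1    1      0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY
""".strip()

info = DmsetupInfo(context_wrap(SAMPLE_DMSETUP_INFO))
assert len(info) == 2                            # two devices parsed
assert info.names == ['VG00-tmp', 'VG00-root']   # names kept in input order
assert info.by_name['VG00-root']['Maj'] == '253' # per-device lookup by name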
[ "np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score = scores.max() mean_score = np.mean(scores, axis=0) scores =", "plt.title('Validation accuracy') # plt.show() pass def showRBFSVM(self, model, clfname): C_range = model.param_grid['C'] gamma_range", "\" + str(temp_loss) + \" \\n\") f_acc.write(\"data_point= \" + str(data_point) + \" accuracy=", "clf.predict(test_X)))) f_acc.write(\"\\n===== End of Training / Test Set Results =====\\n\") f_acc.write(\"data_point= %d ,", "plt.show() pass def plotLROverTime(data_x, loss_y, acc_y, idx): # Set the style globally #", "%s \" %(dname)) print(\"===== %s\" %(clfname)) # (1) train model with CV model", "/ Test Set Results =====\\n\") f_loss.write(\"data_point= %d , zero_one_loss= %f\\n\" % (data_point, zero_one_loss(test_y,", "=====\\n\") f_loss.write(\"data_point= %d , zero_one_loss= %f\\n\" % (data_point, zero_one_loss(test_y, clf.predict(test_X)))) f_acc.write(\"\\n===== End of", "(cname == \"Neural Nets\"): self.trainNeuralNets() else: print(\"Please put existing classifier names\") pass #", "of Training / Test Set Results =====\\n\") f_acc.write(\"data_point= %d , accuracy= %f\\n\" %", "= GridSearchCV(SGDClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass def trainLinearSVM(self): kernel_range = ['linear'] C_range =", "load data if len(sys.argv) > 1 and int(sys.argv[1]) != idx: continue data =", "= model.grid.cv_results_['mean_test_score'].reshape(len(C_range), len(gamma_range)) scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score = scores.max() mean_score", "markersize=2, label='accuracy') plt.xlabel('Data Points') plt.ylabel('Score') # Axes alteration to put zero values inside", "# dark_background, seaborn-deep, etc plt.style.use('ggplot') plt.rcParams['font.family'] = 'serif' plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace'] =", "runLROverTime(train_X, train_y, test_X, test_y, idx): clf = SGDClassifier(loss='log') # shuffle=True is useless here", "num=13) # 13 params self.param_grid = dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range, max_iter=[1000], tol=[1e-3]) self.grid =", "model.param_grid['learning_rate_init'] scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score = scores.max() mean_score = np.mean(scores,", "\" + str(data_point) + \" accuracy= \" + str(temp_acc) + \" \\n\") data_x.append(data_point)", "make_moons, make_circles, make_classification from sklearn.linear_model import SGDClassifier from sklearn.svm import SVC from sklearn.neural_network", "clfnames = [\"Logistic Regression\", \"Linear SVM\", \"RBF SVM\", \"Neural Nets\"] # clfnames =", "0.9 hidden_layer_sizes_range = np.array([1,2,3,4,5,6,7,8,9,10,16,32]) # 12 params activation_range = ['logistic'] solver_range = ['sgd']", "from sklearn.svm import SVC from sklearn.neural_network import MLPClassifier from sklearn.neighbors import KNeighborsClassifier from", "np.array([1.0e-04,1.0e-03,1.0e-02,1.0e-01]) # 4 params self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid = GridSearchCV(MLPClassifier(),", "SVC from sklearn.neural_network import MLPClassifier from sklearn.neighbors import KNeighborsClassifier from sklearn.gaussian_process import GaussianProcessClassifier", "def __init__(self): self.name = '' self.grid = '' self.param_grid = '' self.cv =", "= [\"RBF SVM\"] # 
clfnames = [\"Linear SVM\"] for idx2, clfname in enumerate(clfnames):", "= [] self.test_y = [] def run(self): report = Report() for idx, dname", "len(gamma_range)) scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score = scores.max() mean_score = np.mean(scores,", "= [train_X[i] for i in shuffledRange] shuffledY = [train_y[i] for i in shuffledRange]", "elif (cname == \"Linear SVM\"): self.trainLinearSVM() elif (cname == \"RBF SVM\"): self.trainRBFSVM() elif", "axis=0) scores = scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,", "\"RBF SVM\"): self.showRBFSVM(model, clfname) elif (clfname == \"Neural Nets\"): self.showNeuralNets(model, clfname) else: print(\"Please", "Results =====\\n\") f_acc.write(\"data_point= %d , accuracy= %f\\n\" % (data_point, accuracy_score(test_y, clf.predict(test_X)))) f_loss.close() f_acc.close()", "__call__(self, value, clip=None): x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1] return", "trainLinearSVM(self): kernel_range = ['linear'] C_range = np.geomspace(1.e-07, 1.e+05, num=13) # 13 params :", "plt.title('Validation accuracy') # plt.show() pass def showLinearSVM(self, model, clfname): C_range = model.param_grid['C'] scores", "+ \" accuracy= \" + str(temp_acc) + \" \\n\") # data_x.append(data_point) # loss_y.append(temp_loss)", "AdaBoostClassifier from sklearn.naive_bayes import GaussianNB from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis from sklearn.metrics import *", "+ str(temp_acc) + \" \\n\") data_x.append(data_point) loss_y.append(temp_loss) acc_y.append(temp_acc) f_loss.write(\"\\n===== End of Training /", "(default), F1-measure, AUC loss_range = ['log'] penalty_range = ['l2','l1','none'] alpha_range = np.geomspace(1.e-07, 1.e+05,", "scores\") print ( confusion_matrix(target_test, predicted_test) ) print ( classification_report(target_test, predicted_test) ) pass def", "len(sys.argv) > 1 and int(sys.argv[1]) != idx: continue data = np.load(dname) self.train_y =", "model.param_grid['gamma'] # scores = model.grid.cv_results_['mean_test_score'].reshape(len(C_range), len(gamma_range)) scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score", "be around # the values of interest. 
class MidpointNormalize(Normalize): def __init__(self, vmin=None, vmax=None,", "zero_one_loss= \" + str(temp_loss) + \" \\n\") # f_acc.write(\"data_point= \" + str(data_point) +", "+ 0.1, ymin, ymax]) plt.title('LR performance over time', fontstyle='italic') plt.legend(loc='best', numpoints=1, fancybox=True) #", "predicted_test = model.grid.predict(self.test_X) predicted_train = model.grid.predict(self.train_X) # Loss + Accuracy (training + test)", "(clfname == \"Neural Nets\"): self.showNeuralNets(model, clfname) else: print(\"Please put existing classifier names\") pass", "# auc + confusion matrix # cpu computation time report.showResult(model, predicted_test, self.test_y, predicted_train,", "% (np.mean(model.grid.cv_results_['mean_fit_time'], axis=0)) ) print(\"The mean test time of %f\" % (np.mean(model.grid.cv_results_['mean_score_time'], axis=0))", "zero_one_loss(test_y, clf.predict(test_X)))) f_acc.write(\"\\n===== End of Training / Test Set Results =====\\n\") f_acc.write(\"data_point= %d", "plt.xlabel('gamma') plt.ylabel('C') plt.colorbar() plt.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45) plt.yticks(np.arange(len(C_range)), C_range) plt.title('Validation accuracy') # plt.show() pass", "= scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(learning_rate_init_range), len(hidden_layer_sizes_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2,", "(regularization)') plt.colorbar() plt.xticks(np.arange(len(penalty_range)), penalty_range, rotation=45) plt.yticks(np.arange(len(alpha_range)), alpha_range) plt.title('Validation accuracy') # plt.show() pass def", "print(\"The test Log Loss %0.3f Zero one loss %f\" % (log_loss(target_test, predicted_test), zero_one_loss(target_test,", "f_loss.write(\"\\n===== End of Training / Test Set Results =====\\n\") f_loss.write(\"data_point= %d , zero_one_loss=", "for idx2, clfname in enumerate(clfnames): print(\"===== %s \" %(dname)) print(\"===== %s\" %(clfname)) #", "continue data = np.load(dname) self.train_y = data['train_Y'] self.test_y = data['test_Y'] # standardize data", "are %s with a score of %0.3f\" % (model.grid.best_params_, model.grid.best_score_)) print(\"The Train Log", "from sklearn.linear_model import SGDClassifier from sklearn.svm import SVC from sklearn.neural_network import MLPClassifier from", "wspace=0.40) plt.savefig('./LR_overtime_'+str(idx)+'.png', bbox_inches='tight') pass def batches(l, n): for i in range(0, len(l), n):", "self.train_y = data['train_Y'] self.test_y = data['test_Y'] # standardize data (mean=0, std=1) self.train_X =", "SGDClassifier from sklearn.svm import SVC from sklearn.neural_network import MLPClassifier from sklearn.neighbors import KNeighborsClassifier", "plt.xticks(np.arange(len(hidden_layer_sizes_range)), hidden_layer_sizes_range, rotation=45) plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range) plt.title('Validation accuracy') # plt.show() pass def plotLROverTime(data_x, loss_y,", "of data set \", self.train_X.shape, self.train_y.shape, self.test_X.shape, self.test_y.shape) if len(sys.argv) > 2 and", "import Normalize from matplotlib.colors import ListedColormap from sklearn.model_selection import train_test_split, GridSearchCV from sklearn.model_selection", "def run(self): report = Report() for idx, dname in enumerate(self.dnames): # load data", "# Utility function to move the midpoint of a colormap to be around", "min_score = scores.min() max_score = scores.max() mean_score = np.mean(scores, axis=0) scores = 
scores.reshape(len(learning_rate_init_range),", "= StandardScaler().fit_transform(data['train_X']) self.test_X = StandardScaler().fit_transform(data['test_X']) print (\"shape of data set \", self.train_X.shape, self.train_y.shape,", "in shuffledRange] shuffledY = [train_y[i] for i in shuffledRange] for batch in batches(range(len(shuffledX)),", "cv=self.cv, n_jobs=-1) pass def trainLinearSVM(self): kernel_range = ['linear'] C_range = np.geomspace(1.e-07, 1.e+05, num=13)", "Zero one loss %f\" % (log_loss(target_test, predicted_test), zero_one_loss(target_test, predicted_test))) print(\"The train Accuracy %0.3f\"", "accuracy_score(train_y, clf.predict(train_X)) # f_loss.write(\"data_point= \" + str(data_point) + \" zero_one_loss= \" + str(temp_loss)", "TODO: try different scoring rule such as Accuracy (default), F1-measure, AUC loss_range =", "try different scoring rule such as Accuracy (default), F1-measure, AUC loss_range = ['log']", "mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(learning_rate_init_range), len(hidden_layer_sizes_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15,", "# standardize data (mean=0, std=1) self.train_X = StandardScaler().fit_transform(data['train_X']) self.test_X = StandardScaler().fit_transform(data['test_X']) print (\"shape", "% (accuracy_score(target_train, predicted_train))) print(\"The test Accuracy %0.3f\" % (accuracy_score(target_test, predicted_test) )) print(\"The test", "penalty_range = ['l2','l1','none'] alpha_range = np.geomspace(1.e-07, 1.e+05, num=13) # 13 params self.param_grid =", "str(data_point) + \" zero_one_loss= \" + str(temp_loss) + \" \\n\") # f_acc.write(\"data_point= \"", "if (clfname == \"Logistic Regression\"): self.showLogisticRegression(model, clfname) elif (clfname == \"Linear SVM\"): self.showLinearSVM(model,", "data_point = 0 f_loss = open('./LR_overtime_loss_'+str(idx)+'.txt', 'w') f_acc = open('./LR_overtime_acc_'+str(idx)+'.txt', 'w') data_x =", "batch in batches(range(len(shuffledX)), 10): clf.partial_fit(shuffledX[batch[0]:batch[-1] + 1], shuffledY[batch[0]:batch[-1] + 1], classes=np.unique(train_y)) data_point +=", "+ \" zero_one_loss= \" + str(temp_loss) + \" \\n\") # f_acc.write(\"data_point= \" +", "# Axes alteration to put zero values inside the figure Axes # Avoids", "from sklearn.datasets import make_moons, make_circles, make_classification from sklearn.linear_model import SGDClassifier from sklearn.svm import", "computation time report.showResult(model, predicted_test, self.test_y, predicted_train, self.train_y) report.showPlot(model, clfname) plt.savefig('./'+clfname+'_'+str(idx)+'.png', bbox_inches = 'tight')", "names\") pass def showLogisticRegression(self, model, clfname): penalty_range = model.param_grid['penalty'] alpha_range = model.param_grid['alpha'] #", "Loss %0.3f Zero one loss %f\" % (log_loss(target_test, predicted_test), zero_one_loss(target_test, predicted_test))) print(\"The train", "= scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score, vmax=max_score,", "Axes # Avoids axis white lines cutting through zero values - fivethirtyeight style", "as Accuracy (default), F1-measure, AUC loss_range = ['log'] penalty_range = ['l2','l1','none'] alpha_range =", "C=C_range) self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass def trainRBFSVM(self): # params C", 
"%0.3f\" % (model.grid.best_params_, model.grid.best_score_)) print(\"The Train Log Loss %0.3f Zero one loss %f\"", "etc plt.style.use('ggplot') plt.rcParams['font.family'] = 'serif' plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace'] = 'Ubuntu Mono' plt.rcParams['font.size']", "predicted_train = model.grid.predict(self.train_X) # Loss + Accuracy (training + test) # auc +", "%0.3f\" % (accuracy_score(target_train, predicted_train))) print(\"The test Accuracy %0.3f\" % (accuracy_score(target_test, predicted_test) )) print(\"The", "from sklearn.neighbors import KNeighborsClassifier from sklearn.gaussian_process import GaussianProcessClassifier from sklearn.gaussian_process.kernels import RBF from", "cv=self.cv, n_jobs=-1) pass def trainNeuralNets(self): # early stopping default False, Momentum default 0.9", "(accuracy_score(target_test, predicted_test) )) print(\"The test AUC of %0.3f\" % (roc_auc_score(target_test, predicted_test) )) print(\"The", "import DecisionTreeClassifier from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier from sklearn.naive_bayes import GaussianNB from sklearn.discriminant_analysis", "import RBF from sklearn.tree import DecisionTreeClassifier from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier from sklearn.naive_bayes", "self.grid = GridSearchCV(SGDClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass def trainLinearSVM(self): kernel_range = ['linear'] C_range", "marker='o', markersize=2, label='loss') plt.plot(data_x, acc_y, linewidth=0.5, linestyle='--', marker='v', markersize=2, label='accuracy') plt.xlabel('Data Points') plt.ylabel('Score')", "elif (clfname == \"Linear SVM\"): self.showLinearSVM(model, clfname) elif (clfname == \"RBF SVM\"): self.showRBFSVM(model,", "sklearn.datasets import make_moons, make_circles, make_classification from sklearn.linear_model import SGDClassifier from sklearn.svm import SVC", "computation time def showResult(self, model, predicted_test, target_test, predicted_train, target_train): print(\"The best parameters are", "data = np.load(dname) self.train_y = data['train_Y'] self.test_y = data['test_Y'] # standardize data (mean=0,", "(np.mean(model.grid.cv_results_['mean_score_time'], axis=0)) ) # confusion matrix print(\"confusion matrix / precision recall scores\") print", "import SGDClassifier from sklearn.svm import SVC from sklearn.neural_network import MLPClassifier from sklearn.neighbors import", "n_jobs=-1) pass class Report: def __init__(self): pass # Loss + Accuracy (training +", "activation_range = ['logistic'] solver_range = ['sgd'] learning_rate_init_range = np.array([1.0e-04,1.0e-03,1.0e-02,1.0e-01]) # 4 params self.param_grid", "( classification_report(target_test, predicted_test) ) pass def showPlot(self, model, clfname): if (clfname == \"Logistic", "\"Neural Nets\"] # clfnames = [\"RBF SVM\"] # clfnames = [\"Linear SVM\"] for", "predicted_test))) print(\"The train Accuracy %0.3f\" % (accuracy_score(target_train, predicted_train))) print(\"The test Accuracy %0.3f\" %", "performance over time', fontstyle='italic') plt.legend(loc='best', numpoints=1, fancybox=True) # Space plots a bit plt.subplots_adjust(hspace=0.25,", "scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(learning_rate_init_range), len(hidden_layer_sizes_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95,", "= 12 # Set an aspect ratio width, height = plt.figaspect(1.68) fig =", "1.e+05, num=13) # 13 params self.param_grid = 
dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range, max_iter=[1000], tol=[1e-3]) self.grid", "class Report: def __init__(self): pass # Loss + Accuracy (training + test) #", "midpoint=0.92)) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('gamma') plt.ylabel('C') plt.colorbar() plt.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45) plt.yticks(np.arange(len(C_range)),", "test) # auc + confusion matrix # cpu computation time report.showResult(model, predicted_test, self.test_y,", "self.test_X, self.test_y, idx) continue clfnames = [\"Logistic Regression\", \"Linear SVM\", \"RBF SVM\", \"Neural", "= 'neg_log_loss' #'accuracy', 'f1', 'precision', 'recall', 'roc_auc' def trainModel(self, cname): if (cname ==", "alpha=alpha_range, max_iter=[1000], tol=[1e-3]) self.grid = GridSearchCV(SGDClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass def trainLinearSVM(self): kernel_range", "Normalize.__init__(self, vmin, vmax, clip) def __call__(self, value, clip=None): x, y = [self.vmin, self.midpoint,", "rotation=45) plt.yticks(np.arange(len(alpha_range)), alpha_range) plt.title('Validation accuracy') # plt.show() pass def showLinearSVM(self, model, clfname): C_range", "Set Results =====\\n\") f_loss.write(\"data_point= %d , zero_one_loss= %f\\n\" % (data_point, zero_one_loss(test_y, clf.predict(test_X)))) f_acc.write(\"\\n=====", "Set Results =====\\n\") f_acc.write(\"data_point= %d , accuracy= %f\\n\" % (data_point, accuracy_score(test_y, clf.predict(test_X)))) f_loss.close()", "%f\" % (np.mean(model.grid.cv_results_['mean_fit_time'], axis=0)) ) print(\"The mean test time of %f\" % (np.mean(model.grid.cv_results_['mean_score_time'],", "path. 
try: dirpath = os.path.dirname(__file__) except Exception as inst: dirpath = '' pass", "f_name1 = os.path.join(dirpath,\"../datasets/breast-cancer.npz\") f_name2 = os.path.join(dirpath,\"../datasets/diabetes.npz\") f_name3 = os.path.join(dirpath,\"../datasets/digit.npz\") f_name4 = os.path.join(dirpath,\"../datasets/iris.npz\") f_name5", "= np.array([1,2,3,4,5,6,7,8,9,10,16,32]) # 12 params activation_range = ['logistic'] solver_range = ['sgd'] learning_rate_init_range =", "plt.colorbar() plt.xticks(np.arange(len(penalty_range)), penalty_range, rotation=45) plt.yticks(np.arange(len(alpha_range)), alpha_range) plt.title('Validation accuracy') # plt.show() pass def showLinearSVM(self,", "np.geomspace(1.e-07, 1.e+05, num=13) # 13 params : self.param_grid = dict(kernel=kernel_range, C=C_range) self.grid =", "f_name3 = os.path.join(dirpath,\"../datasets/digit.npz\") f_name4 = os.path.join(dirpath,\"../datasets/iris.npz\") f_name5 = os.path.join(dirpath,\"../datasets/wine.npz\") # Utility function to", "- 0.1, xmax + 0.1, ymin, ymax]) plt.title('LR performance over time', fontstyle='italic') plt.legend(loc='best',", "n_iter = 10 data_point = 0 f_loss = open('./LR_overtime_loss_'+str(idx)+'.txt', 'w') f_acc = open('./LR_overtime_acc_'+str(idx)+'.txt',", "pass def showRBFSVM(self, model, clfname): C_range = model.param_grid['C'] gamma_range = model.param_grid['gamma'] # scores", "plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('hidden_layer_sizes') plt.ylabel('learning_rate_init') plt.colorbar() plt.xticks(np.arange(len(hidden_layer_sizes_range)), hidden_layer_sizes_range, rotation=45) plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range)", "self.train_y) report.showPlot(model, clfname) plt.savefig('./'+clfname+'_'+str(idx)+'.png', bbox_inches = 'tight') if __name__ == '__main__': eval =", "ClassModels() model.trainModel(clfname) model.grid.fit(self.train_X, self.train_y) # (2) show results predicted_test = model.grid.predict(self.test_X) predicted_train =", "# plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, # norm=MidpointNormalize(vmin=0.2, midpoint=0.92)) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('gamma')", "str(temp_loss) + \" \\n\") # f_acc.write(\"data_point= \" + str(data_point) + \" accuracy= \"", "# plt.show() pass def plotLROverTime(data_x, loss_y, acc_y, idx): # Set the style globally", "\"Linear SVM\", \"RBF SVM\", \"Neural Nets\"] # clfnames = [\"RBF SVM\"] # clfnames", "StandardScaler().fit_transform(data['train_X']) self.test_X = StandardScaler().fit_transform(data['test_X']) print (\"shape of data set \", self.train_X.shape, self.train_y.shape, self.test_X.shape,", "# load data if len(sys.argv) > 1 and int(sys.argv[1]) != idx: continue data", "'' self.param_grid = '' self.cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) self.scoring = 'neg_log_loss' #'accuracy',", "Space plots a bit plt.subplots_adjust(hspace=0.25, wspace=0.40) plt.savefig('./LR_overtime_'+str(idx)+'.png', bbox_inches='tight') pass def batches(l, n): for", "cutting through zero values - fivethirtyeight style xmin, xmax, ymin, ymax = plt.axis()", "pass f_name1 = os.path.join(dirpath,\"../datasets/breast-cancer.npz\") f_name2 = os.path.join(dirpath,\"../datasets/diabetes.npz\") f_name3 = os.path.join(dirpath,\"../datasets/digit.npz\") f_name4 = 
os.path.join(dirpath,\"../datasets/iris.npz\")", "one loss %f\" % (log_loss(target_test, predicted_test), zero_one_loss(target_test, predicted_test))) print(\"The train Accuracy %0.3f\" %", "def showLinearSVM(self, model, clfname): C_range = model.param_grid['C'] scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min()", "n): yield l[i:i+n] def runLROverTime(train_X, train_y, test_X, test_y, idx): clf = SGDClassifier(loss='log') #", "plt from matplotlib.colors import Normalize from matplotlib.colors import ListedColormap from sklearn.model_selection import train_test_split,", "with CV model = ClassModels() model = ClassModels() model.trainModel(clfname) model.grid.fit(self.train_X, self.train_y) # (2)", "max_score = scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2,", "plt.show() pass def showLinearSVM(self, model, clfname): C_range = model.param_grid['C'] scores = np.array(model.grid.cv_results_['mean_test_score']) min_score", "plt.rcParams['font.size'] = 10 plt.rcParams['axes.labelsize'] = 10 plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize']", "showLogisticRegression(self, model, clfname): penalty_range = model.param_grid['penalty'] alpha_range = model.param_grid['alpha'] # 13 params scores", "+ str(temp_acc) + \" \\n\") # data_x.append(data_point) # loss_y.append(temp_loss) # acc_y.append(temp_acc) for n", "C / gamma kernel_range = ['rbf'] C_range = np.geomspace(1.e-07, 1.e+05, num=13) # 13", "# 13 params : gamma_range = np.array([0.001,0.005,0.01,0.05,0.1,0.5,1,2,3]) # 9 params self.param_grid = dict(kernel=kernel_range,", "cname): if (cname == \"Logistic Regression\"): self.trainLogisticRegression() elif (cname == \"Linear SVM\"): self.trainLinearSVM()", "zero_one_loss(target_test, predicted_test))) print(\"The train Accuracy %0.3f\" % (accuracy_score(target_train, predicted_train))) print(\"The test Accuracy %0.3f\"", "\" + str(temp_acc) + \" \\n\") data_x.append(data_point) loss_y.append(temp_loss) acc_y.append(temp_acc) f_loss.write(\"\\n===== End of Training", "f_name5] self.train_X = [] self.train_y = [] self.test_X = [] self.test_y = []", "width, height = plt.figaspect(1.68) fig = plt.figure(figsize=(width, height), dpi=400) plt.plot(data_x, loss_y, linewidth=0.5, linestyle=':',", "print(\"The train Accuracy %0.3f\" % (accuracy_score(target_train, predicted_train))) print(\"The test Accuracy %0.3f\" % (accuracy_score(target_test,", "plt.show() pass def showNeuralNets(self, model, clfname): hidden_layer_sizes_range = model.param_grid['hidden_layer_sizes'] learning_rate_init_range = model.param_grid['learning_rate_init'] scores", ") # confusion matrix print(\"confusion matrix / precision recall scores\") print ( confusion_matrix(target_test,", "idx): # Set the style globally # Alternatives include bmh, fivethirtyeight, ggplot, #", "sklearn.model_selection import StratifiedShuffleSplit from sklearn.preprocessing import StandardScaler from sklearn.datasets import make_moons, make_circles, make_classification", "acc_y, idx): # Set the style globally # Alternatives include bmh, fivethirtyeight, ggplot,", "model with CV model = ClassModels() model = ClassModels() model.trainModel(clfname) model.grid.fit(self.train_X, self.train_y) #", "scores.min() max_score = scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6))", "\" zero_one_loss= \" + 
str(temp_loss) + \" \\n\") f_acc.write(\"data_point= \" + str(data_point) +", "axis=0)) ) print(\"The mean test time of %f\" % (np.mean(model.grid.cv_results_['mean_score_time'], axis=0)) ) #", "through zero values - fivethirtyeight style xmin, xmax, ymin, ymax = plt.axis() plt.axis([xmin", "= os.path.join(dirpath,\"../datasets/wine.npz\") # Utility function to move the midpoint of a colormap to", "pass def showNeuralNets(self, model, clfname): hidden_layer_sizes_range = model.param_grid['hidden_layer_sizes'] learning_rate_init_range = model.param_grid['learning_rate_init'] scores =", "def showLogisticRegression(self, model, clfname): penalty_range = model.param_grid['penalty'] alpha_range = model.param_grid['alpha'] # 13 params", "showResult(self, model, predicted_test, target_test, predicted_train, target_train): print(\"The best parameters are %s with a", "Nets\"): self.showNeuralNets(model, clfname) else: print(\"Please put existing classifier names\") pass def showLogisticRegression(self, model,", "shuffle=True is useless here shuffledRange = range(train_X.shape[0]) n_iter = 10 data_point = 0", "'Ubuntu' plt.rcParams['font.monospace'] = 'Ubuntu Mono' plt.rcParams['font.size'] = 10 plt.rcParams['axes.labelsize'] = 10 plt.rcParams['axes.labelweight'] =", "idx, dname in enumerate(self.dnames): # load data if len(sys.argv) > 1 and int(sys.argv[1])", "fig = plt.figure(figsize=(width, height), dpi=400) plt.plot(data_x, loss_y, linewidth=0.5, linestyle=':', marker='o', markersize=2, label='loss') plt.plot(data_x,", "\" \\n\") # f_acc.write(\"data_point= \" + str(data_point) + \" accuracy= \" + str(temp_acc)", "SVM\"] # clfnames = [\"Linear SVM\"] for idx2, clfname in enumerate(clfnames): print(\"===== %s", "sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier from sklearn.naive_bayes import GaussianNB from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis from", "\" zero_one_loss= \" + str(temp_loss) + \" \\n\") # f_acc.write(\"data_point= \" + str(data_point)", "top=0.95) # plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, # norm=MidpointNormalize(vmin=0.2, midpoint=0.92)) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score))", "rotation=45) plt.yticks(np.arange(len(C_range)), C_range) plt.title('Validation accuracy') # plt.show() pass def showNeuralNets(self, model, clfname): hidden_layer_sizes_range", "[train_y[i] for i in shuffledRange] for batch in batches(range(len(shuffledX)), 10): clf.partial_fit(shuffledX[batch[0]:batch[-1] + 1],", "\"Logistic Regression\"): self.trainLogisticRegression() elif (cname == \"Linear SVM\"): self.trainLinearSVM() elif (cname == \"RBF", "np.geomspace(1.e-07, 1.e+05, num=13) # 13 params self.param_grid = dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range, max_iter=[1000], tol=[1e-3])", "with a score of %0.3f\" % (model.grid.best_params_, model.grid.best_score_)) print(\"The Train Log Loss %0.3f", "= np.array([0.001,0.005,0.01,0.05,0.1,0.5,1,2,3]) # 9 params self.param_grid = dict(kernel=kernel_range, gamma=gamma_range, C=C_range) self.grid = GridSearchCV(SVC(),", "= zero_one_loss(train_y, clf.predict(train_X)) # temp_acc = accuracy_score(train_y, clf.predict(train_X)) # f_loss.write(\"data_point= \" + str(data_point)", "put zero values inside the figure Axes # Avoids axis white lines cutting", "model, clfname): if (clfname == \"Logistic Regression\"): self.showLogisticRegression(model, clfname) elif (clfname == \"Linear", 
"= os.path.join(dirpath,\"../datasets/digit.npz\") f_name4 = os.path.join(dirpath,\"../datasets/iris.npz\") f_name5 = os.path.join(dirpath,\"../datasets/wine.npz\") # Utility function to move", "min_score = scores.min() max_score = scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(alpha_range),len(penalty_range))", "idx) continue clfnames = [\"Logistic Regression\", \"Linear SVM\", \"RBF SVM\", \"Neural Nets\"] #", "Log Loss %0.3f Zero one loss %f\" % (log_loss(target_train, predicted_train), zero_one_loss(target_train, predicted_train))) print(\"The", "idx): clf = SGDClassifier(loss='log') # shuffle=True is useless here shuffledRange = range(train_X.shape[0]) n_iter", "12 params activation_range = ['logistic'] solver_range = ['sgd'] learning_rate_init_range = np.array([1.0e-04,1.0e-03,1.0e-02,1.0e-01]) # 4", "= np.mean(scores, axis=0) scores = scores.reshape(len(C_range), len(gamma_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)", "model, clfname): C_range = model.param_grid['C'] scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score =", "# Set the style globally # Alternatives include bmh, fivethirtyeight, ggplot, # dark_background,", "os, random import numpy as np import matplotlib.pyplot as plt from matplotlib.colors import", "SVM\"): self.trainLinearSVM() elif (cname == \"RBF SVM\"): self.trainRBFSVM() elif (cname == \"Neural Nets\"):", "0.1, ymin, ymax]) plt.title('LR performance over time', fontstyle='italic') plt.legend(loc='best', numpoints=1, fancybox=True) # Space", "dark_background, seaborn-deep, etc plt.style.use('ggplot') plt.rcParams['font.family'] = 'serif' plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace'] = 'Ubuntu", "from sklearn.model_selection import train_test_split, GridSearchCV from sklearn.model_selection import StratifiedShuffleSplit from sklearn.preprocessing import StandardScaler", "GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass def trainRBFSVM(self): # params C / gamma kernel_range", "accuracy') # plt.show() pass def plotLROverTime(data_x, loss_y, acc_y, idx): # Set the style", "in enumerate(clfnames): print(\"===== %s \" %(dname)) print(\"===== %s\" %(clfname)) # (1) train model", "of interest. 
class ClassModels:

    def __init__(self):
        self.name = ''
        self.grid = ''
        self.param_grid = ''
        self.cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42)
        self.scoring = 'neg_log_loss'  # 'accuracy', 'f1', 'precision', 'recall', 'roc_auc'

    def trainModel(self, cname):
        if cname == "Logistic Regression":
            self.trainLogisticRegression()
        elif cname == "Linear SVM":
            self.trainLinearSVM()
        elif cname == "RBF SVM":
            self.trainRBFSVM()
        elif cname == "Neural Nets":
            self.trainNeuralNets()
        else:
            print("Please put existing classifier names")
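    # Note: self.scoring is stored but never passed to the GridSearchCV calls
    # below, so each grid ranks candidates with the estimator's default
    # scorer. Applying it would presumably look like:
    #   GridSearchCV(SGDClassifier(), param_grid=self.param_grid,
    #                scoring=self.scoring, cv=self.cv, n_jobs=-1)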
SVM\"] # clfnames = [\"Linear SVM\"]", "midpoint=mean_score)) plt.xlabel('penalty') plt.ylabel('alpha (regularization)') plt.colorbar() plt.xticks(np.arange(len(penalty_range)), penalty_range, rotation=45) plt.yticks(np.arange(len(alpha_range)), alpha_range) plt.title('Validation accuracy') #", "ListedColormap from sklearn.model_selection import train_test_split, GridSearchCV from sklearn.model_selection import StratifiedShuffleSplit from sklearn.preprocessing import", "1 and int(sys.argv[1]) != idx: continue data = np.load(dname) self.train_y = data['train_Y'] self.test_y", "self.test_X = StandardScaler().fit_transform(data['test_X']) print (\"shape of data set \", self.train_X.shape, self.train_y.shape, self.test_X.shape, self.test_y.shape)", "loss_y, acc_y, idx): # Set the style globally # Alternatives include bmh, fivethirtyeight,", "AUC loss_range = ['log'] penalty_range = ['l2','l1','none'] alpha_range = np.geomspace(1.e-07, 1.e+05, num=13) #", "temp_loss = zero_one_loss(train_y, clf.predict(train_X)) # temp_acc = accuracy_score(train_y, clf.predict(train_X)) # f_loss.write(\"data_point= \" +", "= scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95,", "# find a current file' directory path. try: dirpath = os.path.dirname(__file__) except Exception", "learning_rate_init_range = np.array([1.0e-04,1.0e-03,1.0e-02,1.0e-01]) # 4 params self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid", "dpi=400) plt.plot(data_x, loss_y, linewidth=0.5, linestyle=':', marker='o', markersize=2, label='loss') plt.plot(data_x, acc_y, linewidth=0.5, linestyle='--', marker='v',", "cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('hidden_layer_sizes') plt.ylabel('learning_rate_init') plt.colorbar() plt.xticks(np.arange(len(hidden_layer_sizes_range)), hidden_layer_sizes_range, rotation=45) plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range) plt.title('Validation accuracy')", "= 10 plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize'] = 8 plt.rcParams['ytick.labelsize'] =", "# temp_loss = zero_one_loss(train_y, clf.predict(train_X)) # temp_acc = accuracy_score(train_y, clf.predict(train_X)) # f_loss.write(\"data_point= \"", "plt.xlabel('hidden_layer_sizes') plt.ylabel('learning_rate_init') plt.colorbar() plt.xticks(np.arange(len(hidden_layer_sizes_range)), hidden_layer_sizes_range, rotation=45) plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range) plt.title('Validation accuracy') # plt.show() pass", "10 plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize'] = 8 plt.rcParams['ytick.labelsize'] = 8", "plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score)) plt.xlabel('penalty') plt.ylabel('alpha (regularization)')", "max_iter=[1000]) self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass class Report: def __init__(self): pass", "os.path.dirname(__file__) except Exception as inst: dirpath = '' pass f_name1 = os.path.join(dirpath,\"../datasets/breast-cancer.npz\") f_name2", "+ 
    def trainLinearSVM(self):
        kernel_range = ['linear']
        C_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        self.param_grid = dict(kernel=kernel_range, C=C_range)
        self.grid = GridSearchCV(SVC(), param_grid=self.param_grid,
                                 cv=self.cv, n_jobs=-1)
    def trainRBFSVM(self):
        # params: C / gamma
        kernel_range = ['rbf']
        C_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        gamma_range = np.array([0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 2, 3])  # 9 params
        self.param_grid = dict(kernel=kernel_range, gamma=gamma_range, C=C_range)
        self.grid = GridSearchCV(SVC(), param_grid=self.param_grid,
                                 cv=self.cv, n_jobs=-1)
"open('./LR_overtime_loss_'+str(idx)+'.txt', 'w') f_acc = open('./LR_overtime_acc_'+str(idx)+'.txt', 'w') data_x = [] loss_y = [] acc_y", "Points') plt.ylabel('Score') # Axes alteration to put zero values inside the figure Axes", "run(self): report = Report() for idx, dname in enumerate(self.dnames): # load data if", "= midpoint Normalize.__init__(self, vmin, vmax, clip) def __call__(self, value, clip=None): x, y =", "# clfnames = [\"Linear SVM\"] for idx2, clfname in enumerate(clfnames): print(\"===== %s \"", "clf = SGDClassifier(loss='log') # shuffle=True is useless here shuffledRange = range(train_X.shape[0]) n_iter =", "zero_one_loss= \" + str(temp_loss) + \" \\n\") f_acc.write(\"data_point= \" + str(data_point) + \"", "\" \\n\") f_acc.write(\"data_point= \" + str(data_point) + \" accuracy= \" + str(temp_acc) +", "sklearn.model_selection import train_test_split, GridSearchCV from sklearn.model_selection import StratifiedShuffleSplit from sklearn.preprocessing import StandardScaler from", "# scores = model.grid.cv_results_['mean_test_score'].reshape(len(C_range), len(gamma_range)) scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score =", "Alternatives include bmh, fivethirtyeight, ggplot, # dark_background, seaborn-deep, etc plt.style.use('ggplot') plt.rcParams['font.family'] = 'serif'", "x, y)) class ClassModels: def __init__(self): self.name = '' self.grid = '' self.param_grid", "StratifiedShuffleSplit from sklearn.preprocessing import StandardScaler from sklearn.datasets import make_moons, make_circles, make_classification from sklearn.linear_model", "= np.geomspace(1.e-07, 1.e+05, num=13) # 13 params : self.param_grid = dict(kernel=kernel_range, C=C_range) self.grid", "predicted_test), zero_one_loss(target_test, predicted_test))) print(\"The train Accuracy %0.3f\" % (accuracy_score(target_train, predicted_train))) print(\"The test Accuracy", "model.grid.predict(self.test_X) predicted_train = model.grid.predict(self.train_X) # Loss + Accuracy (training + test) # auc", "def trainModel(self, cname): if (cname == \"Logistic Regression\"): self.trainLogisticRegression() elif (cname == \"Linear", "# the values of interest. 
class Report:

    def __init__(self):
        pass

    # Loss + accuracy (training + test), AUC + confusion matrix,
    # and CPU computation time.
    def showResult(self, model, predicted_test, target_test, predicted_train, target_train):
        print("The best parameters are %s with a score of %0.3f"
              % (model.grid.best_params_, model.grid.best_score_))
        print("The Train Log Loss %0.3f Zero one loss %f"
              % (log_loss(target_train, predicted_train), zero_one_loss(target_train, predicted_train)))
        print("The test Log Loss %0.3f Zero one loss %f"
              % (log_loss(target_test, predicted_test), zero_one_loss(target_test, predicted_test)))
        print("The train Accuracy %0.3f" % (accuracy_score(target_train, predicted_train)))
        print("The test Accuracy %0.3f" % (accuracy_score(target_test, predicted_test)))
        print("The test AUC of %0.3f" % (roc_auc_score(target_test, predicted_test)))
        print("The mean training time of %f" % (np.mean(model.grid.cv_results_['mean_fit_time'], axis=0)))
        print("The mean test time of %f" % (np.mean(model.grid.cv_results_['mean_score_time'], axis=0)))
        # confusion matrix
        print("confusion matrix / precision recall scores")
        print(confusion_matrix(target_test, predicted_test))
        print(classification_report(target_test, predicted_test))
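    # Caveat: roc_auc_score on hard label predictions only works for the
    # binary datasets (breast-cancer, diabetes); the multiclass sets
    # (digit, iris, wine) need per-class scores and a multiclass strategy,
    # e.g. (sketch, not part of the original flow):
    #   roc_auc_score(target_test, model.grid.predict_proba(test_X), multi_class='ovr')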
    def showPlot(self, model, clfname):
        if clfname == "Logistic Regression":
            self.showLogisticRegression(model, clfname)
        elif clfname == "Linear SVM":
            self.showLinearSVM(model, clfname)
        elif clfname == "RBF SVM":
            self.showRBFSVM(model, clfname)
        elif clfname == "Neural Nets":
            self.showNeuralNets(model, clfname)
        else:
            print("Please put existing classifier names")
    def showLogisticRegression(self, model, clfname):
        penalty_range = model.param_grid['penalty']
        alpha_range = model.param_grid['alpha']  # 13 params
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        scores = scores.reshape(len(alpha_range), len(penalty_range))
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.xlabel('penalty')
        plt.ylabel('alpha (regularization)')
        plt.colorbar()
        plt.xticks(np.arange(len(penalty_range)), penalty_range, rotation=45)
        plt.yticks(np.arange(len(alpha_range)), alpha_range)
        plt.title('Validation accuracy')
        # plt.show()
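    # The reshape above works because GridSearchCV enumerates candidates with
    # parameter names sorted and the last-sorted name varying fastest; for
    # this grid that gives alpha-major, penalty-minor order, matching
    # reshape(len(alpha_range), len(penalty_range)).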
    def showLinearSVM(self, model, clfname):
        C_range = model.param_grid['C']
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        scores = scores.reshape(len(C_range), 1)
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.ylabel('C')
        plt.colorbar()
        plt.yticks(np.arange(len(C_range)), C_range)
        plt.title('Validation accuracy')
        # plt.show()
    def showRBFSVM(self, model, clfname):
        C_range = model.param_grid['C']
        gamma_range = model.param_grid['gamma']
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        scores = scores.reshape(len(C_range), len(gamma_range))
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        # plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
        #            norm=MidpointNormalize(vmin=0.2, midpoint=0.92))
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.xlabel('gamma')
        plt.ylabel('C')
        plt.colorbar()
        plt.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45)
        plt.yticks(np.arange(len(C_range)), C_range)
        plt.title('Validation accuracy')
        # plt.show()
    def showNeuralNets(self, model, clfname):
        hidden_layer_sizes_range = model.param_grid['hidden_layer_sizes']
        learning_rate_init_range = model.param_grid['learning_rate_init']
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        # Candidates are enumerated hidden_layer_sizes-major (sorted parameter
        # names, last varying fastest), so reshape on that axis first and
        # transpose to put learning_rate_init on the rows.
        scores = scores.reshape(len(hidden_layer_sizes_range),
                                len(learning_rate_init_range)).T
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.xlabel('hidden_layer_sizes')
        plt.ylabel('learning_rate_init')
        plt.colorbar()
        plt.xticks(np.arange(len(hidden_layer_sizes_range)), hidden_layer_sizes_range, rotation=45)
        plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range)
        plt.title('Validation accuracy')
        # plt.show()
def plotLROverTime(data_x, loss_y, acc_y, idx):
    # Set the style globally.
    # Alternatives include bmh, fivethirtyeight, ggplot, dark_background,
    # seaborn-deep, etc.
    plt.style.use('ggplot')
    plt.rcParams['font.family'] = 'serif'
    plt.rcParams['font.serif'] = 'Ubuntu'
    plt.rcParams['font.monospace'] = 'Ubuntu Mono'
    plt.rcParams['font.size'] = 10
    plt.rcParams['axes.labelsize'] = 10
    plt.rcParams['axes.labelweight'] = 'bold'
    plt.rcParams['axes.titlesize'] = 10
    plt.rcParams['xtick.labelsize'] = 8
    plt.rcParams['ytick.labelsize'] = 8
    plt.rcParams['legend.fontsize'] = 10
    plt.rcParams['figure.titlesize'] = 12
    # Set an aspect ratio.
    width, height = plt.figaspect(1.68)
    fig = plt.figure(figsize=(width, height), dpi=400)
    plt.plot(data_x, loss_y, linewidth=0.5, linestyle=':', marker='o', markersize=2, label='loss')
    plt.plot(data_x, acc_y, linewidth=0.5, linestyle='--', marker='v', markersize=2, label='accuracy')
    plt.xlabel('Data Points')
    plt.ylabel('Score')
    # Axes alteration to put zero values inside the figure Axes.
    # Avoids axis white lines cutting through zero values (fivethirtyeight style).
    xmin, xmax, ymin, ymax = plt.axis()
    plt.axis([xmin - 0.1, xmax + 0.1, ymin, ymax])
    plt.title('LR performance over time', fontstyle='italic')
    plt.legend(loc='best', numpoints=1, fancybox=True)
    # Space plots a bit.
    plt.subplots_adjust(hspace=0.25, wspace=0.40)
    plt.savefig('./LR_overtime_' + str(idx) + '.png', bbox_inches='tight')
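# plt.figaspect(1.68) returns a (width, height) pair whose height/width ratio
# is 1.68, giving the portrait-shaped canvas used above for the loss and
# accuracy curves.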
def batches(l, n):
    for i in range(0, len(l), n):
        yield l[i:i + n]
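# Worked example: list(batches(range(25), 10)) yields
# [range(0, 10), range(10, 20), range(20, 25)]; runLROverTime below uses these
# index windows to slice mini-batches of at most ten samples for partial_fit.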
def runLROverTime(train_X, train_y, test_X, test_y, idx):
    clf = SGDClassifier(loss='log')  # shuffle=True is useless here
    shuffledRange = range(train_X.shape[0])
    n_iter = 10
    data_point = 0
    f_loss = open('./LR_overtime_loss_' + str(idx) + '.txt', 'w')
    f_acc = open('./LR_overtime_acc_' + str(idx) + '.txt', 'w')
    data_x = []
    loss_y = []
    acc_y = []
    # temp_loss = zero_one_loss(train_y, clf.predict(train_X))
    # temp_acc = accuracy_score(train_y, clf.predict(train_X))
    # f_loss.write("data_point= " + str(data_point) + " zero_one_loss= " + str(temp_loss) + " \n")
    # f_acc.write("data_point= " + str(data_point) + " accuracy= " + str(temp_acc) + " \n")
    # data_x.append(data_point)
    # loss_y.append(temp_loss)
    # acc_y.append(temp_acc)
    for n in range(n_iter):
        shuffledRange = list(shuffledRange)
        random.shuffle(shuffledRange)
        shuffledX = [train_X[i] for i in shuffledRange]
        shuffledY = [train_y[i] for i in shuffledRange]
        for batch in batches(range(len(shuffledX)), 10):
            clf.partial_fit(shuffledX[batch[0]:batch[-1] + 1],
                            shuffledY[batch[0]:batch[-1] + 1],
                            classes=np.unique(train_y))
            data_point += len(batch)
            temp_loss = zero_one_loss(train_y, clf.predict(train_X))
            temp_acc = accuracy_score(train_y, clf.predict(train_X))
            f_loss.write("data_point= " + str(data_point) + " zero_one_loss= " + str(temp_loss) + " \n")
            f_acc.write("data_point= " + str(data_point) + " accuracy= " + str(temp_acc) + " \n")
            data_x.append(data_point)
            loss_y.append(temp_loss)
            acc_y.append(temp_acc)
    f_loss.write("\n===== End of Training / Test Set Results =====\n")
    f_loss.write("data_point= %d , zero_one_loss= %f\n" % (data_point, zero_one_loss(test_y, clf.predict(test_X))))
    f_acc.write("\n===== End of Training / Test Set Results =====\n")
    f_acc.write("data_point= %d , accuracy= %f\n" % (data_point, accuracy_score(test_y, clf.predict(test_X))))
    f_loss.close()
    f_acc.close()
    plotLROverTime(data_x, loss_y, acc_y, idx)
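# SGDClassifier(loss='log') trains logistic regression by SGD, which is what
# makes partial_fit above meaningful: each call consumes one index window from
# batches(), so data_point counts the cumulative samples seen across the
# n_iter shuffled passes and becomes the x-axis of the plot written by
# plotLROverTime.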
Points') plt.ylabel('Score') # Axes alteration to put zero values inside the figure", "zero values inside the figure Axes # Avoids axis white lines cutting through", "\"RBF SVM\", \"Neural Nets\"] # clfnames = [\"RBF SVM\"] # clfnames = [\"Linear", "top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score)) plt.ylabel('C') plt.colorbar() plt.yticks(np.arange(len(C_range)), C_range) plt.title('Validation accuracy')", "cv=self.cv, n_jobs=-1) pass def trainRBFSVM(self): # params C / gamma kernel_range = ['rbf']", "f_name4, f_name5] self.train_X = [] self.train_y = [] self.test_X = [] self.test_y =", "pass # Loss + Accuracy (training + test) # auc + confusion matrix", "+ Accuracy (training + test) # auc + confusion matrix # cpu computation", "results predicted_test = model.grid.predict(self.test_X) predicted_train = model.grid.predict(self.train_X) # Loss + Accuracy (training +", "\\n\") data_x.append(data_point) loss_y.append(temp_loss) acc_y.append(temp_acc) f_loss.write(\"\\n===== End of Training / Test Set Results =====\\n\")", "of a colormap to be around # the values of interest. class MidpointNormalize(Normalize):", "# data_x.append(data_point) # loss_y.append(temp_loss) # acc_y.append(temp_acc) for n in range(n_iter): shuffledRange = list(shuffledRange)", "def showNeuralNets(self, model, clfname): hidden_layer_sizes_range = model.param_grid['hidden_layer_sizes'] learning_rate_init_range = model.param_grid['learning_rate_init'] scores = np.array(model.grid.cv_results_['mean_test_score'])", ": gamma_range = np.array([0.001,0.005,0.01,0.05,0.1,0.5,1,2,3]) # 9 params self.param_grid = dict(kernel=kernel_range, gamma=gamma_range, C=C_range) self.grid", "for i in shuffledRange] for batch in batches(range(len(shuffledX)), 10): clf.partial_fit(shuffledX[batch[0]:batch[-1] + 1], shuffledY[batch[0]:batch[-1]", "accuracy') # plt.show() pass def showNeuralNets(self, model, clfname): hidden_layer_sizes_range = model.param_grid['hidden_layer_sizes'] learning_rate_init_range =", "acc_y = [] # temp_loss = zero_one_loss(train_y, clf.predict(train_X)) # temp_acc = accuracy_score(train_y, clf.predict(train_X))", "= np.geomspace(1.e-07, 1.e+05, num=13) # 13 params self.param_grid = dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range, max_iter=[1000],", "learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass class Report: def __init__(self):", "cpu computation time def showResult(self, model, predicted_test, target_test, predicted_train, target_train): print(\"The best parameters", "sklearn.gaussian_process.kernels import RBF from sklearn.tree import DecisionTreeClassifier from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier from", "clfname) elif (clfname == \"Linear SVM\"): self.showLinearSVM(model, clfname) elif (clfname == \"RBF SVM\"):", "= model.param_grid['alpha'] # 13 params scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score =", "model.grid.best_score_)) print(\"The Train Log Loss %0.3f Zero one loss %f\" % (log_loss(target_train, predicted_train),", "Accuracy (training + test) # auc + confusion matrix # cpu computation time", "values - fivethirtyeight style xmin, xmax, ymin, ymax = plt.axis() plt.axis([xmin - 0.1,", "predicted_test) ) pass def showPlot(self, model, clfname): if (clfname == \"Logistic Regression\"): 
self.showLogisticRegression(model,", "self.train_y) # (2) show results predicted_test = model.grid.predict(self.test_X) predicted_train = model.grid.predict(self.train_X) # Loss", "params scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score = scores.max() mean_score = np.mean(scores,", "move the midpoint of a colormap to be around # the values of", "10 plt.rcParams['axes.labelsize'] = 10 plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize'] = 8", "the midpoint of a colormap to be around # the values of interest.", "\" \\n\") # data_x.append(data_point) # loss_y.append(temp_loss) # acc_y.append(temp_acc) for n in range(n_iter): shuffledRange", "i in shuffledRange] shuffledY = [train_y[i] for i in shuffledRange] for batch in", "self.showLinearSVM(model, clfname) elif (clfname == \"RBF SVM\"): self.showRBFSVM(model, clfname) elif (clfname == \"Neural", "useless here shuffledRange = range(train_X.shape[0]) n_iter = 10 data_point = 0 f_loss =", "plt.yticks(np.arange(len(C_range)), C_range) plt.title('Validation accuracy') # plt.show() pass def showNeuralNets(self, model, clfname): hidden_layer_sizes_range =", "MLPClassifier from sklearn.neighbors import KNeighborsClassifier from sklearn.gaussian_process import GaussianProcessClassifier from sklearn.gaussian_process.kernels import RBF", "runLROverTime(self.train_X, self.train_y, self.test_X, self.test_y, idx) continue clfnames = [\"Logistic Regression\", \"Linear SVM\", \"RBF", "interest. class MidpointNormalize(Normalize): def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False): self.midpoint = midpoint Normalize.__init__(self,", "bbox_inches='tight') pass def batches(l, n): for i in range(0, len(l), n): yield l[i:i+n]", "scores = scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,", "plt.rcParams['figure.titlesize'] = 12 # Set an aspect ratio width, height = plt.figaspect(1.68) fig", "6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('hidden_layer_sizes') plt.ylabel('learning_rate_init') plt.colorbar()", "plots a bit plt.subplots_adjust(hspace=0.25, wspace=0.40) plt.savefig('./LR_overtime_'+str(idx)+'.png', bbox_inches='tight') pass def batches(l, n): for i", "# acc_y.append(temp_acc) for n in range(n_iter): shuffledRange = list(shuffledRange) random.shuffle(shuffledRange) shuffledX = [train_X[i]", "plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace'] = 'Ubuntu Mono' plt.rcParams['font.size'] = 10 plt.rcParams['axes.labelsize'] = 10", "/ precision recall scores\") print ( confusion_matrix(target_test, predicted_test) ) print ( classification_report(target_test, predicted_test)", "top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score)) plt.xlabel('penalty') plt.ylabel('alpha (regularization)') plt.colorbar() plt.xticks(np.arange(len(penalty_range)), penalty_range,", ", accuracy= %f\\n\" % (data_point, accuracy_score(test_y, clf.predict(test_X)))) f_loss.close() f_acc.close() plotLROverTime(data_x, loss_y, acc_y, idx)", "Loss %0.3f Zero one loss %f\" % (log_loss(target_train, predicted_train), 
# Utility function to move the midpoint of a colormap to be around
# the values of interest.
class MidpointNormalize(Normalize):

    def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
        self.midpoint = midpoint
        Normalize.__init__(self, vmin, vmax, clip)

    def __call__(self, value, clip=None):
        x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
        return np.ma.masked_array(np.interp(value, x, y))
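# Example with hypothetical numbers: MidpointNormalize(vmin=0.0, vmax=1.0,
# midpoint=0.9) maps 0.9 to 0.5 on the colormap (np.interp over the three
# anchor points), spending color resolution around the mean CV score rather
# than spreading it uniformly over [vmin, vmax].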
class ClassModels:

    def __init__(self):
        self.name = ''
        self.grid = ''
        self.param_grid = ''
        self.cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42)
        self.scoring = 'neg_log_loss'  # 'accuracy', 'f1', 'precision', 'recall', 'roc_auc'

    def trainModel(self, cname):
        if (cname == "Logistic Regression"):
            self.trainLogisticRegression()
        elif (cname == "Linear SVM"):
            self.trainLinearSVM()
        elif (cname == "RBF SVM"):
            self.trainRBFSVM()
        elif (cname == "Neural Nets"):
            self.trainNeuralNets()
        else:
            print("Please put existing classifier names")
        pass

    # run CV according to params for each classifier
    def trainLogisticRegression(self):
        # TODO: try different scoring rule such as Accuracy (default), F1-measure, AUC
        loss_range = ['log']
        penalty_range = ['l2', 'l1', 'none']
        alpha_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        self.param_grid = dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range,
                               max_iter=[1000], tol=[1e-3])
        self.grid = GridSearchCV(SGDClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)
        pass

    def trainLinearSVM(self):
        kernel_range = ['linear']
        C_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        self.param_grid = dict(kernel=kernel_range, C=C_range)
        self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)
        pass

    def trainRBFSVM(self):
        # params C / gamma
        kernel_range = ['rbf']
        C_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        gamma_range = np.array([0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 2, 3])  # 9 params
        self.param_grid = dict(kernel=kernel_range, gamma=gamma_range, C=C_range)
        self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)
        pass

    def trainNeuralNets(self):
        # early stopping default False, Momentum default 0.9
        hidden_layer_sizes_range = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 16, 32])  # 12 params
        activation_range = ['logistic']
        solver_range = ['sgd']
        learning_rate_init_range = np.array([1.0e-04, 1.0e-03, 1.0e-02, 1.0e-01])  # 4 params
        self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,
                               solver=solver_range, learning_rate_init=learning_rate_init_range,
                               max_iter=[1000])
        self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)
        pass
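# Note: np.geomspace(1.e-07, 1.e+05, num=13) used above spans 12 decades in 13
# points, i.e. one candidate per power of ten (1e-07, 1e-06, ..., 1e+05); with
# cv=StratifiedShuffleSplit(n_splits=5), every grid candidate is fit 5 times.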
class Report:

    def __init__(self):
        pass

    # Loss + Accuracy (training + test)
    # auc + confusion matrix
    # cpu computation time
    def showResult(self, model, predicted_test, target_test, predicted_train, target_train):
        print("The best parameters are %s with a score of %0.3f" % (model.grid.best_params_, model.grid.best_score_))
        print("The Train Log Loss %0.3f Zero one loss %f" % (log_loss(target_train, predicted_train),
                                                             zero_one_loss(target_train, predicted_train)))
        print("The test Log Loss %0.3f Zero one loss %f" % (log_loss(target_test, predicted_test),
                                                            zero_one_loss(target_test, predicted_test)))
        print("The train Accuracy %0.3f" % (accuracy_score(target_train, predicted_train)))
        print("The test Accuracy %0.3f" % (accuracy_score(target_test, predicted_test)))
        print("The test AUC of %0.3f" % (roc_auc_score(target_test, predicted_test)))
        print("The mean training time of %f" % (np.mean(model.grid.cv_results_['mean_fit_time'], axis=0)))
        print("The mean test time of %f" % (np.mean(model.grid.cv_results_['mean_score_time'], axis=0)))
        # confusion matrix
        print("confusion matrix / precision recall scores")
        print(confusion_matrix(target_test, predicted_test))
        print(classification_report(target_test, predicted_test))
        pass

    def showPlot(self, model, clfname):
        if (clfname == "Logistic Regression"):
            self.showLogisticRegression(model, clfname)
        elif (clfname == "Linear SVM"):
            self.showLinearSVM(model, clfname)
        elif (clfname == "RBF SVM"):
            self.showRBFSVM(model, clfname)
        elif (clfname == "Neural Nets"):
            self.showNeuralNets(model, clfname)
        else:
            print("Please put existing classifier names")
        pass

    def showLogisticRegression(self, model, clfname):
        penalty_range = model.param_grid['penalty']
        alpha_range = model.param_grid['alpha']  # 13 params
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        scores = scores.reshape(len(alpha_range), len(penalty_range))
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.xlabel('penalty')
        plt.ylabel('alpha (regularization)')
        plt.colorbar()
        plt.xticks(np.arange(len(penalty_range)), penalty_range, rotation=45)
        plt.yticks(np.arange(len(alpha_range)), alpha_range)
        plt.title('Validation accuracy')
        # plt.show()
        pass

    def showLinearSVM(self, model, clfname):
        C_range = model.param_grid['C']
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        scores = scores.reshape(len(C_range), 1)
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.ylabel('C')
        plt.colorbar()
        plt.yticks(np.arange(len(C_range)), C_range)
        plt.title('Validation accuracy')
        # plt.show()
        pass
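    # Note (assumed scikit-learn behavior, worth verifying against the installed
    # version): the reshape calls in these show* methods rely on GridSearchCV
    # enumerating candidates in ParameterGrid order -- parameter names sorted
    # alphabetically, with the last varying name cycling fastest.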
    def showRBFSVM(self, model, clfname):
        C_range = model.param_grid['C']
        gamma_range = model.param_grid['gamma']
        # scores = model.grid.cv_results_['mean_test_score'].reshape(len(C_range), len(gamma_range))
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        scores = scores.reshape(len(C_range), len(gamma_range))
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        # plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
        #            norm=MidpointNormalize(vmin=0.2, midpoint=0.92))
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.xlabel('gamma')
        plt.ylabel('C')
        plt.colorbar()
        plt.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45)
        plt.yticks(np.arange(len(C_range)), C_range)
        plt.title('Validation accuracy')
        # plt.show()
        pass

    def showNeuralNets(self, model, clfname):
        hidden_layer_sizes_range = model.param_grid['hidden_layer_sizes']
        learning_rate_init_range = model.param_grid['learning_rate_init']
        scores = np.array(model.grid.cv_results_['mean_test_score'])
        min_score = scores.min()
        max_score = scores.max()
        mean_score = np.mean(scores, axis=0)
        scores = scores.reshape(len(learning_rate_init_range), len(hidden_layer_sizes_range))
        plt.figure(figsize=(8, 6))
        plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
                   norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score))
        plt.xlabel('hidden_layer_sizes')
        plt.ylabel('learning_rate_init')
        plt.colorbar()
        plt.xticks(np.arange(len(hidden_layer_sizes_range)), hidden_layer_sizes_range, rotation=45)
        plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range)
        plt.title('Validation accuracy')
        # plt.show()
        pass
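# None of the show* methods above display anything interactively (plt.show()
# is commented out in each); RunEval.run() instead writes the current figure
# to disk with plt.savefig().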
def plotLROverTime(data_x, loss_y, acc_y, idx):
    # Set the style globally
    # Alternatives include bmh, fivethirtyeight, ggplot,
    # dark_background, seaborn-deep, etc
    plt.style.use('ggplot')
    plt.rcParams['font.family'] = 'serif'
    plt.rcParams['font.serif'] = 'Ubuntu'
    plt.rcParams['font.monospace'] = 'Ubuntu Mono'
    plt.rcParams['font.size'] = 10
    plt.rcParams['axes.labelsize'] = 10
    plt.rcParams['axes.labelweight'] = 'bold'
    plt.rcParams['axes.titlesize'] = 10
    plt.rcParams['xtick.labelsize'] = 8
    plt.rcParams['ytick.labelsize'] = 8
    plt.rcParams['legend.fontsize'] = 10
    plt.rcParams['figure.titlesize'] = 12

    # Set an aspect ratio
    width, height = plt.figaspect(1.68)
    fig = plt.figure(figsize=(width, height), dpi=400)

    plt.plot(data_x, loss_y, linewidth=0.5, linestyle=':', marker='o', markersize=2, label='loss')
    plt.plot(data_x, acc_y, linewidth=0.5, linestyle='--', marker='v', markersize=2, label='accuracy')
    plt.xlabel('Data Points')
    plt.ylabel('Score')

    # Axes alteration to put zero values inside the figure Axes
    # Avoids axis white lines cutting through zero values - fivethirtyeight style
    xmin, xmax, ymin, ymax = plt.axis()
    plt.axis([xmin - 0.1, xmax + 0.1, ymin, ymax])
    plt.title('LR performance over time', fontstyle='italic')
    plt.legend(loc='best', numpoints=1, fancybox=True)

    # Space plots a bit
    plt.subplots_adjust(hspace=0.25, wspace=0.40)
    plt.savefig('./LR_overtime_' + str(idx) + '.png', bbox_inches='tight')
    pass


def batches(l, n):
    for i in range(0, len(l), n):
        yield l[i:i+n]
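# Example: list(batches(list(range(25)), 10)) yields l[0:10], l[10:20] and
# l[20:25] -- the final chunk is simply shorter when len(l) is not a multiple of n.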
"Accuracy (default), F1-measure, AUC loss_range = ['log'] penalty_range = ['l2','l1','none'] alpha_range = np.geomspace(1.e-07,", "ggplot, # dark_background, seaborn-deep, etc plt.style.use('ggplot') plt.rcParams['font.family'] = 'serif' plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace']", "= 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize'] = 8 plt.rcParams['ytick.labelsize'] = 8 plt.rcParams['legend.fontsize'] =", "CV model = ClassModels() model = ClassModels() model.trainModel(clfname) model.grid.fit(self.train_X, self.train_y) # (2) show", "norm=MidpointNormalize(vmin=0.2, midpoint=0.92)) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('gamma') plt.ylabel('C') plt.colorbar() plt.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45)", "the style globally # Alternatives include bmh, fivethirtyeight, ggplot, # dark_background, seaborn-deep, etc", "= 10 plt.rcParams['axes.labelsize'] = 10 plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize'] =", "params self.param_grid = dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range, max_iter=[1000], tol=[1e-3]) self.grid = GridSearchCV(SGDClassifier(), param_grid=self.param_grid, cv=self.cv,", "rule such as Accuracy (default), F1-measure, AUC loss_range = ['log'] penalty_range = ['l2','l1','none']", "clfname): penalty_range = model.param_grid['penalty'] alpha_range = model.param_grid['alpha'] # 13 params scores = np.array(model.grid.cv_results_['mean_test_score'])", "'f1', 'precision', 'recall', 'roc_auc' def trainModel(self, cname): if (cname == \"Logistic Regression\"): self.trainLogisticRegression()", "f_loss.close() f_acc.close() plotLROverTime(data_x, loss_y, acc_y, idx) pass class RunEval: def __init__(self): self.dnames =", "== \"Neural Nets\"): self.trainNeuralNets() else: print(\"Please put existing classifier names\") pass # run", "classes=np.unique(train_y)) data_point += len(batch) temp_loss = zero_one_loss(train_y, clf.predict(train_X)) temp_acc = accuracy_score(train_y, clf.predict(train_X)) f_loss.write(\"data_point=", "style globally # Alternatives include bmh, fivethirtyeight, ggplot, # dark_background, seaborn-deep, etc plt.style.use('ggplot')", "self.trainNeuralNets() else: print(\"Please put existing classifier names\") pass # run CV according to", "vmax=max_score, midpoint=mean_score)) plt.ylabel('C') plt.colorbar() plt.yticks(np.arange(len(C_range)), C_range) plt.title('Validation accuracy') # plt.show() pass def showRBFSVM(self,", "alteration to put zero values inside the figure Axes # Avoids axis white", "# 4 params self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid,", "Test Set Results =====\\n\") f_acc.write(\"data_point= %d , accuracy= %f\\n\" % (data_point, accuracy_score(test_y, clf.predict(test_X))))", "# Loss + Accuracy (training + test) # auc + confusion matrix #", "+= len(batch) temp_loss = zero_one_loss(train_y, clf.predict(train_X)) temp_acc = accuracy_score(train_y, clf.predict(train_X)) f_loss.write(\"data_point= \" +", "import ListedColormap from sklearn.model_selection import train_test_split, GridSearchCV from sklearn.model_selection import StratifiedShuffleSplit from sklearn.preprocessing", "RBF from 
sklearn.tree import DecisionTreeClassifier from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier from sklearn.naive_bayes import", "target_test, predicted_train, target_train): print(\"The best parameters are %s with a score of %0.3f\"", "GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass class Report: def __init__(self): pass # Loss +", "f_name5 = os.path.join(dirpath,\"../datasets/wine.npz\") # Utility function to move the midpoint of a colormap", "1: runLROverTime(self.train_X, self.train_y, self.test_X, self.test_y, idx) continue clfnames = [\"Logistic Regression\", \"Linear SVM\",", "= [] acc_y = [] # temp_loss = zero_one_loss(train_y, clf.predict(train_X)) # temp_acc =", "data_x.append(data_point) # loss_y.append(temp_loss) # acc_y.append(temp_acc) for n in range(n_iter): shuffledRange = list(shuffledRange) random.shuffle(shuffledRange)", ", zero_one_loss= %f\\n\" % (data_point, zero_one_loss(test_y, clf.predict(test_X)))) f_acc.write(\"\\n===== End of Training / Test", "clf.predict(train_X)) # temp_acc = accuracy_score(train_y, clf.predict(train_X)) # f_loss.write(\"data_point= \" + str(data_point) + \"", "aspect ratio width, height = plt.figaspect(1.68) fig = plt.figure(figsize=(width, height), dpi=400) plt.plot(data_x, loss_y,", "self.scoring = 'neg_log_loss' #'accuracy', 'f1', 'precision', 'recall', 'roc_auc' def trainModel(self, cname): if (cname", "scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15,", "KNeighborsClassifier from sklearn.gaussian_process import GaussianProcessClassifier from sklearn.gaussian_process.kernels import RBF from sklearn.tree import DecisionTreeClassifier", "# 13 params : self.param_grid = dict(kernel=kernel_range, C=C_range) self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv,", "hidden_layer_sizes_range = np.array([1,2,3,4,5,6,7,8,9,10,16,32]) # 12 params activation_range = ['logistic'] solver_range = ['sgd'] learning_rate_init_range", "max_score = scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(alpha_range),len(penalty_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2,", "self.midpoint, self.vmax], [0, 0.5, 1] return np.ma.masked_array(np.interp(value, x, y)) class ClassModels: def __init__(self):", "6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) # plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, # norm=MidpointNormalize(vmin=0.2, midpoint=0.92)) plt.imshow(scores,", "mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(C_range), len(gamma_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15,", "dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass class Report:", "C_range = model.param_grid['C'] scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score = scores.max() mean_score", "plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('hidden_layer_sizes') plt.ylabel('learning_rate_init')", "= 'Ubuntu Mono' 
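# Illustrative check of MidpointNormalize (not from the original source):
# inputs at vmin, midpoint and vmax map linearly onto 0, 0.5 and 1, which is
# what lets the heatmaps below center the colormap on the mean CV score.
#
#   norm = MidpointNormalize(vmin=0.0, vmax=1.0, midpoint=0.8)
#   norm(np.array([0.0, 0.8, 1.0]))   # -> masked_array([0.0, 0.5, 1.0])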
class ClassModels:

    def __init__(self):
        self.name = ''
        self.grid = ''
        self.param_grid = ''
        self.cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42)
        self.scoring = 'neg_log_loss'  # 'accuracy', 'f1', 'precision', 'recall', 'roc_auc'

    def trainModel(self, cname):
        if (cname == "Logistic Regression"):
            self.trainLogisticRegression()
        elif (cname == "Linear SVM"):
            self.trainLinearSVM()
        elif (cname == "RBF SVM"):
            self.trainRBFSVM()
        elif (cname == "Neural Nets"):
            self.trainNeuralNets()
        else:
            print("Please put existing classifier names")

    # Run CV according to params for each classifier.
    def trainLogisticRegression(self):
        # TODO: try different scoring rules such as Accuracy (default), F1-measure, AUC
        loss_range = ['log']
        penalty_range = ['l2', 'l1', 'none']
        alpha_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        self.param_grid = dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range,
                               max_iter=[1000], tol=[1e-3])
        self.grid = GridSearchCV(SGDClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)

    def trainLinearSVM(self):
        kernel_range = ['linear']
        C_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        self.param_grid = dict(kernel=kernel_range, C=C_range)
        self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)

    def trainRBFSVM(self):
        # params: C / gamma
        kernel_range = ['rbf']
        C_range = np.geomspace(1.e-07, 1.e+05, num=13)  # 13 params
        gamma_range = np.array([0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 2, 3])  # 9 params
        self.param_grid = dict(kernel=kernel_range, gamma=gamma_range, C=C_range)
        self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)

    def trainNeuralNets(self):
        # Early stopping defaults to False; momentum defaults to 0.9.
        hidden_layer_sizes_range = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 16, 32])  # 12 params
        activation_range = ['logistic']
        solver_range = ['sgd']
        learning_rate_init_range = np.array([1.0e-04, 1.0e-03, 1.0e-02, 1.0e-01])  # 4 params
        self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range,
                               activation=activation_range, solver=solver_range,
                               learning_rate_init=learning_rate_init_range, max_iter=[1000])
        self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)
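# Grid sizes implied above (derived from the param_grid definitions, for
# orientation): logistic regression searches 1 * 3 * 13 = 39 candidates,
# linear SVM 13, RBF SVM 13 * 9 = 117, and the MLP 12 * 4 = 48; each
# candidate is fit on all 5 StratifiedShuffleSplit folds.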
"sklearn.neighbors import KNeighborsClassifier from sklearn.gaussian_process import GaussianProcessClassifier from sklearn.gaussian_process.kernels import RBF from sklearn.tree", "['rbf'] C_range = np.geomspace(1.e-07, 1.e+05, num=13) # 13 params : gamma_range = np.array([0.001,0.005,0.01,0.05,0.1,0.5,1,2,3])", "temp_loss = zero_one_loss(train_y, clf.predict(train_X)) temp_acc = accuracy_score(train_y, clf.predict(train_X)) f_loss.write(\"data_point= \" + str(data_point) +", "accuracy_score(test_y, clf.predict(test_X)))) f_loss.close() f_acc.close() plotLROverTime(data_x, loss_y, acc_y, idx) pass class RunEval: def __init__(self):", "and int(sys.argv[1]) != idx: continue data = np.load(dname) self.train_y = data['train_Y'] self.test_y =", "= data['train_Y'] self.test_y = data['test_Y'] # standardize data (mean=0, std=1) self.train_X = StandardScaler().fit_transform(data['train_X'])", "put existing classifier names\") pass # run CV according to params for each", "default 0.9 hidden_layer_sizes_range = np.array([1,2,3,4,5,6,7,8,9,10,16,32]) # 12 params activation_range = ['logistic'] solver_range =", "return np.ma.masked_array(np.interp(value, x, y)) class ClassModels: def __init__(self): self.name = '' self.grid =", "Set the style globally # Alternatives include bmh, fivethirtyeight, ggplot, # dark_background, seaborn-deep,", "GaussianProcessClassifier from sklearn.gaussian_process.kernels import RBF from sklearn.tree import DecisionTreeClassifier from sklearn.ensemble import RandomForestClassifier,", "= os.path.join(dirpath,\"../datasets/iris.npz\") f_name5 = os.path.join(dirpath,\"../datasets/wine.npz\") # Utility function to move the midpoint of", "sklearn.linear_model import SGDClassifier from sklearn.svm import SVC from sklearn.neural_network import MLPClassifier from sklearn.neighbors", "seaborn-deep, etc plt.style.use('ggplot') plt.rcParams['font.family'] = 'serif' plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace'] = 'Ubuntu Mono'", "shuffledRange = list(shuffledRange) random.shuffle(shuffledRange) shuffledX = [train_X[i] for i in shuffledRange] shuffledY =", "from sklearn.naive_bayes import GaussianNB from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis from sklearn.metrics import * #", "else: print(\"Please put existing classifier names\") pass # run CV according to params", "% (log_loss(target_test, predicted_test), zero_one_loss(target_test, predicted_test))) print(\"The train Accuracy %0.3f\" % (accuracy_score(target_train, predicted_train))) print(\"The", "= zero_one_loss(train_y, clf.predict(train_X)) temp_acc = accuracy_score(train_y, clf.predict(train_X)) f_loss.write(\"data_point= \" + str(data_point) + \"", "interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('hidden_layer_sizes') plt.ylabel('learning_rate_init') plt.colorbar() plt.xticks(np.arange(len(hidden_layer_sizes_range)), hidden_layer_sizes_range, rotation=45) plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range) plt.title('Validation", "# temp_acc = accuracy_score(train_y, clf.predict(train_X)) # f_loss.write(\"data_point= \" + str(data_point) + \" zero_one_loss=", "# 13 params self.param_grid = dict(loss=loss_range, penalty=penalty_range, alpha=alpha_range, max_iter=[1000], tol=[1e-3]) self.grid = GridSearchCV(SGDClassifier(),", "%d , zero_one_loss= %f\\n\" % (data_point, zero_one_loss(test_y, clf.predict(test_X)))) f_acc.write(\"\\n===== End of Training /", "= Report() for 
idx, dname in enumerate(self.dnames): # load data if len(sys.argv) >", "plt.savefig('./'+clfname+'_'+str(idx)+'.png', bbox_inches = 'tight') if __name__ == '__main__': eval = RunEval() eval.run() exit()", "self.param_grid = '' self.cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) self.scoring = 'neg_log_loss' #'accuracy', 'f1',", "self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass def trainNeuralNets(self): # early stopping default", "self.trainLogisticRegression() elif (cname == \"Linear SVM\"): self.trainLinearSVM() elif (cname == \"RBF SVM\"): self.trainRBFSVM()", "CV according to params for each classifier def trainLogisticRegression(self): # TODO: try different", "= np.mean(scores, axis=0) scores = scores.reshape(len(C_range),1) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores,", "+ str(data_point) + \" zero_one_loss= \" + str(temp_loss) + \" \\n\") f_acc.write(\"data_point= \"", "model.param_grid['C'] gamma_range = model.param_grid['gamma'] # scores = model.grid.cv_results_['mean_test_score'].reshape(len(C_range), len(gamma_range)) scores = np.array(model.grid.cv_results_['mean_test_score']) min_score", "model.param_grid['C'] scores = np.array(model.grid.cv_results_['mean_test_score']) min_score = scores.min() max_score = scores.max() mean_score = np.mean(scores,", "a bit plt.subplots_adjust(hspace=0.25, wspace=0.40) plt.savefig('./LR_overtime_'+str(idx)+'.png', bbox_inches='tight') pass def batches(l, n): for i in", "plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score)) plt.ylabel('C')", "= 0 f_loss = open('./LR_overtime_loss_'+str(idx)+'.txt', 'w') f_acc = open('./LR_overtime_acc_'+str(idx)+'.txt', 'w') data_x = []", "marker='v', markersize=2, label='accuracy') plt.xlabel('Data Points') plt.ylabel('Score') # Axes alteration to put zero values", "make_circles, make_classification from sklearn.linear_model import SGDClassifier from sklearn.svm import SVC from sklearn.neural_network import", "% (data_point, zero_one_loss(test_y, clf.predict(test_X)))) f_acc.write(\"\\n===== End of Training / Test Set Results =====\\n\")", "plt.style.use('ggplot') plt.rcParams['font.family'] = 'serif' plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace'] = 'Ubuntu Mono' plt.rcParams['font.size'] =", "here shuffledRange = range(train_X.shape[0]) n_iter = 10 data_point = 0 f_loss = open('./LR_overtime_loss_'+str(idx)+'.txt',", "[] self.test_X = [] self.test_y = [] def run(self): report = Report() for", "\"RBF SVM\"): self.trainRBFSVM() elif (cname == \"Neural Nets\"): self.trainNeuralNets() else: print(\"Please put existing", "self.showNeuralNets(model, clfname) else: print(\"Please put existing classifier names\") pass def showLogisticRegression(self, model, clfname):", "def trainLogisticRegression(self): # TODO: try different scoring rule such as Accuracy (default), F1-measure,", "= np.array([1.0e-04,1.0e-03,1.0e-02,1.0e-01]) # 4 params self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid =", "matplotlib.colors import Normalize from matplotlib.colors import ListedColormap from sklearn.model_selection import train_test_split, GridSearchCV from", 
"norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score)) plt.ylabel('C') plt.colorbar() plt.yticks(np.arange(len(C_range)), C_range) plt.title('Validation accuracy') # plt.show() pass def", "trainLogisticRegression(self): # TODO: try different scoring rule such as Accuracy (default), F1-measure, AUC", "y)) class ClassModels: def __init__(self): self.name = '' self.grid = '' self.param_grid =", "interpolation='nearest', cmap=plt.cm.hot, # norm=MidpointNormalize(vmin=0.2, midpoint=0.92)) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('gamma') plt.ylabel('C') plt.colorbar()", "%f\" % (log_loss(target_train, predicted_train), zero_one_loss(target_train, predicted_train))) print(\"The test Log Loss %0.3f Zero one", "params self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1)", "from matplotlib.colors import Normalize from matplotlib.colors import ListedColormap from sklearn.model_selection import train_test_split, GridSearchCV", "sys, os, random import numpy as np import matplotlib.pyplot as plt from matplotlib.colors", "stopping default False, Momentum default 0.9 hidden_layer_sizes_range = np.array([1,2,3,4,5,6,7,8,9,10,16,32]) # 12 params activation_range", "as inst: dirpath = '' pass f_name1 = os.path.join(dirpath,\"../datasets/breast-cancer.npz\") f_name2 = os.path.join(dirpath,\"../datasets/diabetes.npz\") f_name3", "right=0.95, bottom=0.15, top=0.95) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score, vmax=max_score, midpoint=mean_score)) plt.xlabel('penalty') plt.ylabel('alpha (regularization)') plt.colorbar()", "plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize'] = 8 plt.rcParams['ytick.labelsize'] = 8 plt.rcParams['legend.fontsize']", "+ str(temp_loss) + \" \\n\") # f_acc.write(\"data_point= \" + str(data_point) + \" accuracy=", "gamma=gamma_range, C=C_range) self.grid = GridSearchCV(SVC(), param_grid=self.param_grid, cv=self.cv, n_jobs=-1) pass def trainNeuralNets(self): # early", "C_range) plt.title('Validation accuracy') # plt.show() pass def showRBFSVM(self, model, clfname): C_range = model.param_grid['C']", "+ test) # auc + confusion matrix # cpu computation time def showResult(self,", ") print ( classification_report(target_test, predicted_test) ) pass def showPlot(self, model, clfname): if (clfname", "> 2 and int(sys.argv[2]) == 1: runLROverTime(self.train_X, self.train_y, self.test_X, self.test_y, idx) continue clfnames", "as np import matplotlib.pyplot as plt from matplotlib.colors import Normalize from matplotlib.colors import", "in shuffledRange] for batch in batches(range(len(shuffledX)), 10): clf.partial_fit(shuffledX[batch[0]:batch[-1] + 1], shuffledY[batch[0]:batch[-1] + 1],", "train model with CV model = ClassModels() model = ClassModels() model.trainModel(clfname) model.grid.fit(self.train_X, self.train_y)", "# cpu computation time report.showResult(model, predicted_test, self.test_y, predicted_train, self.train_y) report.showPlot(model, clfname) plt.savefig('./'+clfname+'_'+str(idx)+'.png', bbox_inches", "test time of %f\" % (np.mean(model.grid.cv_results_['mean_score_time'], axis=0)) ) # confusion matrix print(\"confusion 
matrix", "1.e+05, num=13) # 13 params : gamma_range = np.array([0.001,0.005,0.01,0.05,0.1,0.5,1,2,3]) # 9 params self.param_grid", "predicted_train), zero_one_loss(target_train, predicted_train))) print(\"The test Log Loss %0.3f Zero one loss %f\" %", "clfname): C_range = model.param_grid['C'] gamma_range = model.param_grid['gamma'] # scores = model.grid.cv_results_['mean_test_score'].reshape(len(C_range), len(gamma_range)) scores", "self.showRBFSVM(model, clfname) elif (clfname == \"Neural Nets\"): self.showNeuralNets(model, clfname) else: print(\"Please put existing", "4 params self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000]) self.grid = GridSearchCV(MLPClassifier(), param_grid=self.param_grid, cv=self.cv,", "self.trainRBFSVM() elif (cname == \"Neural Nets\"): self.trainNeuralNets() else: print(\"Please put existing classifier names\")", "%f\\n\" % (data_point, zero_one_loss(test_y, clf.predict(test_X)))) f_acc.write(\"\\n===== End of Training / Test Set Results", "SVM\"): self.showLinearSVM(model, clfname) elif (clfname == \"RBF SVM\"): self.showRBFSVM(model, clfname) elif (clfname ==", "rotation=45) plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range) plt.title('Validation accuracy') # plt.show() pass def plotLROverTime(data_x, loss_y, acc_y, idx):", "from sklearn.model_selection import StratifiedShuffleSplit from sklearn.preprocessing import StandardScaler from sklearn.datasets import make_moons, make_circles,", "cmap=plt.cm.hot, # norm=MidpointNormalize(vmin=0.2, midpoint=0.92)) plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot, norm=MidpointNormalize(vmin=min_score,vmax=max_score, midpoint=mean_score)) plt.xlabel('gamma') plt.ylabel('C') plt.colorbar() plt.xticks(np.arange(len(gamma_range)),", "if len(sys.argv) > 2 and int(sys.argv[2]) == 1: runLROverTime(self.train_X, self.train_y, self.test_X, self.test_y, idx)", "(1) train model with CV model = ClassModels() model = ClassModels() model.trainModel(clfname) model.grid.fit(self.train_X,", "= plt.figaspect(1.68) fig = plt.figure(figsize=(width, height), dpi=400) plt.plot(data_x, loss_y, linewidth=0.5, linestyle=':', marker='o', markersize=2,", "elif (cname == \"RBF SVM\"): self.trainRBFSVM() elif (cname == \"Neural Nets\"): self.trainNeuralNets() else:", "accuracy') # plt.show() pass def showRBFSVM(self, model, clfname): C_range = model.param_grid['C'] gamma_range =", "plt.rcParams['axes.labelsize'] = 10 plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 10 plt.rcParams['xtick.labelsize'] = 8 plt.rcParams['ytick.labelsize']", "from matplotlib.colors import ListedColormap from sklearn.model_selection import train_test_split, GridSearchCV from sklearn.model_selection import StratifiedShuffleSplit", "= ['linear'] C_range = np.geomspace(1.e-07, 1.e+05, num=13) # 13 params : self.param_grid =", "hidden_layer_sizes_range, rotation=45) plt.yticks(np.arange(len(learning_rate_init_range)), learning_rate_init_range) plt.title('Validation accuracy') # plt.show() pass def plotLROverTime(data_x, loss_y, acc_y,", "matrix / precision recall scores\") print ( confusion_matrix(target_test, predicted_test) ) print ( classification_report(target_test,", "2 and int(sys.argv[2]) == 1: runLROverTime(self.train_X, self.train_y, self.test_X, self.test_y, idx) continue clfnames =", "+ str(data_point) + \" accuracy= \" + str(temp_acc) + \" \\n\") # 
data_x.append(data_point)", "+ \" \\n\") data_x.append(data_point) loss_y.append(temp_loss) acc_y.append(temp_acc) f_loss.write(\"\\n===== End of Training / Test Set", "test) # auc + confusion matrix # cpu computation time def showResult(self, model,", "data_x = [] loss_y = [] acc_y = [] # temp_loss = zero_one_loss(train_y,", "scores.max() mean_score = np.mean(scores, axis=0) scores = scores.reshape(len(C_range), len(gamma_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95,", "+ str(temp_loss) + \" \\n\") f_acc.write(\"data_point= \" + str(data_point) + \" accuracy= \"", "print(\"The Train Log Loss %0.3f Zero one loss %f\" % (log_loss(target_train, predicted_train), zero_one_loss(target_train,", "height), dpi=400) plt.plot(data_x, loss_y, linewidth=0.5, linestyle=':', marker='o', markersize=2, label='loss') plt.plot(data_x, acc_y, linewidth=0.5, linestyle='--',", "['sgd'] learning_rate_init_range = np.array([1.0e-04,1.0e-03,1.0e-02,1.0e-01]) # 4 params self.param_grid = dict(hidden_layer_sizes=hidden_layer_sizes_range, activation=activation_range,solver=solver_range, learning_rate_init=learning_rate_init_range, max_iter=[1000])", "def __init__(self): pass # Loss + Accuracy (training + test) # auc +", "enumerate(self.dnames): # load data if len(sys.argv) > 1 and int(sys.argv[1]) != idx: continue", "= 10 plt.rcParams['figure.titlesize'] = 12 # Set an aspect ratio width, height =", "+ \" \\n\") f_acc.write(\"data_point= \" + str(data_point) + \" accuracy= \" + str(temp_acc)", "= np.mean(scores, axis=0) scores = scores.reshape(len(alpha_range),len(penalty_range)) plt.figure(figsize=(8, 6)) plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95) plt.imshow(scores,", "self.test_X.shape, self.test_y.shape) if len(sys.argv) > 2 and int(sys.argv[2]) == 1: runLROverTime(self.train_X, self.train_y, self.test_X,", "clfname) plt.savefig('./'+clfname+'_'+str(idx)+'.png', bbox_inches = 'tight') if __name__ == '__main__': eval = RunEval() eval.run()", "== \"Logistic Regression\"): self.trainLogisticRegression() elif (cname == \"Linear SVM\"): self.trainLinearSVM() elif (cname ==", "class RunEval: def __init__(self): self.dnames = [f_name1, f_name2, f_name3, f_name4, f_name5] self.train_X =", "in range(0, len(l), n): yield l[i:i+n] def runLROverTime(train_X, train_y, test_X, test_y, idx): clf", "Momentum default 0.9 hidden_layer_sizes_range = np.array([1,2,3,4,5,6,7,8,9,10,16,32]) # 12 params activation_range = ['logistic'] solver_range", "midpoint=mean_score)) plt.ylabel('C') plt.colorbar() plt.yticks(np.arange(len(C_range)), C_range) plt.title('Validation accuracy') # plt.show() pass def showRBFSVM(self, model,", "= 10 plt.rcParams['xtick.labelsize'] = 8 plt.rcParams['ytick.labelsize'] = 8 plt.rcParams['legend.fontsize'] = 10 plt.rcParams['figure.titlesize'] =", "= ClassModels() model = ClassModels() model.trainModel(clfname) model.grid.fit(self.train_X, self.train_y) # (2) show results predicted_test", "test Log Loss %0.3f Zero one loss %f\" % (log_loss(target_test, predicted_test), zero_one_loss(target_test, predicted_test)))", "(\"shape of data set \", self.train_X.shape, self.train_y.shape, self.test_X.shape, self.test_y.shape) if len(sys.argv) > 2", "QuadraticDiscriminantAnalysis from sklearn.metrics import * # find a current file' directory path. 
try:", "function to move the midpoint of a colormap to be around # the", "matplotlib.colors import ListedColormap from sklearn.model_selection import train_test_split, GridSearchCV from sklearn.model_selection import StratifiedShuffleSplit from", "(cname == \"Linear SVM\"): self.trainLinearSVM() elif (cname == \"RBF SVM\"): self.trainRBFSVM() elif (cname", "trainModel(self, cname): if (cname == \"Logistic Regression\"): self.trainLogisticRegression() elif (cname == \"Linear SVM\"):", "self.cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) self.scoring = 'neg_log_loss' #'accuracy', 'f1', 'precision', 'recall', 'roc_auc'", "target_train): print(\"The best parameters are %s with a score of %0.3f\" % (model.grid.best_params_,", "__init__(self): self.dnames = [f_name1, f_name2, f_name3, f_name4, f_name5] self.train_X = [] self.train_y =", "Report() for idx, dname in enumerate(self.dnames): # load data if len(sys.argv) > 1", "Report: def __init__(self): pass # Loss + Accuracy (training + test) # auc" ]
[ "decorators from maxixe.tests import loader from maxixe.tests import parser from maxixe.tests import utils", "maxixe.tests import parser from maxixe.tests import utils suite = unittest.TestSuite() suite.addTests(unittest.TestLoader().loadTestsFromModule(decorators)) suite.addTests(unittest.TestLoader().loadTestsFromModule(loader)) suite.addTests(unittest.TestLoader().loadTestsFromModule(parser))", "from maxixe.tests import decorators from maxixe.tests import loader from maxixe.tests import parser from", "maxixe.tests import loader from maxixe.tests import parser from maxixe.tests import utils suite =", "import decorators from maxixe.tests import loader from maxixe.tests import parser from maxixe.tests import", "import loader from maxixe.tests import parser from maxixe.tests import utils suite = unittest.TestSuite()", "import maxixe from maxixe.tests import decorators from maxixe.tests import loader from maxixe.tests import", "from maxixe.tests import parser from maxixe.tests import utils suite = unittest.TestSuite() suite.addTests(unittest.TestLoader().loadTestsFromModule(decorators)) suite.addTests(unittest.TestLoader().loadTestsFromModule(loader))", "unittest import maxixe from maxixe.tests import decorators from maxixe.tests import loader from maxixe.tests", "loader from maxixe.tests import parser from maxixe.tests import utils suite = unittest.TestSuite() suite.addTests(unittest.TestLoader().loadTestsFromModule(decorators))", "import parser from maxixe.tests import utils suite = unittest.TestSuite() suite.addTests(unittest.TestLoader().loadTestsFromModule(decorators)) suite.addTests(unittest.TestLoader().loadTestsFromModule(loader)) suite.addTests(unittest.TestLoader().loadTestsFromModule(parser)) suite.addTests(unittest.TestLoader().loadTestsFromModule(utils))", "import unittest import maxixe from maxixe.tests import decorators from maxixe.tests import loader from", "maxixe from maxixe.tests import decorators from maxixe.tests import loader from maxixe.tests import parser", "from maxixe.tests import loader from maxixe.tests import parser from maxixe.tests import utils suite", "maxixe.tests import decorators from maxixe.tests import loader from maxixe.tests import parser from maxixe.tests" ]
[ "if row > 10: col += 1 row = 2 if row ==", "unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x):", "inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END,", "None completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1():", "file path') input1 = Entry(frame1, width=40) button1 = Button(frame1, image=photo, command=browsefunc1) button2 =", "1 button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2,", "open(filepath, mode='r', encoding='utf-8') as file: lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line", "= None filepath = None completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True)", "import tkinter.messagebox from utils import get_unlocked_course, reset_completed from tkinter import * from tkinter", "+= 1 row = 2 if row == 2: col -= 1 if", "col = col // 2 + 1 button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1,", "// 2 else: col = col // 2 + 1 button5 = Button(frame3,", "completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished'] def submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses", "Tk() photo = PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label = Label(window, text='Use one", "= filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END, filename) def submit2(): filepath", "you've done so far\") checkboxes = {} row, col = 2, 1 for", "image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt file path')", "(\"csv files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename) def submit1(): filepath = input1.get()", "[True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked", "column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col,", "'\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window = Tk() photo = PhotoImage(file='folder.png') photo =", "1 if row > 10: col += 1 row = 2 if row", "= '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x,", "2 if row == 2: col -= 1 if col % 2 ==", "column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2,", "Label(frame1, text='.csv file path') input1 = Entry(frame1, width=40) button1 = Button(frame1, image=photo, command=browsefunc1)", "b = Checkbutton(frame3, text=i, command=lambda x=i: check(x)) 
checkboxes[i] = b b.grid(row=row, column=col) row", "filepath = input2.get() with open(filepath, mode='r', encoding='utf-8') as file: lines = list(map(lambda x:", "for line in lines: completed_course.loc[line, 'Finished'] = True unlocked = get_unlocked_course(df, completed_course) unlocked_courses", "text=i, command=lambda x=i: check(x)) checkboxes[i] = b b.grid(row=row, column=col) row += 1 if", "input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1,", "get_unlocked_course, reset_completed from tkinter import * from tkinter import filedialog df = pd.read_csv('course.csv')", "col = 2, 1 for i, r in df.iterrows(): b = Checkbutton(frame3, text=i,", "columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2,", "button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row if row ==", "columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row", "file.read().split('\\n'))) for line in lines: completed_course.loc[line, 'Finished'] = True unlocked = get_unlocked_course(df, completed_course)", "\"*.*\"))) input1.insert(END, filename) def submit1(): filepath = input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'],", "Frame(window) label1 = Label(frame1, text='.csv file path') input1 = Entry(frame1, width=40) button1 =", "completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df,", "get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename =", "= True unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course)", "False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\",", "unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All files\", \"*.*\")))", "unlocked_courses) reset_completed(completed_course) window = Tk() photo = PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label", "width=40) button1 = Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1) label2 =", "Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished'] def submit3():", "= Label(window, text='Use one of the methods') frame1 = Frame(window) frame2 = Frame(window)", "frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row if row == 2 else", "(\"All files\", \"*.*\"))) input1.insert(END, filename) def 
submit1(): filepath = input1.get() completed_course = pd.read_csv(filepath)", "import filedialog df = pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course', inplace=True) completed_course = None", "row = 2 if row == 2: col -= 1 if col %", "file path') input2 = Entry(frame2, width=40) button3 = Button(frame2, image=photo, command=browsefunc2) button4 =", "text='Use one of the methods') frame1 = Frame(window) frame2 = Frame(window) frame3 =", "completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=(", "completed_course.loc[line, 'Finished'] = True unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\",", "path') input2 = Entry(frame2, width=40) button3 = Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2,", "= pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course)", "checkboxes = {} row, col = 2, 1 for i, r in df.iterrows():", "text='.txt file path') input2 = Entry(frame2, width=40) button3 = Button(frame2, image=photo, command=browsefunc2) button4", "False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All", "unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2():", "if row == 2: col -= 1 if col % 2 == 0:", "% 2 == 0: col = col // 2 else: col = col", "x.strip().upper(), file.read().split('\\n'))) for line in lines: completed_course.loc[line, 'Finished'] = True unlocked = get_unlocked_course(df,", "= Label(frame2, text='.txt file path') input2 = Entry(frame2, width=40) button3 = Button(frame2, image=photo,", "button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12)", "lines: completed_course.loc[line, 'Finished'] = True unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked", "submit2(): filepath = input2.get() with open(filepath, mode='r', encoding='utf-8') as file: lines = list(map(lambda", "completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course)", "col -= 1 if col % 2 == 0: col = col //", "= Entry(frame1, width=40) button1 = Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1)", "input2.get() with open(filepath, mode='r', encoding='utf-8') as file: lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n')))", "photo = photo.subsample(13, 13) label = Label(window, text='Use one of the methods') frame1", "def submit1(): filepath = input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True)", "inplace=True) completed_course = None filepath = None completed_course = 
pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True,", "column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1,", "inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\",", "= '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\",", "tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window = Tk() photo = PhotoImage(file='folder.png') photo = photo.subsample(13,", "Checkbutton(frame3, text=i, command=lambda x=i: check(x)) checkboxes[i] = b b.grid(row=row, column=col) row += 1", "def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END, filename)", "input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11,", "text=\"Choose the courses you've done so far\") checkboxes = {} row, col =", "command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt file path') input2", "window = Tk() photo = PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label = Label(window,", "filepath = None completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True)", "from utils import get_unlocked_course, reset_completed from tkinter import * from tkinter import filedialog", "label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row if row == 2 else 1) window.mainloop()", "column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5)", "of the methods') frame1 = Frame(window) frame2 = Frame(window) frame3 = Frame(window) label1", "2: col -= 1 if col % 2 == 0: col = col", "as file: lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line in lines: completed_course.loc[line,", "\"*.*\"))) input2.insert(END, filename) def submit2(): filepath = input2.get() with open(filepath, mode='r', encoding='utf-8') as", "= 2 if row == 2: col -= 1 if col % 2", "text='.csv file path') input1 = Entry(frame1, width=40) button1 = Button(frame1, image=photo, command=browsefunc1) button2", "check(x)) checkboxes[i] = b b.grid(row=row, column=col) row += 1 if row > 10:", "from tkinter import filedialog df = pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course', inplace=True) completed_course", "Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt file", "column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2,", "import get_unlocked_course, reset_completed from tkinter import * from tkinter import filedialog df =", "= pd.read_csv('course.csv') 
df = df.fillna('') df.set_index('Course', inplace=True) completed_course = None filepath = None", "row += 1 if row > 10: col += 1 row = 2", "(\"txt files\", \"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END, filename) def submit2(): filepath = input2.get()", "pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses", "encoding='utf-8') as file: lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line in lines:", "= pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename =", "pd import tkinter.messagebox from utils import get_unlocked_course, reset_completed from tkinter import * from", "= Label(frame1, text='.csv file path') input1 = Entry(frame1, width=40) button1 = Button(frame1, image=photo,", "2, 1 for i, r in df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda x=i:", "= list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line in lines: completed_course.loc[line, 'Finished'] = True", "line in lines: completed_course.loc[line, 'Finished'] = True unlocked = get_unlocked_course(df, completed_course) unlocked_courses =", "pandas as pd import tkinter.messagebox from utils import get_unlocked_course, reset_completed from tkinter import", "= Frame(window) label1 = Label(frame1, text='.csv file path') input1 = Entry(frame1, width=40) button1", "= Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10)", "'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked)", "label2 = Label(frame2, text='.txt file path') input2 = Entry(frame2, width=40) button3 = Button(frame2,", "Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose the courses you've done so far\")", "in lines: completed_course.loc[line, 'Finished'] = True unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked)", "completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv", "label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1,", "10: col += 1 row = 2 if row == 2: col -=", "methods') frame1 = Frame(window) frame2 = Frame(window) frame3 = Frame(window) label1 = Label(frame1,", "reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END,", "= Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose the courses you've done so", "= {} row, col = 2, 1 for i, r in df.iterrows(): b", "inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses)", "> 10: col += 1 row = 2 if row == 2: col", "= None completed_course = 
pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) def", "done so far\") checkboxes = {} row, col = 2, 1 for i,", "= '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window = Tk() photo = PhotoImage(file='folder.png') photo", "mode='r', encoding='utf-8') as file: lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line in", "-= 1 if col % 2 == 0: col = col // 2", "r in df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda x=i: check(x)) checkboxes[i] = b", "(\"All files\", \"*.*\"))) input2.insert(END, filename) def submit2(): filepath = input2.get() with open(filepath, mode='r',", "== 0: col = col // 2 else: col = col // 2", "1 row = 2 if row == 2: col -= 1 if col", "label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1)", "filedialog df = pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course', inplace=True) completed_course = None filepath", "= input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked", "label = Label(window, text='Use one of the methods') frame1 = Frame(window) frame2 =", "filename) def submit1(): filepath = input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False],", "pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=(", "\"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END, filename) def submit2(): filepath = input2.get() with open(filepath,", "= get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x,", "button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1,", "button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11)", "= col // 2 + 1 button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1)", "i, r in df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda x=i: check(x)) checkboxes[i] =", "row > 10: col += 1 row = 2 if row == 2:", "path') input1 = Entry(frame1, width=40) button1 = Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1,", "column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row if row", "Courses\", unlocked_courses) reset_completed(completed_course) window = Tk() photo = PhotoImage(file='folder.png') photo = photo.subsample(13, 13)", "Label(window, text='Use one of the methods') frame1 = Frame(window) frame2 = Frame(window) frame3", "<reponame>swapno-ahmed/CoursePicker import pandas as pd import tkinter.messagebox from utils import get_unlocked_course, reset_completed from", 
"far\") checkboxes = {} row, col = 2, 1 for i, r in", "files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename) def submit1(): filepath = input1.get() completed_course", "= Checkbutton(frame3, text=i, command=lambda x=i: check(x)) checkboxes[i] = b b.grid(row=row, column=col) row +=", "x: x.strip().upper(), file.read().split('\\n'))) for line in lines: completed_course.loc[line, 'Finished'] = True unlocked =", "command=lambda x=i: check(x)) checkboxes[i] = b b.grid(row=row, column=col) row += 1 if row", "df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda x=i: check(x)) checkboxes[i] = b b.grid(row=row, column=col)", "filename) def submit2(): filepath = input2.get() with open(filepath, mode='r', encoding='utf-8') as file: lines", "button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row if", "completed_course.set_index('Course', inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\", \"*.*\")))", "2 + 1 button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1,", "button3 = Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3,", "one of the methods') frame1 = Frame(window) frame2 = Frame(window) frame3 = Frame(window)", "x=i: check(x)) checkboxes[i] = b b.grid(row=row, column=col) row += 1 if row >", "check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished'] def submit3(): unlocked = get_unlocked_course(df, completed_course)", "= b b.grid(row=row, column=col) row += 1 if row > 10: col +=", "photo = PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label = Label(window, text='Use one of", "* from tkinter import filedialog df = pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course', inplace=True)", "+ 1 button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1)", "col += 1 row = 2 if row == 2: col -= 1", "input2 = Entry(frame2, width=40) button3 = Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\",", "b.grid(row=row, column=col) row += 1 if row > 10: col += 1 row", "input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked =", "column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10)", "from tkinter import * from tkinter import filedialog df = pd.read_csv('course.csv') df =", "image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose the courses", "tkinter import filedialog df = pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course', inplace=True) completed_course =", "def check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished'] def submit3(): unlocked = get_unlocked_course(df,", "Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt file path') input2 = Entry(frame2, width=40)", "utils import get_unlocked_course, reset_completed from tkinter import * 
from tkinter import filedialog df", "if col % 2 == 0: col = col // 2 else: col", "import * from tkinter import filedialog df = pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course',", "tkinter.messagebox from utils import get_unlocked_course, reset_completed from tkinter import * from tkinter import", "browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename) def", "as pd import tkinter.messagebox from utils import get_unlocked_course, reset_completed from tkinter import *", "unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window = Tk() photo = PhotoImage(file='folder.png')", "0: col = col // 2 else: col = col // 2 +", "command=submit1) label2 = Label(frame2, text='.txt file path') input2 = Entry(frame2, width=40) button3 =", "None filepath = None completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course',", "'Finished'] def submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses)", "filepath = input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True)", "completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses =", "frame2 = Frame(window) frame3 = Frame(window) label1 = Label(frame1, text='.csv file path') input1", "completed_course = None filepath = None completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False],", "= Entry(frame2, width=40) button3 = Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2)", "Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose the", "'\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished']", "command=submit2) label3 = Label(frame3, text=\"Choose the courses you've done so far\") checkboxes =", "= not completed_course.loc[x, 'Finished'] def submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked)", "button2 = Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt file path') input2 =", "== 2: col -= 1 if col % 2 == 0: col =", "col % 2 == 0: col = col // 2 else: col =", "'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\",", "input1.insert(END, filename) def submit1(): filepath = input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True,", "files\", \"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END, filename) def submit2(): filepath = input2.get() with", "Entry(frame1, width=40) button1 = Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1) label2", "col = col // 2 
else: col = col // 2 + 1", "row == 2: col -= 1 if col % 2 == 0: col", "= photo.subsample(13, 13) label = Label(window, text='Use one of the methods') frame1 =", "b b.grid(row=row, column=col) row += 1 if row > 10: col += 1", "width=40) button3 = Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2) label3 =", "Label(frame2, text='.txt file path') input2 = Entry(frame2, width=40) button3 = Button(frame2, image=photo, command=browsefunc2)", "= get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename", "= Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt", "unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] = not", "label3 = Label(frame3, text=\"Choose the courses you've done so far\") checkboxes = {}", "else: col = col // 2 + 1 button5 = Button(frame3, text=\"Submit\", command=submit3)", "def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename)", "Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2,", "file: lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line in lines: completed_course.loc[line, 'Finished']", "2 == 0: col = col // 2 else: col = col //", "= filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename) def submit1(): filepath", "= Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose", "list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line in lines: completed_course.loc[line, 'Finished'] = True unlocked", "Entry(frame2, width=40) button3 = Button(frame2, image=photo, command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2) label3", "'\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"),", "def submit2(): filepath = input2.get() with open(filepath, mode='r', encoding='utf-8') as file: lines =", "unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt", "True unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def", "files\", \"*.*\"))) input2.insert(END, filename) def submit2(): filepath = input2.get() with open(filepath, mode='r', encoding='utf-8')", "tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All", "column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, 
columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1) label2.grid(row=1,", "in df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda x=i: check(x)) checkboxes[i] = b b.grid(row=row,", "inplace=True) unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def", "checkboxes[i] = b b.grid(row=row, column=col) row += 1 if row > 10: col", "frame3 = Frame(window) label1 = Label(frame1, text='.csv file path') input1 = Entry(frame1, width=40)", "completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window = Tk() photo =", "= Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt file path') input2 = Entry(frame2,", "col // 2 else: col = col // 2 + 1 button5 =", "tkinter import * from tkinter import filedialog df = pd.read_csv('course.csv') df = df.fillna('')", "df = df.fillna('') df.set_index('Course', inplace=True) completed_course = None filepath = None completed_course =", "1 if col % 2 == 0: col = col // 2 else:", "df.set_index('Course', inplace=True) completed_course = None filepath = None completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'],", "filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename) def submit1(): filepath =", "command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2,", "reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished'] def submit3(): unlocked =", "input2.insert(END, filename) def submit2(): filepath = input2.get() with open(filepath, mode='r', encoding='utf-8') as file:", "with open(filepath, mode='r', encoding='utf-8') as file: lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for", "label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12)", "unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished'] def submit3(): unlocked", "files\", \"*.*\"))) input1.insert(END, filename) def submit1(): filepath = input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES',", "def submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course)", "label1 = Label(frame1, text='.csv file path') input1 = Entry(frame1, width=40) button1 = Button(frame1,", "frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4, column=1)", "not completed_course.loc[x, 'Finished'] def submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked", "column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row if row == 
2 else 1)", "= df.fillna('') df.set_index('Course', inplace=True) completed_course = None filepath = None completed_course = pd.read_csv('completed_course.csv')", "= 2, 1 for i, r in df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda", "button1 = Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\", command=submit1) label2 = Label(frame2,", "col // 2 + 1 button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2,", "row, col = 2, 1 for i, r in df.iterrows(): b = Checkbutton(frame3,", "'Finished'] = not completed_course.loc[x, 'Finished'] def submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses =", "submit1(): filepath = input1.get() completed_course = pd.read_csv(filepath) completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course',", "lines = list(map(lambda x: x.strip().upper(), file.read().split('\\n'))) for line in lines: completed_course.loc[line, 'Finished'] =", "+= 1 if row > 10: col += 1 row = 2 if", "command=browsefunc2) button4 = Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose the courses you've", "Courses\", unlocked_courses) reset_completed(completed_course) def browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All files\",", "Frame(window) frame3 = Frame(window) label1 = Label(frame1, text='.csv file path') input1 = Entry(frame1,", "the courses you've done so far\") checkboxes = {} row, col = 2,", "PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label = Label(window, text='Use one of the methods')", "df.fillna('') df.set_index('Course', inplace=True) completed_course = None filepath = None completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES',", "1 for i, r in df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda x=i: check(x))", "df = pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course', inplace=True) completed_course = None filepath =", "completed_course.loc[x, 'Finished'] def submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\",", "Frame(window) frame2 = Frame(window) frame3 = Frame(window) label1 = Label(frame1, text='.csv file path')", "filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename) def submit1():", "reset_completed from tkinter import * from tkinter import filedialog df = pd.read_csv('course.csv') df", "= Frame(window) frame2 = Frame(window) frame3 = Frame(window) label1 = Label(frame1, text='.csv file", "column=12) frame3.grid(row=4, column=1) label3.grid(row=1, column=1, columnspan=5) button5.grid(row=11, column=col, columnspan=row if row == 2", "get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window = Tk() photo", "text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11)", "pd.read_csv('course.csv') df = df.fillna('') df.set_index('Course', inplace=True) completed_course = None filepath = None completed_course", "filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All 
files\", \"*.*\"))) input2.insert(END, filename) def submit2():", "unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window =", "'Finished'] = True unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses)", "= input2.get() with open(filepath, mode='r', encoding='utf-8') as file: lines = list(map(lambda x: x.strip().upper(),", "= PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label = Label(window, text='Use one of the", "= Label(frame3, text=\"Choose the courses you've done so far\") checkboxes = {} row,", "text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose the courses you've done so far\") checkboxes", "2 else: col = col // 2 + 1 button5 = Button(frame3, text=\"Submit\",", "Label(frame3, text=\"Choose the courses you've done so far\") checkboxes = {} row, col", "import pandas as pd import tkinter.messagebox from utils import get_unlocked_course, reset_completed from tkinter", "= Frame(window) frame3 = Frame(window) label1 = Label(frame1, text='.csv file path') input1 =", "tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] = not completed_course.loc[x, 'Finished'] def", "frame1 = Frame(window) frame2 = Frame(window) frame3 = Frame(window) label1 = Label(frame1, text='.csv", "courses you've done so far\") checkboxes = {} row, col = 2, 1", "the methods') frame1 = Frame(window) frame2 = Frame(window) frame3 = Frame(window) label1 =", "browsefunc2(): filename = filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END, filename) def", "{} row, col = 2, 1 for i, r in df.iterrows(): b =", "column=1) frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3,", "completed_course = pd.read_csv('completed_course.csv') completed_course.replace(['YES', 'NO'], [True, False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename", "reset_completed(completed_course) window = Tk() photo = PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label =", "submit3(): unlocked = get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window", "column=12) frame2.grid(row=3, column=1) label2.grid(row=1, column=1) input2.grid(row=2, column=1, columnspan=10) button3.grid(row=2, column=11) button4.grid(row=2, column=12) frame3.grid(row=4,", "completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished'] =", "// 2 + 1 button5 = Button(frame3, text=\"Submit\", command=submit3) label.grid(row=1, column=1) frame1.grid(row=2, column=1)", "[True, False], inplace=True) completed_course.set_index('Course', inplace=True) def browsefunc1(): filename = filedialog.askopenfilename(filetypes=( (\"csv files\", \"*.csv\"),", "filedialog.askopenfilename(filetypes=( (\"txt files\", \"*.txt\"), (\"All files\", \"*.*\"))) input2.insert(END, filename) def submit2(): 
filepath =", "button4 = Button(frame2, text=\"Submit\", command=submit2) label3 = Label(frame3, text=\"Choose the courses you've done", "\"*.csv\"), (\"All files\", \"*.*\"))) input1.insert(END, filename) def submit1(): filepath = input1.get() completed_course =", "get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) def check(x): completed_course.loc[x, 'Finished']", "= Tk() photo = PhotoImage(file='folder.png') photo = photo.subsample(13, 13) label = Label(window, text='Use", "column=col) row += 1 if row > 10: col += 1 row =", "so far\") checkboxes = {} row, col = 2, 1 for i, r", "= col // 2 else: col = col // 2 + 1 button5", "text=\"Submit\", command=submit1) label2 = Label(frame2, text='.txt file path') input2 = Entry(frame2, width=40) button3", "13) label = Label(window, text='Use one of the methods') frame1 = Frame(window) frame2", "= get_unlocked_course(df, completed_course) unlocked_courses = '\\n'.join(unlocked) tkinter.messagebox.showinfo(\"Unlocked Courses\", unlocked_courses) reset_completed(completed_course) window = Tk()", "input1 = Entry(frame1, width=40) button1 = Button(frame1, image=photo, command=browsefunc1) button2 = Button(frame1, text=\"Submit\",", "for i, r in df.iterrows(): b = Checkbutton(frame3, text=i, command=lambda x=i: check(x)) checkboxes[i]", "frame1.grid(row=2, column=1) label1.grid(row=1, column=1) input1.grid(row=2, column=1, columnspan=10) button1.grid(row=2, column=11) button2.grid(row=2, column=12) frame2.grid(row=3, column=1)", "photo.subsample(13, 13) label = Label(window, text='Use one of the methods') frame1 = Frame(window)" ]
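# The `utils` helpers imported above are not part of this file. The sketch
# below is only a guess at their contract, assuming `course.csv` carries a
# comma-separated 'Prerequisite' column and `completed_course` a boolean
# 'Finished' column; the real module may differ.

def get_unlocked_course(df, completed_course):
    # A course is unlocked once all of its prerequisites are finished.
    done = set(completed_course.index[completed_course['Finished']])
    unlocked = []
    for course, row in df.iterrows():
        prereqs = [p.strip() for p in str(row.get('Prerequisite', '')).split(',')
                   if p.strip()]
        if course not in done and all(p in done for p in prereqs):
            unlocked.append(course)
    return unlocked


def reset_completed(completed_course):
    # Clear every 'Finished' flag so the next submission starts clean.
    completed_course['Finished'] = False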
[ "class Blog(models.Model): title = models.CharField(\"标题\", unique=True, max_length=200) class Meta: db_table = 'blog' verbose_name", "from django.db import models class Blog(models.Model): title = models.CharField(\"标题\", unique=True, max_length=200) class Meta:", "title = models.CharField(\"标题\", unique=True, max_length=200) class Meta: db_table = 'blog' verbose_name = '文章'", "models class Blog(models.Model): title = models.CharField(\"标题\", unique=True, max_length=200) class Meta: db_table = 'blog'", "django.db import models class Blog(models.Model): title = models.CharField(\"标题\", unique=True, max_length=200) class Meta: db_table", "import models class Blog(models.Model): title = models.CharField(\"标题\", unique=True, max_length=200) class Meta: db_table =", "Blog(models.Model): title = models.CharField(\"标题\", unique=True, max_length=200) class Meta: db_table = 'blog' verbose_name =" ]
[ "def get_start(self, url_start): self.url_start = url_start if __name__ == '__main__': # url =", "= manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start) tips =", "@staticmethod def add_urls_head(urls, head): for i, item in enumerate(urls): item = head +", "''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager =", "= url_start if __name__ == '__main__': # url = input('The website you want:\\n')", "= 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike) graber.grabing_urls() # text = graber.get_readme(url) # graber.logManager.log_text(text)", "href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader()", "re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is", "from wormer.data import strategy import re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern", "item in enumerate(urls): item = head + item urls[i] = item def get_start(self,", "import re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern =", "tips = self.downloader.get_readme() return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while", "is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for", "limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need", "match need to the beginning of the string, and return is a turple,", "self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed')", "turple] to change, and findall return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis =", "i, item in enumerate(urls): item = head + item urls[i] = item def", "'''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager", "Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">'", "for i, item in enumerate(urls): item = head + item urls[i] = item", "manager.ThreadManager() self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return tips def", "= 
self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the beginning of the string, and return", "href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager = manager.TextManager() self.logManager", "import strategy import re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<'", "i in turple] to change, and findall return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern)", "self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern,", "get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start,", "= head + item urls[i] = item def get_start(self, url_start): self.url_start = url_start", "class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank", "= self.downloader.get_readme() return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url():", "def __init__(self): self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager = manager.TextManager() self.logManager =", "urls[i] = item def get_start(self, url_start): self.url_start = url_start if __name__ == '__main__':", "of the string, and return is a turple, use [i for i in", "self.downloader.get_readme() return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source", "= item def get_start(self, url_start): self.url_start = url_start if __name__ == '__main__': #", "wormer.tools import manager, downloader from wormer.data import strategy import re class Graber: synopsis_pattern", "[i for i in turple] to change, and findall return list urls =", "'>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager() self.downloader =", "= self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com')", "= self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls", "get_start(self, url_start): self.url_start = url_start if __name__ == '__main__': # url = input('The", "item def get_start(self, url_start): self.url_start = url_start if __name__ == '__main__': # url", "in enumerate(urls): item = head + item urls[i] = item def get_start(self, url_start):", "== 
'__main__': # url = input('The website you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber", "self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager = manager.TextManager() self.logManager = manager.LogManager() self.threadManager", "__name__ == '__main__': # url = input('The website you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python'", "you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike) graber.grabing_urls() # text =", "Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content", "self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start) tips", "url_start if __name__ == '__main__': # url = input('The website you want:\\n') url_python_baike", "to the beginning of the string, and return is a turple, use [i", "= manager.TextManager() self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start = '' def get_readme(self):", "= downloader.DownLoader() self.textManager = manager.TextManager() self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start =", "and page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls,", "and return is a turple, use [i for i in turple] to change,", "urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content =", "self.downloader = downloader.DownLoader() self.textManager = manager.TextManager() self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start", "downloader from wormer.data import strategy import re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) '''", "item = head + item urls[i] = item def get_start(self, url_start): self.url_start =", "def add_urls_head(urls, head): for i, item in enumerate(urls): item = head + item", "self.url_start = url_start if __name__ == '__main__': # url = input('The website you", "def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start)", "return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source =", "the beginning of the string, and return is a turple, use [i for", "self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and", "add_urls_head(urls, head): for i, item in enumerate(urls): item = head + item urls[i]", "the string, and return is a turple, use [i for 
i in turple]", "import manager, downloader from wormer.data import strategy import re class Graber: synopsis_pattern =", "in turple] to change, and findall return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis", "urls and page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def", "to change, and findall return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source,", "= '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self):", "Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content)", "list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content", "'' def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST):", "url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike) graber.grabing_urls() # text = graber.get_readme(url) #", "findall return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) #", "head + item urls[i] = item def get_start(self, url_start): self.url_start = url_start if", "from wormer.tools import manager, downloader from wormer.data import strategy import re class Graber:", "a turple, use [i for i in turple] to change, and findall return", "for i in turple] to change, and findall return list urls = self.textManager.find_urls_by_regex(page_source,", "string, and return is a turple, use [i for i in turple] to", "turple, use [i for i in turple] to change, and findall return list", "# match need to the beginning of the string, and return is a", "url = input('The website you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike)", "synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if", "self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the beginning of the string, and return is", "'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the beginning of", "self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for i, item in enumerate(urls): item = head", "website you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike) graber.grabing_urls() # text", "self.logManager.collect_data(page_content) 
self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for i, item in enumerate(urls): item =", "is a turple, use [i for i in turple] to change, and findall", "'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for i, item in enumerate(urls):", "while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the beginning of the", "input('The website you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike) graber.grabing_urls() #", "+ item urls[i] = item def get_start(self, url_start): self.url_start = url_start if __name__", "and findall return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL)", "change, and findall return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern,", "if __name__ == '__main__': # url = input('The website you want:\\n') url_python_baike =", "manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager = manager.TextManager() self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager()", "None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for i, item", "manager, downloader from wormer.data import strategy import re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config)", "wormer.data import strategy import re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern =", "grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to", "manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme()", "item urls[i] = item def get_start(self, url_start): self.url_start = url_start if __name__ ==", "re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a", "print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is not None:", "self.threadManager = manager.ThreadManager() self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return", "page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head):", "self.textManager = manager.TextManager() self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start = '' def", "# url = input('The website you 
want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber()", "'<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager =", "__init__(self): self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager = manager.TextManager() self.logManager = manager.LogManager()", "enumerate(urls): item = head + item urls[i] = item def get_start(self, url_start): self.url_start", "page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the beginning of the string, and", "return is a turple, use [i for i in turple] to change, and", "self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return tips def grabing_urls(self,", "def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') #", "strategy import re class Graber: synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern", "= manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager = manager.TextManager() self.logManager = manager.LogManager() self.threadManager =", "self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls)", "head): for i, item in enumerate(urls): item = head + item urls[i] =", "self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the", "self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the beginning of the string,", "self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for i, item in enumerate(urls): item", "'__main__': # url = input('The website you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber =", "text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager()", "synopsis_pattern = '''(?=lemma-summary\")(.*?)(?<=config) ''' text_pattern = '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def", "if urls and page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod", "manager.TextManager() self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start)", "= manager.ThreadManager() self.url_start = '' def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return 
tips", "tips def grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8')", "want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike) graber.grabing_urls() # text = graber.get_readme(url)", "self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match need to the beginning", "downloader.DownLoader() self.textManager = manager.TextManager() self.logManager = manager.LogManager() self.threadManager = manager.ThreadManager() self.url_start = ''", "= self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis,", "not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for i,", "= '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager", "page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is not None: self.add_urls_head(urls,", "need to the beginning of the string, and return is a turple, use", "Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL)", "= '' def get_readme(self): self.downloader.grab_single(self.url_start) tips = self.downloader.get_readme() return tips def grabing_urls(self, limit=100,", "target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager() self.downloader = downloader.DownLoader() self.textManager = manager.TextManager()", "= '>\\s*?([^\\&\\b\\n\\[\\]]*?)<' href_pattern = '<a target=_blank href=\"(/item/[\\w\\d%]*?)\">' def __init__(self): self.urlManager = manager.UrlsManager() self.downloader", "grabing_urls(self, limit=100, grab_strategy=strategy.GrabStrategy.BREATH_FIRST): self.urlManager.add_single_url(self.url_start) self.urlManager.add_single_url(self.url_start, 'urls_grabbed') while self.urlManager.has_next_url(): page_source = self.downloader.grab_single_url(self.urlManager.get_url()).content.decode('utf-8') # match", "url_start): self.url_start = url_start if __name__ == '__main__': # url = input('The website", "use [i for i in turple] to change, and findall return list urls", "beginning of the string, and return is a turple, use [i for i", "# print(synopsis) page_content = self.textManager.find_text_by_regex(synopsis, Graber.text_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is not", "re.VERBOSE|re.MULTILINE|re.DOTALL) if urls and page_content is not None: self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) 
self.logManager.collect_data(page_content) self.logManager.save_all_data()", "self.add_urls_head(urls, 'https://baike.baidu.com') self.urlManager.add_urls(urls) self.logManager.collect_data(page_content) self.logManager.save_all_data() @staticmethod def add_urls_head(urls, head): for i, item in", "return list urls = self.textManager.find_urls_by_regex(page_source, Graber.href_pattern) synopsis = self.textManager.find_text_by_regex(page_source, Graber.synopsis_pattern, re.VERBOSE|re.MULTILINE|re.DOTALL) # print(synopsis)", "= input('The website you want:\\n') url_python_baike = 'https://baike.baidu.com/item/Python' graber = Graber() graber.get_start(url_python_baike) graber.grabing_urls()" ]
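# The comment inside `grabing_urls` contrasts `re.match` with `re.findall`.
# Below is a tiny self-contained illustration of that difference; the pattern
# mirrors `href_pattern` above, but the sample HTML string is made up.

import re

sample = '<a target=_blank href="/item/A"> <a target=_blank href="/item/B">'
pattern = r'<a target=_blank href="(/item/[\w\d%]*?)">'

m = re.match(pattern, sample)  # anchored at position 0; one match object
print(m.group(1))              # -> /item/A
print(re.findall(pattern, sample))  # every match -> ['/item/A', '/item/B']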
[ "and print the meal's total cost. Note: Be sure to use precise values", "the solve function below. def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100)))", "added as tax) for a meal, find and print the meal's total cost.", "function below. def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost =", "of the meal price being added as tax) for a meal, find and", "\"\"\"Task Given the meal price (base cost of a meal), tip percent (the", "\"\"\" # Complete the solve function below. def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost +", "n is weird. \"\"\" # Complete the solve function below. def solve(meal_cost, tip_percent,", "solve function below. def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost", "tip percent (the percentage of the meal price being added as tip), and", "a meal), tip percent (the percentage of the meal price being added as", "meal), tip percent (the percentage of the meal price being added as tip),", "is weird. \"\"\" # Complete the solve function below. def solve(meal_cost, tip_percent, tax_percent):", "use precise values for your calculations, or you may end up with an", "print whether or not n is weird. \"\"\" # Complete the solve function", "Given the meal price (base cost of a meal), tip percent (the percentage", "(the percentage of the meal price being added as tip), and tax percent", "end up with an incorrectly rounded result! Complete the stub code provided in", "#!/usr/local/bin/python3 \"\"\"Task Given the meal price (base cost of a meal), tip percent", "in your editor to print whether or not n is weird. \"\"\" #", "the stub code provided in your editor to print whether or not n", "cost. Note: Be sure to use precise values for your calculations, or you", "Complete the solve function below. def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) +", "print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent = int(input()) tax_percent =", "Note: Be sure to use precise values for your calculations, or you may", "percent (the percentage of the meal price being added as tip), and tax", "tax percent (the percentage of the meal price being added as tax) for", "for a meal, find and print the meal's total cost. Note: Be sure", "with an incorrectly rounded result! Complete the stub code provided in your editor", "(tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent = int(input()) tax_percent = int(input()) solve(meal_cost,", "precise values for your calculations, or you may end up with an incorrectly", "solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent =", "the meal price being added as tip), and tax percent (the percentage of", "of a meal), tip percent (the percentage of the meal price being added", "as tip), and tax percent (the percentage of the meal price being added", "of the meal price being added as tip), and tax percent (the percentage", "below. 
def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input())", "Complete the stub code provided in your editor to print whether or not", "to use precise values for your calculations, or you may end up with", "incorrectly rounded result! Complete the stub code provided in your editor to print", "percentage of the meal price being added as tax) for a meal, find", "calculations, or you may end up with an incorrectly rounded result! Complete the", "the meal's total cost. Note: Be sure to use precise values for your", "provided in your editor to print whether or not n is weird. \"\"\"", "weird. \"\"\" # Complete the solve function below. def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost", "being added as tax) for a meal, find and print the meal's total", "percent (the percentage of the meal price being added as tax) for a", "may end up with an incorrectly rounded result! Complete the stub code provided", "total cost. Note: Be sure to use precise values for your calculations, or", "whether or not n is weird. \"\"\" # Complete the solve function below.", "tax) for a meal, find and print the meal's total cost. Note: Be", "meal price being added as tax) for a meal, find and print the", "percentage of the meal price being added as tip), and tax percent (the", "meal's total cost. Note: Be sure to use precise values for your calculations,", "to print whether or not n is weird. \"\"\" # Complete the solve", "or not n is weird. \"\"\" # Complete the solve function below. def", "price (base cost of a meal), tip percent (the percentage of the meal", "(base cost of a meal), tip percent (the percentage of the meal price", "def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent", "(tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent = int(input()) tax_percent = int(input()) solve(meal_cost, tip_percent, tax_percent)", "as tax) for a meal, find and print the meal's total cost. Note:", "rounded result! Complete the stub code provided in your editor to print whether", "+ (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent = int(input()) tax_percent = int(input()) solve(meal_cost, tip_percent,", "(the percentage of the meal price being added as tax) for a meal,", "find and print the meal's total cost. Note: Be sure to use precise", "and tax percent (the percentage of the meal price being added as tax)", "tip), and tax percent (the percentage of the meal price being added as", "cost of a meal), tip percent (the percentage of the meal price being", "meal, find and print the meal's total cost. Note: Be sure to use", "your editor to print whether or not n is weird. \"\"\" # Complete", "not n is weird. \"\"\" # Complete the solve function below. def solve(meal_cost,", "or you may end up with an incorrectly rounded result! Complete the stub", "being added as tip), and tax percent (the percentage of the meal price", "print the meal's total cost. Note: Be sure to use precise values for", "the meal price (base cost of a meal), tip percent (the percentage of", "the meal price being added as tax) for a meal, find and print", "price being added as tip), and tax percent (the percentage of the meal", "result! 
Complete the stub code provided in your editor to print whether or", "code provided in your editor to print whether or not n is weird.", "Be sure to use precise values for your calculations, or you may end", "tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent = int(input())", "a meal, find and print the meal's total cost. Note: Be sure to", "stub code provided in your editor to print whether or not n is", "values for your calculations, or you may end up with an incorrectly rounded", "sure to use precise values for your calculations, or you may end up", "an incorrectly rounded result! Complete the stub code provided in your editor to", "price being added as tax) for a meal, find and print the meal's", "meal price being added as tip), and tax percent (the percentage of the", "you may end up with an incorrectly rounded result! Complete the stub code", "# Complete the solve function below. def solve(meal_cost, tip_percent, tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100)", "your calculations, or you may end up with an incorrectly rounded result! Complete", "meal price (base cost of a meal), tip percent (the percentage of the", "added as tip), and tax percent (the percentage of the meal price being", "up with an incorrectly rounded result! Complete the stub code provided in your", "tax_percent): print(round(meal_cost + (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent = int(input()) tax_percent", "for your calculations, or you may end up with an incorrectly rounded result!", "editor to print whether or not n is weird. \"\"\" # Complete the", "+ (tip_percent*meal_cost/100) + (tax_percent*meal_cost/100))) meal_cost = float(input()) tip_percent = int(input()) tax_percent = int(input())" ]
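# Worked example of the formula above, using illustrative sample values
# (meal_cost=12.00, tip_percent=20, tax_percent=8):
#   tip   = 12.00 * 20 / 100 = 2.40
#   tax   = 12.00 *  8 / 100 = 0.96
#   total = 12.00 + 2.40 + 0.96 = 15.36, and round(15.36) gives 15
# solve(12.00, 20, 8)  # would print 15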
[ "mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager.", "util try: import unittest.mock as mock except ImportError: import mock LOG = util.get_logger()", "if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once()", "mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend =", "Unless required by applicable law or agreed to in writing, software # distributed", "mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. primary_credentials) = mock_primary_credentials mock_subnets_client = mock.Mock()", "= mock_network (self._base_tempest_backend._manager. primary_credentials) = mock_primary_credentials mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend.", "instance id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self): mock_keypair", "metadata, availability_zone) def get_remote_client(self, **kwargs): return \"fake get_remote_client\" def remote_client(self): return \"fake_remote_client\" class", "not None: (self._base_tempest_backend._manager.servers_client. remove_security_group.assert_called_once_with( server_id=\"fake id\", name=security_group['name'])) (self._base_tempest_backend.internal_instance_id. assert_called_once()) if server is not", "for the specific language governing permissions and limitations # under the License. #", "mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\") def test_floating_ip(self): self._base_tempest_backend._floating_ip = {\"ip\": \"fake", "<reponame>mateimicu/cloudbase-init-ci # Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # #", "\"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. assert_called_once_with(\"fake id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock( return_value=\"fake password\") self._base_tempest_backend.internal_instance_id", "= mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config):", "Apache License, Version 2.0 (the \"License\"); you may # not use this file", "the License. 
You may obtain # a copy of the License at #", "'id': \"fake server id\" } } stripped_kwargs = copy.deepcopy(kwargs) for key, value in", "= mock.Mock( return_value=\"fake id\") self._base_tempest_backend._server = server if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client.", "keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\":", "may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "import copy import unittest from argus.backends.tempest import tempest_backend from argus.unit_tests import test_utils from", "= security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule) = mock.Mock() if security_group is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group)", "= mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake instance id\") result =", "delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self):", "= mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock()", "self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref", "mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def", "server id\", 'ACTIVE') def test_create_server(self): kwargs = { \"arg 1\": \"value 1\", \"arg", "mock.Mock( return_value=\"fake instance id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def", "self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self): mock_keypair = mock.Mock() mock_keypair.public_key = \"fake", "return_value=\"fake instance id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self):", "= mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend", "test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock( return_value=\"fake password\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._keypair =", "mock.Mock() (self._base_tempest_backend._manager. 
servers_client) = mock_servers_client self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._server = server", "self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client. create_security_group.assert_called_once()) self._base_tempest_backend.internal_instance_id.assert_called_once() (self._base_tempest_backend._manager.servers_client.add_security_group .assert_called_once()) self.assertEqual(self._base_tempest_backend._security_groups_rules, [1, 2, 3, 4,", "mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log,", "with the License. You may obtain # a copy of the License at", "= self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result, \"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake id\", 10) def test_instance_server(self): self._base_tempest_backend._manager.instance_server =", "result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\") def test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake output\")", "pylint: disable=no-value-for-parameter, protected-access, arguments-differ # pylint: disable= unused-argument, no-member, attribute-defined-outside-init import copy import", "\"fake floating ip id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging", "keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules =", "key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\") def test_get_image_by_ref(self):", "is not None: (self._base_tempest_backend._manager.floating_ips_client. 
delete_floating_ip) = mock.Mock() self._base_tempest_backend._floating_ip = floating_ip if keypair is", "mock.Mock() with test_utils.LogSnatcher('argus.backends.base') as snatcher: self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging, snatcher.output) self._base_tempest_backend._configure_networking.assert_called_once() self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once() self._base_tempest_backend._create_security_groups.assert_called_once()", "return_value=\"fake reboot\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\")", "self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self): mock_keypair = mock.Mock() mock_keypair.public_key =", "mock_floating_ips_client (self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client.", "ip\"}) (self._base_tempest_backend._manager.floating_ips_client. associate_floating_ip_to_server.assert_called_once_with( \"fake ip\", \"fake id\")) def test_get_mtu(self): mock_get_mtu = mock.Mock() mock_get_mtu.return_value", "image ref\" self._base_tempest_backend.flavor_ref = \"fake flavor ref\" self._base_tempest_backend._name = \"fake name\" if wait_until", "= \"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server", "_security_groups_rules) = security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule) = mock.Mock() if security_group is not None: (self._base_tempest_backend._manager.servers_client", "pylint: disable= unused-argument, no-member, attribute-defined-outside-init import copy import unittest from argus.backends.tempest import tempest_backend", "\"id\": [ {\"id\": 1}, {\"id\": 2}, {\"id\": 3}, {\"id\": 4}, {\"id\": 5} ],", "server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network = mock.Mock()", "\"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self): mock_keypair = mock.Mock() mock_keypair.public_key = \"fake public", "\"fake dns nameservers\" mock_config.argus = mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\")", "use this file except in compliance with the License. You may obtain #", "mock_primary_credentials mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. 
_manager.subnets_client) = mock_subnets_client mock_argus =", "security_group is not None: (self._base_tempest_backend._manager.servers_client. remove_security_group.assert_called_once_with( server_id=\"fake id\", name=security_group['name'])) (self._base_tempest_backend.internal_instance_id. assert_called_once()) if server", "def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging = [\"Creating server...\"] self._base_tempest_backend._configure_networking", "from argus.backends.tempest import tempest_backend from argus.unit_tests import test_utils from argus import util try:", "self._base_tempest_backend._manager.instance_server = mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake instance id\") result", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "3\": None, \"arg 4\": \"value 4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip = mock.Mock()", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "= mock.Mock( return_value=\"fake output\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result,", "mock_floating_ips_client = mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None mock_internal_instance_id = mock.Mock()", "mock.Mock( return_value=\"fake ip\") result = self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client. create_security_group.assert_called_once()) self._base_tempest_backend.internal_instance_id.assert_called_once() (self._base_tempest_backend._manager.servers_client.add_security_group .assert_called_once())", "output\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result, \"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once()", "mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock()", "\"fake flavor ref\" name = mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel", "mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value = \"fake dns nameservers\" mock_config.argus = mock_argus self._base_tempest_backend._configure_networking()", "implied. 
See the # License for the specific language governing permissions and limitations", "\"floating_ip\": { \"ip\": \"fake ip\" } } mock_floating_ips_client = mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip", "if keypair is not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.'", "image ref\", flavorRef=\"fake flavor ref\", **stripped_kwargs)) if wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client,", "LOG = util.get_logger() class FakeBaseTempestBackend(tempest_backend.BaseTempestBackend): def __init__(self, name, userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__(", "{\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. associate_floating_ip_to_server.assert_called_once_with( \"fake ip\", \"fake id\")) def test_get_mtu(self): mock_get_mtu =", "# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed", "floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials.", "snatcher: self._base_tempest_backend.cleanup() if security_groups_rules is not None: (self.assertEqual( self._base_tempest_backend._manager. security_group_rules_client.delete_security_group_rule. call_count, len(security_groups_rules))) if", "mock except ImportError: import mock LOG = util.get_logger() class FakeBaseTempestBackend(tempest_backend.BaseTempestBackend): def __init__(self, name,", "} self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip = mock.Mock() mock_create_floating_ip.return_value = { \"floating_ip\": { \"ip\":", "\"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata,", "self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake id\", 10) def test_instance_server(self): self._base_tempest_backend._manager.instance_server = mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id", "return_value=\"fake id\") result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. assert_called_once_with(\"fake id\")) def test_instance_password(self):", "mock_keypair result = self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. 
show_image) =", "class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image ref\"", "security_group is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\")", "= mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock()", "you may # not use this file except in compliance with the License.", "\"fake image ref\" self._base_tempest_backend.flavor_ref = \"fake flavor ref\" self._base_tempest_backend._name = \"fake name\" if", ".assert_called_once()) self.assertEqual(self._base_tempest_backend._security_groups_rules, [1, 2, 3, 4, 5]) @mock.patch('tempest.common.waiters.wait_for_server_termination') def _test_cleanup(self, mock_waiters, security_groups_rules=None, security_group=None,", "mock_network.subnet = {\"id\": \"fake id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. primary_credentials)", "mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self):", "[\"Creating server...\"] self._base_tempest_backend._configure_networking = mock.Mock() self._base_tempest_backend._manager.create_keypair = mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\")", "mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\") result = self._base_tempest_backend._create_security_groups()", "with test_utils.LogSnatcher('argus.backends.base') as snatcher: self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging, snatcher.output) self._base_tempest_backend._configure_networking.assert_called_once() self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once() self._base_tempest_backend._create_security_groups.assert_called_once() def", "is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake", "{ 'server': { 'id': \"fake server id\" } } stripped_kwargs = copy.deepcopy(kwargs) for", "= \"fake flavor ref\" self._base_tempest_backend._name = \"fake name\" if wait_until is not None:", "= fake_security_group (self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client self._base_tempest_backend._security_groups_rules = [] self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"])", "self.assertEqual(result, \"fake private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. 
show_image) = mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf", "is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._security_group", "KIND, either express or implied. See the # License for the specific language", "server_id=\"fake id\", name=security_group['name'])) (self._base_tempest_backend.internal_instance_id. assert_called_once()) if server is not None: (self._base_tempest_backend._manager.servers_client.delete_server .assert_called_once_with(\"fake id\"))", ".assert_called_once_with(\"fake id\")) (mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2) if floating_ip is not", "test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client) =", "None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._security_group = security_group", "\"fake server\"} result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\") def test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock(", "mock.sentinel availability_zone = mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template')", "ImportError: import mock LOG = util.get_logger() class FakeBaseTempestBackend(tempest_backend.BaseTempestBackend): def __init__(self, name, userdata, metadata,", "mock_network (self._base_tempest_backend._manager. primary_credentials) = mock_primary_credentials mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. _manager.subnets_client)", "(self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._security_group = security_group if", "@mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network = mock.Mock() mock_network.subnet = {\"id\": \"fake id\"} mock_primary_credentials", "{ 'id': \"fake server id\" } } stripped_kwargs = copy.deepcopy(kwargs) for key, value", "floating_ip=None, keypair=None): expected_logging = [\"Cleaning up...\"] if security_groups_rules is not None: (self._base_tempest_backend. _security_groups_rules)", "self).__init__( name, userdata, metadata, availability_zone) def get_remote_client(self, **kwargs): return \"fake get_remote_client\" def remote_client(self):", "mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. 
_manager.subnets_client) = mock_subnets_client mock_argus = mock.Mock()", "kwargs = { \"arg 1\": \"value 1\", \"arg 2\": \"value 2\", \"arg 3\":", "self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"} result =", "mock_keypair = mock.Mock() mock_keypair.public_key = \"fake public key\" self._base_tempest_backend._keypair = mock_keypair result =", "keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.' 'tempest_backend') as snatcher: self._base_tempest_backend.cleanup() if security_groups_rules is", "file except in compliance with the License. You may obtain # a copy", "(self._base_tempest_backend ._create_server(wait_until, kwargs)) else: result = self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\": \"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server.", "mock.Mock() mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake", "def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client)", "= mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\") def test_floating_ip(self): self._base_tempest_backend._floating_ip = {\"ip\":", "All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "server\"} result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\") def test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake", "mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\") def", "id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. 
primary_credentials) = mock_primary_credentials mock_subnets_client =", "result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule", "class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image ref\"", "2}, {\"id\": 3}, {\"id\": 4}, {\"id\": 5} ], \"name\": \"fake name\" } }", "4}, {\"id\": 5} ], \"name\": \"fake name\" } } mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group", "def get_remote_client(self, **kwargs): return \"fake get_remote_client\" def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus')", "value in list(stripped_kwargs.items()): if not value: del stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref", "\"value 4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip = mock.Mock() mock_create_floating_ip.return_value = { \"floating_ip\":", "mock_create_floating_ip.return_value = { \"floating_ip\": { \"ip\": \"fake ip\" } } mock_floating_ips_client = mock.Mock()", "\"fake public key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public key\")", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "reboot\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance.", "self._base_tempest_backend._manager.servers_client, \"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2) if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id']))", "key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public key\") def test_private_key(self):", "import unittest from argus.backends.tempest import tempest_backend from argus.unit_tests import test_utils from argus import", "mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake sg_rule\"}", "mock.Mock() mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager. servers_client) = mock_servers_client self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\")", "= mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. 
primary_credentials) = mock_primary_credentials mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value", "mock.Mock() mock_network.subnet = {\"id\": \"fake id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager.", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "\"fake server id\", 'ACTIVE') def test_create_server(self): kwargs = { \"arg 1\": \"value 1\",", "self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule 2\", \"rule 3\", \"rule", "not value: del stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image ref\"", "\"security_group\": { \"id\": [ {\"id\": 1}, {\"id\": 2}, {\"id\": 3}, {\"id\": 4}, {\"id\":", "(self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image ref\" self._base_tempest_backend.flavor_ref = \"fake flavor", "flavor ref\" name = mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel availability_zone", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "not None: (self._base_tempest_backend._manager.servers_client.delete_server .assert_called_once_with(\"fake id\")) (mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2) if", "= None (self._base_tempest_backend. _manager.subnets_client) = mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value = \"fake dns", "'tempest_backend') as snatcher: self._base_tempest_backend.cleanup() if security_groups_rules is not None: (self.assertEqual( self._base_tempest_backend._manager. security_group_rules_client.delete_security_group_rule. call_count,", "= util.get_logger() class FakeBaseTempestBackend(tempest_backend.BaseTempestBackend): def __init__(self, name, userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name,", "self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network = mock.Mock() with test_utils.LogSnatcher('argus.backends.base') as snatcher:", "return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters, mock_util, kwargs, wait_until=None): fake_server = { 'server': {", "mock.Mock( return_value=\"fake reboot\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake", "\"fake private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. 
show_image) = mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf =", "= security_group if server is not None: mock_servers_client = mock.Mock() mock_servers_client.delete_server = mock.Mock()", "self._base_tempest_backend._keypair = \"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self):", "the # License for the specific language governing permissions and limitations # under", "ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self,", "self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result, \"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "\"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def", "mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network", "5} ], \"name\": \"fake name\" } } mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group .return_value) =", "not None: result = (self._base_tempest_backend ._create_server(wait_until, kwargs)) else: result = self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\":", "self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. 
associate_floating_ip_to_server.assert_called_once_with( \"fake ip\", \"fake id\")) def test_get_mtu(self): mock_get_mtu", "3, 4, 5]) @mock.patch('tempest.common.waiters.wait_for_server_termination') def _test_cleanup(self, mock_waiters, security_groups_rules=None, security_group=None, server=None, floating_ip=None, keypair=None): expected_logging", "_manager.subnets_client) = mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value = \"fake dns nameservers\" mock_config.argus =", "You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "import unittest.mock as mock except ImportError: import mock LOG = util.get_logger() class FakeBaseTempestBackend(tempest_backend.BaseTempestBackend):", "result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self): mock_keypair = mock.Mock()", "= mock.Mock( return_value=\"fake instance id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once()", "return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake", "not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\",", "required by applicable law or agreed to in writing, software # distributed under", "None: result = (self._base_tempest_backend ._create_server(wait_until, kwargs)) else: result = self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\": \"fake", "self._base_tempest_backend._server = server if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip) = mock.Mock() self._base_tempest_backend._floating_ip", "metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake call\" mock_config.build", "(self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. associate_floating_ip_to_server.assert_called_once_with(", "private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. 
show_image) = mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf = mock.Mock()", "applicable law or agreed to in writing, software # distributed under the License", "self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) =", "mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone", "= mock.Mock() with test_utils.LogSnatcher('argus.backends.base') as snatcher: self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging, snatcher.output) self._base_tempest_backend._configure_networking.assert_called_once() self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once()", "self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self): mock_keypair = mock.Mock() mock_keypair.public_key = \"fake public key\" self._base_tempest_backend._keypair =", "= mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. associate_floating_ip_to_server.assert_called_once_with( \"fake ip\",", "= mock_security_groups_client self._base_tempest_backend._security_groups_rules = [] self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id", "util.get_logger() class FakeBaseTempestBackend(tempest_backend.BaseTempestBackend): def __init__(self, name, userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata,", "\"value 2\", \"arg 3\": None, \"arg 4\": \"value 4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self):", "test_public_key(self): mock_keypair = mock.Mock() mock_keypair.public_key = \"fake public key\" self._base_tempest_backend._keypair = mock_keypair result", "mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client) = mock_security_group_rules_client", "= mock.Mock() (self._base_tempest_backend._manager. servers_client) = mock_servers_client self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._server =", "in compliance with the License. You may obtain # a copy of the", "update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. 
update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters, mock_util, kwargs, wait_until=None): fake_server", "or agreed to in writing, software # distributed under the License is distributed", "(self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule 2\", \"rule", "self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\") result", "= \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name,", "and limitations # under the License. # pylint: disable=no-value-for-parameter, protected-access, arguments-differ # pylint:", "result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. associate_floating_ip_to_server.assert_called_once_with( \"fake ip\", \"fake id\"))", "None: (self._base_tempest_backend._manager.servers_client. remove_security_group.assert_called_once_with( server_id=\"fake id\", name=security_group['name'])) (self._base_tempest_backend.internal_instance_id. assert_called_once()) if server is not None:", "__init__(self, name, userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone) def get_remote_client(self,", "{\"id\": \"fake id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. primary_credentials) = mock_primary_credentials", "result = (self._base_tempest_backend ._create_server(wait_until, kwargs)) else: result = self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\": \"fake server", "self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. assert_called_once_with(\"fake", "**kwargs): return \"fake get_remote_client\" def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def", "= self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config):", "= FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network = mock.Mock()", "wait_until=None): fake_server = { 'server': { 'id': \"fake server id\" } } stripped_kwargs", "= \"fake name\" if wait_until is not None: result = (self._base_tempest_backend ._create_server(wait_until, kwargs))", "up...\"] if security_groups_rules is not None: (self._base_tempest_backend. 
_security_groups_rules) = security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule) =", "None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule", "= mock.Mock() if security_group is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id =", "\"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake", "License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "writing, software # distributed under the License is distributed on an \"AS IS\"", "flavorRef=\"fake flavor ref\", **stripped_kwargs)) if wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server", "None: mock_servers_client = mock.Mock() mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager. servers_client) = mock_servers_client self._base_tempest_backend.internal_instance_id =", "mock.Mock() mock_create_floating_ip.return_value = { \"floating_ip\": { \"ip\": \"fake ip\" } } mock_floating_ips_client =", "metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone) def get_remote_client(self, **kwargs): return \"fake", "@mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake call\" mock_config.build = \"fake", "kwargs)) else: result = self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\": \"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake", "self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._keypair = \"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result,", "= \"fake dns nameservers\" mock_config.argus = mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. 
update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name',", "= mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result, \"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake id\",", "{\"ip\": \"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager')", "\"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image", "= keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.' 'tempest_backend') as snatcher: self._base_tempest_backend.cleanup() if security_groups_rules", "{\"id\": 1}, {\"id\": 2}, {\"id\": 3}, {\"id\": 4}, {\"id\": 5} ], \"name\": \"fake", "None: (self._base_tempest_backend. _security_groups_rules) = security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule) = mock.Mock() if security_group is not", "is not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.' 'tempest_backend') as", "no-member, attribute-defined-outside-init import copy import unittest from argus.backends.tempest import tempest_backend from argus.unit_tests import", "(self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule) = mock.Mock() if security_group is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock()", "(self._base_tempest_backend._manager.compute_images_client. 
show_image) = mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result,", "\"fake build\" mock_config.arch = \"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch, mock_get_log.return_value) result =", "mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base =", "assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule 2\", \"rule 3\",", "[\"rule 1\", \"rule 2\", \"rule 3\", \"rule 4\"] self._test_cleanup(security_groups_rules=fake_rules) def test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake", "test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake floating ip id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock())", "self._base_tempest_backend._manager.test_instance_output(\"fake id\", 10) def test_instance_server(self): self._base_tempest_backend._manager.instance_server = mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id =", "**stripped_kwargs)) if wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else:", "1}, {\"id\": 2}, {\"id\": 3}, {\"id\": 4}, {\"id\": 5} ], \"name\": \"fake name\"", "3}, {\"id\": 4}, {\"id\": 5} ], \"name\": \"fake name\" } } mock_security_groups_client =", "ip id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging = [\"Creating", "test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging = [\"Creating server...\"] self._base_tempest_backend._configure_networking = mock.Mock() self._base_tempest_backend._manager.create_keypair =", "= self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\") def test_floating_ip(self): self._base_tempest_backend._floating_ip = {\"ip\": \"fake ip\"} result", "mock_security_groups_client self._base_tempest_backend._security_groups_rules = [] self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id =", "mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image ref\" self._base_tempest_backend.flavor_ref = \"fake flavor ref\" self._base_tempest_backend._name =", "floating_ip if keypair is not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with", "self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def test_create_server(self): kwargs = { \"arg 1\": \"value", "name, userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone) def get_remote_client(self, **kwargs):", "ref\" mock_config.openstack.flavor_ref = \"fake flavor ref\" name = mock.sentinel userdata = \"fake userdata\"", "of the License at # # 
http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "build\" mock_config.arch = \"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch, mock_get_log.return_value) result = self._base._get_log_template(\"fake", "keypair=None): expected_logging = [\"Cleaning up...\"] if security_groups_rules is not None: (self._base_tempest_backend. _security_groups_rules) =", "= mock.Mock() self._base_tempest_backend._floating_ip = floating_ip if keypair is not None: self._base_tempest_backend._keypair = keypair", "self._test_cleanup() def test_instance_setup_create_server(self): expected_logging = [\"Creating server...\"] self._base_tempest_backend._configure_networking = mock.Mock() self._base_tempest_backend._manager.create_keypair = mock.Mock()", "def test_floating_ip(self): self._base_tempest_backend._floating_ip = {\"ip\": \"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\")", "\"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata,", "server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def test_create_server(self): kwargs", "{ \"arg 1\": \"value 1\", \"arg 2\": \"value 2\", \"arg 3\": None, \"arg", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may # not", "(self._base_tempest_backend.internal_instance_id. assert_called_once()) if server is not None: (self._base_tempest_backend._manager.servers_client.delete_server .assert_called_once_with(\"fake id\")) (mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake", "get_remote_client\" def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config):", "def test_get_mtu(self): mock_get_mtu = mock.Mock() mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result", "(mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2) if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client.", "security_group if server is not None: mock_servers_client = mock.Mock() mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager.", "availability_zone = mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self,", "def test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake", "2.0 (the \"License\"); you may # not use this file except in compliance", "mock_servers_client = mock.Mock() mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager. 
servers_client) = mock_servers_client self._base_tempest_backend.internal_instance_id = mock.Mock(", "self._test_cleanup(security_groups_rules=fake_rules) def test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\":", "\"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2) if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if", "specific language governing permissions and limitations # under the License. # pylint: disable=no-value-for-parameter,", "self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip = mock.Mock() mock_create_floating_ip.return_value = { \"floating_ip\": { \"ip\": \"fake", "= mock.Mock() mock_argus.dns_nameservers.return_value = \"fake dns nameservers\" mock_config.argus = mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once())", "= [\"Cleaning up...\"] if security_groups_rules is not None: (self._base_tempest_backend. _security_groups_rules) = security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client.", "server\") def test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake output\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\")", "attribute-defined-outside-init import copy import unittest from argus.backends.tempest import tempest_backend from argus.unit_tests import test_utils", "= mock.Mock() mock_create_floating_ip.return_value = { \"floating_ip\": { \"ip\": \"fake ip\" } } mock_floating_ips_client", "# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client,", ".security_group_rules_client) = mock_security_group_rules_client result = (self._base_tempest_backend. 
_add_security_group_exceptions(\"fake secgroup_id\")) for item in result: self.assertEqual(item,", "\"fake server\") def test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake output\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake", "mock_keypair.public_key = \"fake public key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake", "# pylint: disable= unused-argument, no-member, attribute-defined-outside-init import copy import unittest from argus.backends.tempest import", "snatcher: self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging, snatcher.output) self._base_tempest_backend._configure_networking.assert_called_once() self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once() self._base_tempest_backend._create_security_groups.assert_called_once() def test_reboot_instance(self): self._base_tempest_backend._manager.reboot_instance =", "availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake call\" mock_config.build =", "License, Version 2.0 (the \"License\"); you may # not use this file except", "availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network = mock.Mock() mock_network.subnet = {\"id\": \"fake id\"}", "ref\", flavorRef=\"fake flavor ref\", **stripped_kwargs)) if wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake", "\"arg 3\": None, \"arg 4\": \"value 4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip =", "test_get_mtu(self): mock_get_mtu = mock.Mock() mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result =", "sg_rule\") def test__create_security_groups(self): fake_security_group = { \"security_group\": { \"id\": [ {\"id\": 1}, {\"id\":", "\"fake image\") def test_floating_ip(self): self._base_tempest_backend._floating_ip = {\"ip\": \"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result,", "mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public key\") def test_private_key(self): mock_keypair = mock.Mock()", "mock_keypair = mock.Mock() mock_keypair.private_key = \"fake private key\" self._base_tempest_backend._keypair = mock_keypair result =", "userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake call\"", "= mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base", "is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. 
assert_called_once())", "image ref\" mock_config.openstack.flavor_ref = \"fake flavor ref\" name = mock.sentinel userdata = \"fake", "\"arg 4\": \"value 4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip = mock.Mock() mock_create_floating_ip.return_value =", "test_reboot_instance(self): self._base_tempest_backend._manager.reboot_instance = mock.Mock( return_value=\"fake reboot\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result =", "1\": \"value 1\", \"arg 2\": \"value 2\", \"arg 3\": None, \"arg 4\": \"value", "userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata,", "mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id", "mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client) = mock_security_group_rules_client result =", "ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image", "= self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\") def test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake output\") self._base_tempest_backend.internal_instance_id", "\"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image ref\", flavorRef=\"fake flavor ref\", **stripped_kwargs)) if", "call\" mock_config.build = \"fake build\" mock_config.arch = \"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch,", "2) if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not None:", "None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\",", "self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref =", "\"rule 2\", \"rule 3\", \"rule 4\"] self._test_cleanup(security_groups_rules=fake_rules) def test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake name\"}) def", "self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. 
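

# The fake subclass above stubs get_remote_client/remote_client with
# sentinel strings so that the (presumably abstract) BaseTempestBackend can
# be instantiated; everything below therefore exercises the base class.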
assert_called_once_with(\"fake id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock( return_value=\"fake", "\"fake public key\") def test_private_key(self): mock_keypair = mock.Mock() mock_keypair.private_key = \"fake private key\"", "else: result = self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\": \"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image", "mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend. internal_instance_id)", "def test_create_server(self): kwargs = { \"arg 1\": \"value 1\", \"arg 2\": \"value 2\",", "result = (self._base_tempest_backend. _add_security_group_exceptions(\"fake secgroup_id\")) for item in result: self.assertEqual(item, \"fake sg_rule\") def", "public key\") def test_private_key(self): mock_keypair = mock.Mock() mock_keypair.private_key = \"fake private key\" self._base_tempest_backend._keypair", "mock_config.arch = \"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch, mock_get_log.return_value) result = self._base._get_log_template(\"fake suffix\")", "agreed to in writing, software # distributed under the License is distributed on", "= mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\") result =", "private key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\") def", "= {\"id\": \"fake id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. primary_credentials) =", "mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\")", "wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def test_create_server(self): kwargs = {", "availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone) def get_remote_client(self, **kwargs): return \"fake get_remote_client\"", "\"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def", "mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE')", "None: (self._base_tempest_backend._manager.servers_client.delete_server .assert_called_once_with(\"fake id\")) (mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2) if floating_ip", "\"fake id\" (self._base_tempest_backend._manager. 
floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip()", "4\": \"value 4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip = mock.Mock() mock_create_floating_ip.return_value = {", "self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging, snatcher.output) self._base_tempest_backend._configure_networking.assert_called_once() self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once() self._base_tempest_backend._create_security_groups.assert_called_once() def test_reboot_instance(self): self._base_tempest_backend._manager.reboot_instance = mock.Mock(", "\"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client) = mock_security_group_rules_client result = (self._base_tempest_backend. _add_security_group_exceptions(\"fake secgroup_id\")) for item", "(self._base_tempest_backend._manager.servers_client. remove_security_group.assert_called_once_with( server_id=\"fake id\", name=security_group['name'])) (self._base_tempest_backend.internal_instance_id. assert_called_once()) if server is not None: (self._base_tempest_backend._manager.servers_client.delete_server", "\"fake image\"}) self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\") def test_floating_ip(self):", "\"arg 2\": \"value 2\", \"arg 3\": None, \"arg 4\": \"value 4\" } self._test_create_server(kwargs=kwargs)", "= [\"Creating server...\"] self._base_tempest_backend._configure_networking = mock.Mock() self._base_tempest_backend._manager.create_keypair = mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake", "def test__create_security_groups(self): fake_security_group = { \"security_group\": { \"id\": [ {\"id\": 1}, {\"id\": 2},", "availability_zone = mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def", "= \"fake private key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private", "# Unless required by applicable law or agreed to in writing, software #", "metadata = mock.sentinel availability_zone = mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone)", "3\", \"rule 4\"] self._test_cleanup(security_groups_rules=fake_rules) def test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\")", "by applicable law or agreed to in writing, software # distributed under the", "fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client. 
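
    # A reusable, parametrised helper (the pattern used throughout this
    # module): callers supply kwargs and optionally wait_until, and the
    # helper asserts both the create_server call, with falsy kwargs
    # stripped, and the matching wait_for_server_status call
    # ('ACTIVE' by default).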
    @mock.patch('argus.util.rand_name', return_value="fake-server")
    @mock.patch('tempest.common.waiters.wait_for_server_status')
    def _test_create_server(self, mock_waiters, mock_util, kwargs,
                            wait_until=None):
        fake_server = {
            'server': {
                'id': "fake server id"
            }
        }
        stripped_kwargs = copy.deepcopy(kwargs)
        for key, value in list(stripped_kwargs.items()):
            if not value:
                del stripped_kwargs[key]
        (self._base_tempest_backend._manager.servers_client.
         create_server) = mock.Mock(return_value=fake_server)
        self._base_tempest_backend.image_ref = "fake image ref"
        self._base_tempest_backend.flavor_ref = "fake flavor ref"
        self._base_tempest_backend._name = "fake name"
        if wait_until is not None:
            result = (self._base_tempest_backend
                      ._create_server(wait_until, kwargs))
        else:
            result = self._base_tempest_backend._create_server(**kwargs)
        self.assertEqual(result, {"id": "fake server id"})
        (self._base_tempest_backend._manager.servers_client.create_server.
         assert_called_once_with(name="fake-server-instance",
                                 imageRef="fake image ref",
                                 flavorRef="fake flavor ref",
                                 **stripped_kwargs))
        if wait_until is not None:
            mock_waiters.assert_called_once_with(
                self._base_tempest_backend._manager.servers_client,
                "fake server id", wait_until)
        else:
            mock_waiters.assert_called_once_with(
                self._base_tempest_backend._manager.servers_client,
                "fake server id", 'ACTIVE')

    def test_create_server(self):
        kwargs = {
            "arg 1": "value 1",
            "arg 2": "value 2",
            "arg 3": None,
            "arg 4": "value 4"
        }
        self._test_create_server(kwargs=kwargs)
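
    # Style note: targets like "(obj. attr) = value" wrap long dotted
    # attribute chains in parentheses purely to satisfy the line-length
    # limit; the parentheses have no semantic effect.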
    def test__assign_floating_ip(self):
        mock_create_floating_ip = mock.Mock()
        mock_create_floating_ip.return_value = {
            "floating_ip": {
                "ip": "fake ip"
            }
        }
        mock_floating_ips_client = mock.Mock()
        mock_floating_ips_client.create_floating_ip = mock_create_floating_ip
        (mock_floating_ips_client.associate_floating_ip_to_server
         .return_value) = None
        mock_internal_instance_id = mock.Mock()
        mock_internal_instance_id.return_value = "fake id"
        (self._base_tempest_backend._manager.
         floating_ips_client) = mock_floating_ips_client
        (self._base_tempest_backend.
         internal_instance_id) = mock_internal_instance_id
        result = self._base_tempest_backend._assign_floating_ip()
        self.assertEqual(result, {"ip": "fake ip"})
        (self._base_tempest_backend._manager.floating_ips_client.
         associate_floating_ip_to_server.assert_called_once_with(
             "fake ip", "fake id"))

    def test_get_mtu(self):
        mock_get_mtu = mock.Mock()
        mock_get_mtu.return_value = "fake mtu"
        self._base_tempest_backend._manager.get_mtu = mock_get_mtu
        result = self._base_tempest_backend.get_mtu()
        self.assertEqual(result, "fake mtu")
        self._base_tempest_backend._manager.get_mtu.assert_called_once()

    def test__add_security_group_exceptions(self):
        mock_security_group_rules_client = mock.Mock()
        (mock_security_group_rules_client.create_security_group_rule
         .return_value) = {"security_group_rule": "fake sg_rule"}
        (self._base_tempest_backend._manager
         .security_group_rules_client) = mock_security_group_rules_client
        result = (self._base_tempest_backend.
                  _add_security_group_exceptions("fake secgroup_id"))
        for item in result:
            self.assertEqual(item, "fake sg_rule")
    def test__create_security_groups(self):
        fake_security_group = {
            "security_group": {
                "id": [
                    {"id": 1},
                    {"id": 2},
                    {"id": 3},
                    {"id": 4},
                    {"id": 5}
                ],
                "name": "fake name"
            }
        }
        mock_security_groups_client = mock.Mock()
        (mock_security_groups_client.create_security_group
         .return_value) = fake_security_group
        (self._base_tempest_backend._manager
         .security_groups_client) = mock_security_groups_client
        self._base_tempest_backend._security_groups_rules = []
        self._base_tempest_backend._add_security_group_exceptions = mock.Mock(
            return_value=fake_security_group["security_group"]["id"])
        self._base_tempest_backend._manager.servers_client = mock.Mock()
        self._base_tempest_backend.internal_instance_id = mock.Mock(
            return_value="fake ip")
        result = self._base_tempest_backend._create_security_groups()
        self.assertEqual(result, fake_security_group["security_group"])
        (self._base_tempest_backend._manager.security_groups_client.
         create_security_group.assert_called_once())
        self._base_tempest_backend.internal_instance_id.assert_called_once()
        (self._base_tempest_backend._manager.servers_client.add_security_group
         .assert_called_once())
        self.assertEqual(self._base_tempest_backend._security_groups_rules,
                         [1, 2, 3, 4, 5])
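
    # Cleanup follows the same helper pattern: each optional parameter of
    # _test_cleanup stands for one resource (rules, security group, server,
    # floating IP, keypair), and only the teardown paths for the resources
    # actually passed in are wired up and asserted.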
    @mock.patch('tempest.common.waiters.wait_for_server_termination')
    def _test_cleanup(self, mock_waiters, security_groups_rules=None,
                      security_group=None, server=None, floating_ip=None,
                      keypair=None):
        expected_logging = ["Cleaning up..."]
        if security_groups_rules is not None:
            (self._base_tempest_backend.
             _security_groups_rules) = security_groups_rules
            (self._base_tempest_backend._manager.security_group_rules_client.
             delete_security_group_rule) = mock.Mock()
        if security_group is not None:
            (self._base_tempest_backend._manager.servers_client
             .remove_security_group) = mock.Mock()
            self._base_tempest_backend.internal_instance_id = mock.Mock(
                return_value="fake id")
            self._base_tempest_backend._security_group = security_group
        if server is not None:
            mock_servers_client = mock.Mock()
            mock_servers_client.delete_server = mock.Mock()
            (self._base_tempest_backend._manager.
             servers_client) = mock_servers_client
            self._base_tempest_backend.internal_instance_id = mock.Mock(
                return_value="fake id")
            self._base_tempest_backend._server = server
        if floating_ip is not None:
            (self._base_tempest_backend._manager.floating_ips_client.
             delete_floating_ip) = mock.Mock()
            self._base_tempest_backend._floating_ip = floating_ip
        if keypair is not None:
            self._base_tempest_backend._keypair = keypair
        self._base_tempest_backend._manager.cleanup_credentials = mock.Mock()
        with test_utils.LogSnatcher('argus.backends.tempest.'
                                    'tempest_backend') as snatcher:
            self._base_tempest_backend.cleanup()
        if security_groups_rules is not None:
            self.assertEqual(
                self._base_tempest_backend._manager.
                security_group_rules_client.delete_security_group_rule.
                call_count, len(security_groups_rules))
        if security_group is not None:
            (self._base_tempest_backend._manager.servers_client.
             remove_security_group.assert_called_once_with(
                 server_id="fake id", name=security_group['name']))
            (self._base_tempest_backend.internal_instance_id.
             assert_called_once())
        if server is not None:
            (self._base_tempest_backend._manager.servers_client.delete_server
             .assert_called_once_with("fake id"))
            (mock_waiters.assert_called_once_with(
                self._base_tempest_backend._manager.servers_client,
                "fake id"))
            self.assertEqual(
                self._base_tempest_backend.internal_instance_id.call_count, 2)
        if floating_ip is not None:
            (self._base_tempest_backend._manager.floating_ips_client.
             delete_floating_ip.assert_called_once_with(floating_ip['id']))
        if keypair is not None:
            self._base_tempest_backend._keypair.destroy.assert_called_once()
        (self._base_tempest_backend._manager.cleanup_credentials.
         assert_called_once())
        self.assertEqual(expected_logging, snatcher.output)
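
    # One scenario per resource: each test passes a single argument so the
    # helper above verifies exactly one teardown path in isolation.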
    def test_cleanup_security_groups_rules(self):
        fake_rules = ["rule 1", "rule 2", "rule 3", "rule 4"]
        self._test_cleanup(security_groups_rules=fake_rules)

    def test_cleanup_security_group(self):
        self._test_cleanup(security_group={'name': "fake name"})

    def test_cleanup_server(self):
        self._test_cleanup(server="fake server")

    def test_cleanup_floating_ip(self):
        self._test_cleanup(floating_ip={"id": "fake floating ip id"})

    def test_cleanup_keypair(self):
        self._test_cleanup(keypair=mock.Mock())

    def test_cleanup_credentials(self):
        self._test_cleanup()
    def test_instance_setup_create_server(self):
        expected_logging = ["Creating server..."]
        self._base_tempest_backend._configure_networking = mock.Mock()
        self._base_tempest_backend._manager.create_keypair = mock.Mock()
        self._base_tempest_backend._create_server = mock.Mock(
            return_value="fake server")
        self._base_tempest_backend._assign_floating_ip = mock.Mock()
        self._base_tempest_backend._create_security_groups = mock.Mock()
        self._base_tempest_backend._availability_zone = mock.Mock()
        self._base_tempest_backend.__get_id_tenant_network = mock.Mock()
        with test_utils.LogSnatcher('argus.backends.base') as snatcher:
            self._base_tempest_backend.setup_instance()
        self.assertEqual(expected_logging, snatcher.output)
        self._base_tempest_backend._configure_networking.assert_called_once()
        self._base_tempest_backend._manager.create_keypair.assert_called_once()
        self._base_tempest_backend._create_server.assert_called_once()
        self._base_tempest_backend._assign_floating_ip.assert_called_once()
        self._base_tempest_backend._create_security_groups.assert_called_once()

    def test_reboot_instance(self):
        self._base_tempest_backend._manager.reboot_instance = mock.Mock(
            return_value="fake reboot")
        self._base_tempest_backend.internal_instance_id = mock.Mock(
            return_value="fake id")
        result = self._base_tempest_backend.reboot_instance()
        self.assertEqual(result, "fake reboot")
        (self._base_tempest_backend._manager.reboot_instance.
         assert_called_once_with("fake id"))

    def test_instance_password(self):
        self._base_tempest_backend._manager.instance_password = mock.Mock(
            return_value="fake password")
        self._base_tempest_backend.internal_instance_id = mock.Mock(
            return_value="fake id")
        self._base_tempest_backend._keypair = "fake keypair"
        result = self._base_tempest_backend.instance_password()
        self.assertEqual(result, "fake password")
        self._base_tempest_backend.internal_instance_id.assert_called_once()
    def test_internal_instance_id(self):
        self._base_tempest_backend._server = {"id": "fake server"}
        result = self._base_tempest_backend.internal_instance_id()
        self.assertEqual(result, "fake server")

    def test_instance_output(self):
        self._base_tempest_backend._manager.instance_output = mock.Mock(
            return_value="fake output")
        self._base_tempest_backend.internal_instance_id = mock.Mock(
            return_value="fake id")
        result = self._base_tempest_backend.instance_output(limit=10)
        self.assertEqual(result, "fake output")
        self._base_tempest_backend.internal_instance_id.assert_called_once()
        (self._base_tempest_backend._manager.instance_output.
         assert_called_once())

    def test_instance_server(self):
        self._base_tempest_backend._manager.instance_server = mock.Mock(
            return_value="fake instance server")
        self._base_tempest_backend.internal_instance_id = mock.Mock(
            return_value="fake instance id")
        result = self._base_tempest_backend.instance_server()
        self.assertEqual(result, "fake instance server")
        self._base_tempest_backend.internal_instance_id.assert_called_once()
See the", ".remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._security_group = security_group if server", "\"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake id\", 10) def test_instance_server(self): self._base_tempest_backend._manager.instance_server = mock.Mock( return_value=\"fake instance", "def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"} result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\")", "name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake", ".return_value) = None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client) =", "(mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client self._base_tempest_backend._security_groups_rules = [] self._base_tempest_backend._add_security_group_exceptions =", "mock.Mock() mock_keypair.public_key = \"fake public key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result,", "mock_config): mock_config.openstack.image_ref = \"fake image ref\" mock_config.openstack.flavor_ref = \"fake flavor ref\" name =", "None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output)", "mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value =", "(the \"License\"); you may # not use this file except in compliance with", "= mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client =", "= self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\": \"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image ref\", flavorRef=\"fake", "mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def test_create_server(self): kwargs = { \"arg 1\":", "= self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. 
associate_floating_ip_to_server.assert_called_once_with( \"fake ip\", \"fake id\")) def", "= {\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client) = mock_security_group_rules_client result = (self._base_tempest_backend. _add_security_group_exceptions(\"fake secgroup_id\"))", "availability_zone) def get_remote_client(self, **kwargs): return \"fake get_remote_client\" def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase):", "\"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake floating ip id\"})", "from argus.unit_tests import test_utils from argus import util try: import unittest.mock as mock", "def test__configure_networking(self, mock_config): mock_network = mock.Mock() mock_network.subnet = {\"id\": \"fake id\"} mock_primary_credentials =", "# # Unless required by applicable law or agreed to in writing, software", "fake_security_group = { \"security_group\": { \"id\": [ {\"id\": 1}, {\"id\": 2}, {\"id\": 3},", "mock.Mock() (mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client self._base_tempest_backend._security_groups_rules = [] self._base_tempest_backend._add_security_group_exceptions", "self._test_cleanup(security_group={'name': \"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake floating ip", "self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\") def test_floating_ip(self): self._base_tempest_backend._floating_ip =", "key\") def test_private_key(self): mock_keypair = mock.Mock() mock_keypair.private_key = \"fake private key\" self._base_tempest_backend._keypair =", "\"fake private key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\")", "= mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\") result = self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client.", "self.assertEqual(result, {\"id\": \"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. 
assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image ref\", flavorRef=\"fake flavor ref\",", "test_utils.LogSnatcher('argus.backends.base') as snatcher: self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging, snatcher.output) self._base_tempest_backend._configure_networking.assert_called_once() self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once() self._base_tempest_backend._create_security_groups.assert_called_once() def test_reboot_instance(self):", "self._base_tempest_backend._create_security_groups.assert_called_once() def test_reboot_instance(self): self._base_tempest_backend._manager.reboot_instance = mock.Mock( return_value=\"fake reboot\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\")", "id\") self._base_tempest_backend._keypair = \"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def", "name = mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel", "id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging = [\"Creating server...\"]", "self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config):", "mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. associate_floating_ip_to_server.assert_called_once_with( \"fake ip\", \"fake", "= {\"ip\": \"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus')", "argus.unit_tests import test_utils from argus import util try: import unittest.mock as mock except", "mock_config.openstack.flavor_ref = \"fake flavor ref\" name = mock.sentinel userdata = \"fake userdata\" metadata", "@mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters, mock_util, kwargs, wait_until=None): fake_server = { 'server':", "} stripped_kwargs = copy.deepcopy(kwargs) for key, value in list(stripped_kwargs.items()): if not value: del", "id\", 'ACTIVE') def test_create_server(self): kwargs = { \"arg 1\": \"value 1\", \"arg 2\":", "= mock.Mock() mock_keypair.private_key = \"fake private key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.private_key()", "\"fake server id\" } } stripped_kwargs = copy.deepcopy(kwargs) for key, value in list(stripped_kwargs.items()):", "License. 
You may obtain # a copy of the License at # #", "test__configure_networking(self, mock_config): mock_network = mock.Mock() mock_network.subnet = {\"id\": \"fake id\"} mock_primary_credentials = mock.Mock()", "create_security_group.assert_called_once()) self._base_tempest_backend.internal_instance_id.assert_called_once() (self._base_tempest_backend._manager.servers_client.add_security_group .assert_called_once()) self.assertEqual(self._base_tempest_backend._security_groups_rules, [1, 2, 3, 4, 5]) @mock.patch('tempest.common.waiters.wait_for_server_termination') def _test_cleanup(self,", "return_value=\"fake id\") self._base_tempest_backend._security_group = security_group if server is not None: mock_servers_client = mock.Mock()", "the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public key\") def test_private_key(self): mock_keypair", "\"fake get_remote_client\" def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager,", "self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake output\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.instance_output(limit=10)", "self._base_tempest_backend.flavor_ref = \"fake flavor ref\" self._base_tempest_backend._name = \"fake name\" if wait_until is not", "Solutions Srl # All Rights Reserved. # # Licensed under the Apache License,", "= \"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch, mock_get_log.return_value) result = self._base._get_log_template(\"fake suffix\") self.assertEqual(result,", "setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image ref\" mock_config.openstack.flavor_ref = \"fake flavor ref\"", "remove_security_group.assert_called_once_with( server_id=\"fake id\", name=security_group['name'])) (self._base_tempest_backend.internal_instance_id. assert_called_once()) if server is not None: (self._base_tempest_backend._manager.servers_client.delete_server .assert_called_once_with(\"fake", "\"fake name\" } } mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager .security_groups_client)", "id\")) def test_get_mtu(self): mock_get_mtu = mock.Mock() mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu", "Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache", "\"fake image ref\" mock_config.openstack.flavor_ref = \"fake flavor ref\" name = mock.sentinel userdata =", "\"fake server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def test_create_server(self):", "(self.assertEqual( self._base_tempest_backend._manager. security_group_rules_client.delete_security_group_rule. call_count, len(security_groups_rules))) if security_group is not None: (self._base_tempest_backend._manager.servers_client. remove_security_group.assert_called_once_with( server_id=\"fake", "reboot\") (self._base_tempest_backend._manager.reboot_instance. 
assert_called_once_with(\"fake id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock( return_value=\"fake password\") self._base_tempest_backend.internal_instance_id =", "(self._base_tempest_backend._manager .security_group_rules_client) = mock_security_group_rules_client result = (self._base_tempest_backend. _add_security_group_exceptions(\"fake secgroup_id\")) for item in result:", "TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image ref\" mock_config.openstack.flavor_ref", "ANY KIND, either express or implied. See the # License for the specific", "fake_security_group (self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client self._base_tempest_backend._security_groups_rules = [] self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client", "def test_public_key(self): mock_keypair = mock.Mock() mock_keypair.public_key = \"fake public key\" self._base_tempest_backend._keypair = mock_keypair", "= \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once()", "\"value 1\", \"arg 2\": \"value 2\", \"arg 3\": None, \"arg 4\": \"value 4\"", "1\", \"arg 2\": \"value 2\", \"arg 3\": None, \"arg 4\": \"value 4\" }", "test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake output\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result =", "instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake instance id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake", "# pylint: disable=no-value-for-parameter, protected-access, arguments-differ # pylint: disable= unused-argument, no-member, attribute-defined-outside-init import copy", "tempest_backend from argus.unit_tests import test_utils from argus import util try: import unittest.mock as", "security_groups_rules is not None: (self._base_tempest_backend. _security_groups_rules) = security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule) = mock.Mock() if", "name\" } } mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager .security_groups_client) =", "\"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"} result = self._base_tempest_backend.internal_instance_id()", "None (self._base_tempest_backend. _manager.subnets_client) = mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value = \"fake dns nameservers\"", "mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. 
_manager.subnets_client) = mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value =", "test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake floating ip id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def", "= self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"}", "\"rule 4\"] self._test_cleanup(security_groups_rules=fake_rules) def test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def", "\"fake id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. primary_credentials) = mock_primary_credentials mock_subnets_client", "mock_waiters, mock_util, kwargs, wait_until=None): fake_server = { 'server': { 'id': \"fake server id\"", "mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value = \"fake", "result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. assert_called_once_with(\"fake id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password =", "= tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value", "snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule 2\", \"rule 3\", \"rule 4\"]", "= \"fake build\" mock_config.arch = \"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch, mock_get_log.return_value) result", "self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\") result = self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client. 
create_security_group.assert_called_once()) self._base_tempest_backend.internal_instance_id.assert_called_once()", "10) def test_instance_server(self): self._base_tempest_backend._manager.instance_server = mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake", "try: import unittest.mock as mock except ImportError: import mock LOG = util.get_logger() class", "def test_reboot_instance(self): self._base_tempest_backend._manager.reboot_instance = mock.Mock( return_value=\"fake reboot\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result", "mock_config.build = \"fake build\" mock_config.arch = \"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch, mock_get_log.return_value)", "mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result, \"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake id\", 10)", "def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref", "server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake floating ip id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self):", "userdata = \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend(", "self.assertEqual(result, \"fake public key\") def test_private_key(self): mock_keypair = mock.Mock() mock_keypair.private_key = \"fake private", "argus.backends.tempest import tempest_backend from argus.unit_tests import test_utils from argus import util try: import", "self._base_tempest_backend._floating_ip = floating_ip if keypair is not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials =", "self._base_tempest_backend._manager.create_keypair = mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups =", "= mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value = \"fake dns nameservers\" mock_config.argus = mock_argus", "id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_public_key(self): mock_keypair =", "= mock.Mock() mock_keypair.public_key = \"fake public key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.public_key()", "= mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self,", "userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network = mock.Mock() mock_network.subnet = {\"id\":", "not None: mock_servers_client = mock.Mock() 
mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager. servers_client) = mock_servers_client self._base_tempest_backend.internal_instance_id", "get_remote_client(self, **kwargs): return \"fake get_remote_client\" def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager')", "self._base_tempest_backend._name = \"fake name\" if wait_until is not None: result = (self._base_tempest_backend ._create_server(wait_until,", "(mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client) = mock_security_group_rules_client result = (self._base_tempest_backend.", "(self._base_tempest_backend. _security_groups_rules) = security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule) = mock.Mock() if security_group is not None:", "self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once() self._base_tempest_backend._create_security_groups.assert_called_once() def test_reboot_instance(self): self._base_tempest_backend._manager.reboot_instance = mock.Mock( return_value=\"fake reboot\") self._base_tempest_backend.internal_instance_id =", "value: del stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image ref\" self._base_tempest_backend.flavor_ref", "mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager. servers_client) = mock_servers_client self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._server", "# All Rights Reserved. # # Licensed under the Apache License, Version 2.0", "under the Apache License, Version 2.0 (the \"License\"); you may # not use", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule 2\", \"rule 3\", \"rule 4\"] self._test_cleanup(security_groups_rules=fake_rules) def", "'server': { 'id': \"fake server id\" } } stripped_kwargs = copy.deepcopy(kwargs) for key,", "mock.Mock( return_value=\"fake id\") self._base_tempest_backend._keypair = \"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\")", "= mock.Mock() mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result,", "self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\") def test_instance_output(self): self._base_tempest_backend._manager.instance_output = mock.Mock( return_value=\"fake output\") self._base_tempest_backend.internal_instance_id =", "\"name\": \"fake name\" } } mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager", "= mock_primary_credentials mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. 
_manager.subnets_client) = mock_subnets_client mock_argus", "def test_private_key(self): mock_keypair = mock.Mock() mock_keypair.private_key = \"fake private key\" self._base_tempest_backend._keypair = mock_keypair", "self._base_tempest_backend._floating_ip = {\"ip\": \"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase):", "result: self.assertEqual(item, \"fake sg_rule\") def test__create_security_groups(self): fake_security_group = { \"security_group\": { \"id\": [", "ref\" name = mock.sentinel userdata = \"fake userdata\" metadata = mock.sentinel availability_zone =", "userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone) def get_remote_client(self, **kwargs): return", "key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. show_image) = mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf = mock.Mock() result", "remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref =", "as snatcher: self._base_tempest_backend.cleanup() if security_groups_rules is not None: (self.assertEqual( self._base_tempest_backend._manager. security_group_rules_client.delete_security_group_rule. call_count, len(security_groups_rules)))", "= mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus')", "\"arg 1\": \"value 1\", \"arg 2\": \"value 2\", \"arg 3\": None, \"arg 4\":", "= mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value", "See the # License for the specific language governing permissions and limitations #", "stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image ref\" self._base_tempest_backend.flavor_ref = \"fake", "= self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value)", "secgroup_id\")) for item in result: self.assertEqual(item, \"fake sg_rule\") def test__create_security_groups(self): fake_security_group = {", "dns nameservers\" mock_config.argus = mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. 
update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status')", "2, 3, 4, 5]) @mock.patch('tempest.common.waiters.wait_for_server_termination') def _test_cleanup(self, mock_waiters, security_groups_rules=None, security_group=None, server=None, floating_ip=None, keypair=None):", "self._base_tempest_backend.internal_instance_id.call_count, 2) if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not", "{\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager .security_group_rules_client) = mock_security_group_rules_client result = (self._base_tempest_backend. _add_security_group_exceptions(\"fake secgroup_id\")) for", "\"fake id\")) def test_get_mtu(self): mock_get_mtu = mock.Mock() mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu =", "{\"id\": 4}, {\"id\": 5} ], \"name\": \"fake name\" } } mock_security_groups_client = mock.Mock()", "server is not None: mock_servers_client = mock.Mock() mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager. servers_client) =", "self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._server = server if floating_ip is not None:", "4\"] self._test_cleanup(security_groups_rules=fake_rules) def test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self):", "self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging = [\"Creating server...\"] self._base_tempest_backend._configure_networking = mock.Mock()", "self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake instance id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result, \"fake instance server\")", "mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. assert_called_once_with(\"fake id\")) def", "= \"fake image ref\" mock_config.openstack.flavor_ref = \"fake flavor ref\" name = mock.sentinel userdata", "law or agreed to in writing, software # distributed under the License is", "password\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._keypair = \"fake keypair\" result = self._base_tempest_backend.instance_password()", "mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters, mock_util,", "mock_network = mock.Mock() mock_network.subnet = {\"id\": \"fake id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value =", "= mock_security_group_rules_client result = (self._base_tempest_backend. 
_add_security_group_exceptions(\"fake secgroup_id\")) for item in result: self.assertEqual(item, \"fake", "self._base_tempest_backend._security_group = security_group if server is not None: mock_servers_client = mock.Mock() mock_servers_client.delete_server =", "if keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules", "self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client = mock.Mock() (mock_security_group_rules_client.create_security_group_rule .return_value) = {\"security_group_rule\": \"fake sg_rule\"} (self._base_tempest_backend._manager", "server...\"] self._base_tempest_backend._configure_networking = mock.Mock() self._base_tempest_backend._manager.create_keypair = mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip", "id\"}) (self._base_tempest_backend._manager.servers_client.create_server. assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image ref\", flavorRef=\"fake flavor ref\", **stripped_kwargs)) if wait_until is", "under the License. # pylint: disable=no-value-for-parameter, protected-access, arguments-differ # pylint: disable= unused-argument, no-member,", "ref\", **stripped_kwargs)) if wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until)", "(mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client)", "keypair is not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.' 'tempest_backend')", "result = self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. show_image) = mock.Mock(return_value={\"image\":", "except ImportError: import mock LOG = util.get_logger() class FakeBaseTempestBackend(tempest_backend.BaseTempestBackend): def __init__(self, name, userdata,", "mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id result", "mock_security_group_rules_client result = (self._base_tempest_backend. _add_security_group_exceptions(\"fake secgroup_id\")) for item in result: self.assertEqual(item, \"fake sg_rule\")", "express or implied. 
See the # License for the specific language governing permissions", "delete_floating_ip) = mock.Mock() self._base_tempest_backend._floating_ip = floating_ip if keypair is not None: self._base_tempest_backend._keypair =", "is not None: (self._base_tempest_backend._manager.servers_client.delete_server .assert_called_once_with(\"fake id\")) (mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2)", "fake_rules = [\"rule 1\", \"rule 2\", \"rule 3\", \"rule 4\"] self._test_cleanup(security_groups_rules=fake_rules) def test_cleanup_security_group(self):", "mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image ref\" mock_config.openstack.flavor_ref = \"fake flavor ref\" name", "= floating_ip if keypair is not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock()", "return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network =", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "CONDITIONS OF ANY KIND, either express or implied. See the # License for", "del stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image ref\" self._base_tempest_backend.flavor_ref =", "not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.' 'tempest_backend') as snatcher:", "self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. assert_called_once_with(\"fake id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock( return_value=\"fake password\")", "FakeBaseTempestBackend(tempest_backend.BaseTempestBackend): def __init__(self, name, userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone)", "floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake", "{\"id\": \"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. 
assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image ref\", flavorRef=\"fake flavor ref\", **stripped_kwargs))", "test_cleanup_security_group(self): self._test_cleanup(security_group={'name': \"fake name\"}) def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake floating", "self._base_tempest_backend._server = {\"id\": \"fake server\"} result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\") def test_instance_output(self):", "else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def test_create_server(self): kwargs = { \"arg", "return \"fake get_remote_client\" def remote_client(self): return \"fake_remote_client\" class TestBaseTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self,", "FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network = mock.Mock() mock_network.subnet", "expected_logging = [\"Cleaning up...\"] if security_groups_rules is not None: (self._base_tempest_backend. _security_groups_rules) = security_groups_rules", "fake_server = { 'server': { 'id': \"fake server id\" } } stripped_kwargs =", "argus import util try: import unittest.mock as mock except ImportError: import mock LOG", "2\", \"arg 3\": None, \"arg 4\": \"value 4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip", "2\": \"value 2\", \"arg 3\": None, \"arg 4\": \"value 4\" } self._test_create_server(kwargs=kwargs) def", "def test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock( return_value=\"fake password\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._keypair", "} mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client self._base_tempest_backend._security_groups_rules", "= mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network = mock.Mock() with test_utils.LogSnatcher('argus.backends.base') as snatcher: self._base_tempest_backend.setup_instance()", "name\" if wait_until is not None: result = (self._base_tempest_backend ._create_server(wait_until, kwargs)) else: result", "= self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. 
assert_called_once_with(\"fake id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock(", "name, userdata, metadata, availability_zone) def get_remote_client(self, **kwargs): return \"fake get_remote_client\" def remote_client(self): return", "tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value =", "\"ip\": \"fake ip\" } } mock_floating_ips_client = mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value)", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "if security_groups_rules is not None: (self.assertEqual( self._base_tempest_backend._manager. security_group_rules_client.delete_security_group_rule. call_count, len(security_groups_rules))) if security_group is", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "is not None: (self.assertEqual( self._base_tempest_backend._manager. security_group_rules_client.delete_security_group_rule. call_count, len(security_groups_rules))) if security_group is not None:", "test_instance_server(self): self._base_tempest_backend._manager.instance_server = mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake instance id\")", "(self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip.assert_called_once_with(floating_ip['id'])) if keypair is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def", "self._base_tempest_backend._manager.cleanup_credentials = mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.' 'tempest_backend') as snatcher: self._base_tempest_backend.cleanup() if security_groups_rules is not", "\"fake ip\" } } mock_floating_ips_client = mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) =", "compliance with the License. You may obtain # a copy of the License", "id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password = mock.Mock( return_value=\"fake password\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\")", "is not None: self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. 
assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule", "mock.Mock() mock_keypair.private_key = \"fake private key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.private_key() self.assertEqual(result,", "= mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\")", "@mock.patch('argus.backends.base.CloudBackend._get_log_template') def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake call\" mock_config.build = \"fake build\"", "(self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip) = mock.Mock() self._base_tempest_backend._floating_ip = floating_ip if keypair is not None: self._base_tempest_backend._keypair", "item in result: self.assertEqual(item, \"fake sg_rule\") def test__create_security_groups(self): fake_security_group = { \"security_group\": {", "def test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake call\" mock_config.build = \"fake build\" mock_config.arch", "disable= unused-argument, no-member, attribute-defined-outside-init import copy import unittest from argus.backends.tempest import tempest_backend from", "= { \"security_group\": { \"id\": [ {\"id\": 1}, {\"id\": 2}, {\"id\": 3}, {\"id\":", "server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image ref\", flavorRef=\"fake flavor ref\", **stripped_kwargs)) if wait_until", "if wait_until is not None: result = (self._base_tempest_backend ._create_server(wait_until, kwargs)) else: result =", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "mock_get_log.return_value = \"fake call\" mock_config.build = \"fake build\" mock_config.arch = \"fake arch\" expected_result", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "def _test_create_server(self, mock_waiters, mock_util, kwargs, wait_until=None): fake_server = { 'server': { 'id': \"fake", "\"fake arch\" expected_result = \"{}-{}-{}\".format(mock_config.build, mock_config.arch, mock_get_log.return_value) result = self._base._get_log_template(\"fake suffix\") self.assertEqual(result, expected_result)", "(self._base_tempest_backend. 
_add_security_group_exceptions(\"fake secgroup_id\")) for item in result: self.assertEqual(item, \"fake sg_rule\") def test__create_security_groups(self): fake_security_group", "def test_cleanup_server(self): self._test_cleanup(server=\"fake server\") def test_cleanup_floating_ip(self): self._test_cleanup(floating_ip={\"id\": \"fake floating ip id\"}) def test_cleanup_keypair(self):", "result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager,", "userdata = \"fake userdata\" metadata = mock.sentinel availability_zone = mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend(", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "self._test_cleanup(floating_ip={\"id\": \"fake floating ip id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self):", "= mock.Mock() with test_utils.LogSnatcher('argus.backends.tempest.' 'tempest_backend') as snatcher: self._base_tempest_backend.cleanup() if security_groups_rules is not None:", "id\") result = self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result, \"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake id\", 10) def test_instance_server(self):", "[\"Cleaning up...\"] if security_groups_rules is not None: (self._base_tempest_backend. _security_groups_rules) = security_groups_rules (self._base_tempest_backend._manager.security_group_rules_client. delete_security_group_rule)", "self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu() self.assertEqual(result, \"fake mtu\") self._base_tempest_backend._manager.get_mtu.assert_called_once() def test__add_security_group_exceptions(self): mock_security_group_rules_client", "= self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. show_image) = mock.Mock(return_value={\"image\": \"fake", "self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network =", "ip\") result = self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client. create_security_group.assert_called_once()) self._base_tempest_backend.internal_instance_id.assert_called_once() (self._base_tempest_backend._manager.servers_client.add_security_group .assert_called_once()) self.assertEqual(self._base_tempest_backend._security_groups_rules, [1,", "test_floating_ip(self): self._base_tempest_backend._floating_ip = {\"ip\": \"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake ip\") class", "id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def test_create_server(self): kwargs =", "= (self._base_tempest_backend. 
_add_security_group_exceptions(\"fake secgroup_id\")) for item in result: self.assertEqual(item, \"fake sg_rule\") def test__create_security_groups(self):", "assert_called_once()) if server is not None: (self._base_tempest_backend._manager.servers_client.delete_server .assert_called_once_with(\"fake id\")) (mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake id\"))", "test_get_log_template(self, mock_get_log, mock_config): mock_get_log.return_value = \"fake call\" mock_config.build = \"fake build\" mock_config.arch =", "{ \"floating_ip\": { \"ip\": \"fake ip\" } } mock_floating_ips_client = mock.Mock() mock_floating_ips_client.create_floating_ip =", "mock_argus.dns_nameservers.return_value = \"fake dns nameservers\" mock_config.argus = mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once())", "mock_waiters, security_groups_rules=None, security_group=None, server=None, floating_ip=None, keypair=None): expected_logging = [\"Cleaning up...\"] if security_groups_rules is", "return_value=\"fake id\") result = self._base_tempest_backend.instance_output(limit=10) self.assertEqual(result, \"fake output\") self._base_tempest_backend.internal_instance_id.assert_called_once() self._base_tempest_backend._manager.test_instance_output(\"fake id\", 10) def", "def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule 2\", \"rule 3\", \"rule 4\"] self._test_cleanup(security_groups_rules=fake_rules)", "import util try: import unittest.mock as mock except ImportError: import mock LOG =", "= mock.Mock( return_value=\"fake id\") self._base_tempest_backend._security_group = security_group if server is not None: mock_servers_client", "for key, value in list(stripped_kwargs.items()): if not value: del stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) =", "may # not use this file except in compliance with the License. 
You", "[] self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\")", "image\") def test_floating_ip(self): self._base_tempest_backend._floating_ip = {\"ip\": \"fake ip\"} result = self._base_tempest_backend.floating_ip() self.assertEqual(result, \"fake", "id\")) (mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake id\")) self.assertEqual( self._base_tempest_backend.internal_instance_id.call_count, 2) if floating_ip is not None:", "mock.Mock( return_value=\"fake password\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._keypair = \"fake keypair\" result", "metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network = mock.Mock() mock_network.subnet = {\"id\": \"fake", "{\"id\": 5} ], \"name\": \"fake name\" } } mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group .return_value)", "return_value=\"fake ip\") result = self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client. create_security_group.assert_called_once()) self._base_tempest_backend.internal_instance_id.assert_called_once() (self._base_tempest_backend._manager.servers_client.add_security_group .assert_called_once()) self.assertEqual(self._base_tempest_backend._security_groups_rules,", "mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value = \"fake dns nameservers\" mock_config.argus = mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client.", "(self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client self._base_tempest_backend._security_groups_rules = [] self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client =", "either express or implied. See the # License for the specific language governing", "self._base_tempest_backend.internal_instance_id.assert_called_once() (self._base_tempest_backend._manager.servers_client.add_security_group .assert_called_once()) self.assertEqual(self._base_tempest_backend._security_groups_rules, [1, 2, 3, 4, 5]) @mock.patch('tempest.common.waiters.wait_for_server_termination') def _test_cleanup(self, mock_waiters,", "copy import unittest from argus.backends.tempest import tempest_backend from argus.unit_tests import test_utils from argus", "id\") self._base_tempest_backend._server = server if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip) = mock.Mock()", "return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake instance id\") result = self._base_tempest_backend.instance_server() self.assertEqual(result,", "mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\") result = self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"]) (self._base_tempest_backend._manager.security_groups_client. 
create_security_group.assert_called_once())", "= mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters,", "= server if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip) = mock.Mock() self._base_tempest_backend._floating_ip =", "id\" (self._base_tempest_backend._manager. floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result,", "this file except in compliance with the License. You may obtain # a", "4\" } self._test_create_server(kwargs=kwargs) def test__assign_floating_ip(self): mock_create_floating_ip = mock.Mock() mock_create_floating_ip.return_value = { \"floating_ip\": {", "kwargs, wait_until=None): fake_server = { 'server': { 'id': \"fake server id\" } }", "test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging = [\"Creating server...\"] self._base_tempest_backend._configure_networking =", "if server is not None: mock_servers_client = mock.Mock() mock_servers_client.delete_server = mock.Mock() (self._base_tempest_backend._manager. servers_client)", "mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network = mock.Mock() with test_utils.LogSnatcher('argus.backends.base') as snatcher: self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging,", "id\") result = self._base_tempest_backend.reboot_instance() self.assertEqual(result, \"fake reboot\") (self._base_tempest_backend._manager.reboot_instance. assert_called_once_with(\"fake id\")) def test_instance_password(self): self._base_tempest_backend._manager.instance_password", "mock.Mock() if security_group is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock(", "or implied. See the # License for the specific language governing permissions and", "self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network = mock.Mock() with test_utils.LogSnatcher('argus.backends.base') as snatcher: self._base_tempest_backend.setup_instance() self.assertEqual(expected_logging, snatcher.output)", "@mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters, mock_util, kwargs, wait_until=None): fake_server = { 'server': { 'id':", "\"fake flavor ref\" self._base_tempest_backend._name = \"fake name\" if wait_until is not None: result", "super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone) def get_remote_client(self, **kwargs): return \"fake get_remote_client\" def", "not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server", "the License. 
# pylint: disable=no-value-for-parameter, protected-access, arguments-differ # pylint: disable= unused-argument, no-member, attribute-defined-outside-init", "if wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else: mock_waiters.assert_called_once_with(", "_add_security_group_exceptions(\"fake secgroup_id\")) for item in result: self.assertEqual(item, \"fake sg_rule\") def test__create_security_groups(self): fake_security_group =", "if security_group is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake", "not None: (self.assertEqual( self._base_tempest_backend._manager. security_group_rules_client.delete_security_group_rule. call_count, len(security_groups_rules))) if security_group is not None: (self._base_tempest_backend._manager.servers_client.", "create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image ref\" self._base_tempest_backend.flavor_ref = \"fake flavor ref\"", "def __init__(self, name, userdata, metadata, availability_zone): super(FakeBaseTempestBackend, self).__init__( name, userdata, metadata, availability_zone) def", "result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public key\") def test_private_key(self): mock_keypair = mock.Mock() mock_keypair.private_key", "\"fake ip\") class TestBaseWindowsTempestBackend(unittest.TestCase): @mock.patch('argus.config.CONFIG.argus') @mock.patch('argus.backends.tempest.manager.APIManager') def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake", "} } mock_floating_ips_client = mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None mock_internal_instance_id", "security_group=None, server=None, floating_ip=None, keypair=None): expected_logging = [\"Cleaning up...\"] if security_groups_rules is not None:", "delete_security_group_rule) = mock.Mock() if security_group is not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id", "= \"fake public key\" self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public", "self._base_tempest_backend._keypair.destroy.assert_called_once() (self._base_tempest_backend._manager.cleanup_credentials. assert_called_once()) self.assertEqual(expected_logging, snatcher.output) def test_cleanup_security_groups_rules(self): fake_rules = [\"rule 1\", \"rule 2\",", "if not value: del stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref = \"fake image", "security_group_rules_client.delete_security_group_rule. call_count, len(security_groups_rules))) if security_group is not None: (self._base_tempest_backend._manager.servers_client. 
remove_security_group.assert_called_once_with( server_id=\"fake id\", name=security_group['name']))", "{ \"id\": [ {\"id\": 1}, {\"id\": 2}, {\"id\": 3}, {\"id\": 4}, {\"id\": 5}", "self._base_tempest_backend._keypair = mock_keypair result = self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client.", "= mock.sentinel availability_zone = mock.sentinel self._base = tempest_backend.BaseWindowsTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus')", "test_utils from argus import util try: import unittest.mock as mock except ImportError: import", "mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone = mock.Mock() self._base_tempest_backend.__get_id_tenant_network = mock.Mock() with test_utils.LogSnatcher('argus.backends.base') as", "mock_get_log, mock_config): mock_get_log.return_value = \"fake call\" mock_config.build = \"fake build\" mock_config.arch = \"fake", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "= \"fake image ref\" self._base_tempest_backend.flavor_ref = \"fake flavor ref\" self._base_tempest_backend._name = \"fake name\"", "= mock.Mock() (mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client self._base_tempest_backend._security_groups_rules = []", "mock.Mock() self._base_tempest_backend._floating_ip = floating_ip if keypair is not None: self._base_tempest_backend._keypair = keypair self._base_tempest_backend._manager.cleanup_credentials", "def _test_cleanup(self, mock_waiters, security_groups_rules=None, security_group=None, server=None, floating_ip=None, keypair=None): expected_logging = [\"Cleaning up...\"] if", "protected-access, arguments-differ # pylint: disable= unused-argument, no-member, attribute-defined-outside-init import copy import unittest from", "in list(stripped_kwargs.items()): if not value: del stripped_kwargs[key] (self._base_tempest_backend._manager.servers_client. create_server) = mock.Mock(return_value=fake_server) self._base_tempest_backend.image_ref =", "self.assertEqual(expected_logging, snatcher.output) self._base_tempest_backend._configure_networking.assert_called_once() self._base_tempest_backend._manager.create_keypair.assert_called_once() self._base_tempest_backend._create_server.assert_called_once() self._base_tempest_backend._assign_floating_ip.assert_called_once() self._base_tempest_backend._create_security_groups.assert_called_once() def test_reboot_instance(self): self._base_tempest_backend._manager.reboot_instance = mock.Mock( return_value=\"fake", "= self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public key\") def test_private_key(self): mock_keypair = mock.Mock() mock_keypair.private_key =", "test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. 
show_image) = mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref()", "flavor ref\", **stripped_kwargs)) if wait_until is not None: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\",", "self._base_tempest_backend._manager.servers_client, \"fake server id\", wait_until) else: mock_waiters.assert_called_once_with( self._base_tempest_backend._manager.servers_client, \"fake server id\", 'ACTIVE') def", "permissions and limitations # under the License. # pylint: disable=no-value-for-parameter, protected-access, arguments-differ #", "self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters, mock_util, kwargs,", "= mock_floating_ips_client (self._base_tempest_backend. internal_instance_id) = mock_internal_instance_id result = self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"})", "} } mock_security_groups_client = mock.Mock() (mock_security_groups_client.create_security_group .return_value) = fake_security_group (self._base_tempest_backend._manager .security_groups_client) = mock_security_groups_client", "\"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server =", "flavor ref\" self._base_tempest_backend._name = \"fake name\" if wait_until is not None: result =", "ip\" } } mock_floating_ips_client = mock.Mock() mock_floating_ips_client.create_floating_ip = mock_create_floating_ip (mock_floating_ips_client.associate_floating_ip_to_server .return_value) = None", "arguments-differ # pylint: disable= unused-argument, no-member, attribute-defined-outside-init import copy import unittest from argus.backends.tempest", "(self._base_tempest_backend._manager. primary_credentials) = mock_primary_credentials mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. _manager.subnets_client) =", "= mock.Mock() mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. _manager.subnets_client) = mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value", "self._base_tempest_backend._assign_floating_ip() self.assertEqual(result, {\"ip\": \"fake ip\"}) (self._base_tempest_backend._manager.floating_ips_client. associate_floating_ip_to_server.assert_called_once_with( \"fake ip\", \"fake id\")) def test_get_mtu(self):", "mock_util, kwargs, wait_until=None): fake_server = { 'server': { 'id': \"fake server id\" }", "mock_config): mock_network = mock.Mock() mock_network.subnet = {\"id\": \"fake id\"} mock_primary_credentials = mock.Mock() mock_primary_credentials.return_value", "mock_subnets_client.update_subnet.return_value = None (self._base_tempest_backend. 
_manager.subnets_client) = mock_subnets_client mock_argus = mock.Mock() mock_argus.dns_nameservers.return_value = \"fake", "not None: (self._base_tempest_backend._manager.servers_client .remove_security_group) = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") self._base_tempest_backend._security_group =", "= mock.Mock() self._base_tempest_backend._manager.create_keypair = mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock()", "self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"} result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake", "mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata, availability_zone) @mock.patch('argus.config.CONFIG.argus') def test__configure_networking(self, mock_config): mock_network", "test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"} result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result, \"fake server\") def", "self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"} result", "show_image) = mock.Mock(return_value={\"image\": \"fake image\"}) self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake", "= mock_keypair result = self._base_tempest_backend.public_key() self.assertEqual(result, \"fake public key\") def test_private_key(self): mock_keypair =", "OR CONDITIONS OF ANY KIND, either express or implied. See the # License", "_test_cleanup(self, mock_waiters, security_groups_rules=None, security_group=None, server=None, floating_ip=None, keypair=None): expected_logging = [\"Cleaning up...\"] if security_groups_rules", "id\", 10) def test_instance_server(self): self._base_tempest_backend._manager.instance_server = mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock(", "obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend.", "id\") self._base_tempest_backend._security_group = security_group if server is not None: mock_servers_client = mock.Mock() mock_servers_client.delete_server", "mock_get_mtu = mock.Mock() mock_get_mtu.return_value = \"fake mtu\" self._base_tempest_backend._manager.get_mtu = mock_get_mtu result = self._base_tempest_backend.get_mtu()", "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may #", "= [] self._base_tempest_backend._add_security_group_exceptions = mock.Mock( return_value=fake_security_group[\"security_group\"][\"id\"]) self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake", "(self._base_tempest_backend._manager.security_groups_client. 
create_security_group.assert_called_once()) self._base_tempest_backend.internal_instance_id.assert_called_once() (self._base_tempest_backend._manager.servers_client.add_security_group .assert_called_once()) self.assertEqual(self._base_tempest_backend._security_groups_rules, [1, 2, 3, 4, 5]) @mock.patch('tempest.common.waiters.wait_for_server_termination') def", "self._base_tempest_backend._manager.servers_client = mock.Mock() self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake ip\") result = self._base_tempest_backend._create_security_groups() self.assertEqual(result, fake_security_group[\"security_group\"])", "mock_primary_credentials.return_value = mock_network (self._base_tempest_backend._manager. primary_credentials) = mock_primary_credentials mock_subnets_client = mock.Mock() mock_subnets_client.update_subnet.return_value = None", "(self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self, mock_waiters, mock_util, kwargs, wait_until=None):", "def setUp(self, mock_api_manager, mock_config): mock_config.openstack.image_ref = \"fake image ref\" mock_config.openstack.flavor_ref = \"fake flavor", "server if floating_ip is not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip) = mock.Mock() self._base_tempest_backend._floating_ip = floating_ip", "floating ip id\"}) def test_cleanup_keypair(self): self._test_cleanup(keypair=mock.Mock()) def test_cleanup_credentials(self): self._test_cleanup() def test_instance_setup_create_server(self): expected_logging =", "self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip = mock.Mock() self._base_tempest_backend._create_security_groups = mock.Mock() self._base_tempest_backend._availability_zone =", "= mock.Mock( return_value=\"fake id\") self._base_tempest_backend._keypair = \"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake", "image\"}) self._base_tempest_backend._conf = mock.Mock() result = self._base_tempest_backend.get_image_by_ref() self.assertEqual(result, \"fake image\") def test_floating_ip(self): self._base_tempest_backend._floating_ip", "self._base_tempest_backend._manager.reboot_instance = mock.Mock( return_value=\"fake reboot\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake id\") result = self._base_tempest_backend.reboot_instance()", "= mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client) = mock_floating_ips_client (self._base_tempest_backend. 
internal_instance_id) =", "\"fake name\" if wait_until is not None: result = (self._base_tempest_backend ._create_server(wait_until, kwargs)) else:", "self._base_tempest_backend._configure_networking = mock.Mock() self._base_tempest_backend._manager.create_keypair = mock.Mock() self._base_tempest_backend._create_server = mock.Mock( return_value=\"fake server\") self._base_tempest_backend._assign_floating_ip =", "metadata = mock.sentinel availability_zone = mock.sentinel self._base_tempest_backend = FakeBaseTempestBackend( name, userdata, metadata, availability_zone)", "password\") self._base_tempest_backend.internal_instance_id.assert_called_once() def test_internal_instance_id(self): self._base_tempest_backend._server = {\"id\": \"fake server\"} result = self._base_tempest_backend.internal_instance_id() self.assertEqual(result,", "is not None: (self._base_tempest_backend._manager.servers_client. remove_security_group.assert_called_once_with( server_id=\"fake id\", name=security_group['name'])) (self._base_tempest_backend.internal_instance_id. assert_called_once()) if server is", "self._base_tempest_backend.private_key() self.assertEqual(result, \"fake private key\") def test_get_image_by_ref(self): (self._base_tempest_backend._manager.compute_images_client. show_image) = mock.Mock(return_value={\"image\": \"fake image\"})", "return_value=\"fake id\") self._base_tempest_backend._keypair = \"fake keypair\" result = self._base_tempest_backend.instance_password() self.assertEqual(result, \"fake password\") self._base_tempest_backend.internal_instance_id.assert_called_once()", "from argus import util try: import unittest.mock as mock except ImportError: import mock", "mock.Mock( return_value=\"fake instance server\") self._base_tempest_backend.internal_instance_id = mock.Mock( return_value=\"fake instance id\") result = self._base_tempest_backend.instance_server()", "result = self._base_tempest_backend._create_server(**kwargs) self.assertEqual(result, {\"id\": \"fake server id\"}) (self._base_tempest_backend._manager.servers_client.create_server. assert_called_once_with(name=\"fake-server-instance\", imageRef=\"fake image ref\",", "= None mock_internal_instance_id = mock.Mock() mock_internal_instance_id.return_value = \"fake id\" (self._base_tempest_backend._manager. floating_ips_client) = mock_floating_ips_client", "mock_config.argus = mock_argus self._base_tempest_backend._configure_networking() (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) (self._base_tempest_backend._manager.subnets_client. update_subnet.assert_called_once()) @mock.patch('argus.util.rand_name', return_value=\"fake-server\") @mock.patch('tempest.common.waiters.wait_for_server_status') def _test_create_server(self,", "not None: (self._base_tempest_backend._manager.floating_ips_client. delete_floating_ip) = mock.Mock() self._base_tempest_backend._floating_ip = floating_ip if keypair is not", "wait_until is not None: result = (self._base_tempest_backend ._create_server(wait_until, kwargs)) else: result = self._base_tempest_backend._create_server(**kwargs)" ]
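The suite above repeats one pattern throughout: swap a collaborator for mock.Mock(return_value=...), call the method under test, then assert on the stub. A minimal self-contained sketch of that pattern; the class names here are illustrative, not from argus:

import unittest

try:
    import unittest.mock as mock
except ImportError:
    import mock  # Python 2 fallback, mirroring the import above


class Backend(object):
    # Stand-in for the real backend; the name is hypothetical.
    def password(self):
        raise NotImplementedError


class TestBackend(unittest.TestCase):
    def test_password(self):
        backend = Backend()
        # Replace the collaborator with a stub...
        backend.password = mock.Mock(return_value="fake password")
        # ...exercise the code under test...
        self.assertEqual(backend.password(), "fake password")
        # ...then assert on the recorded call.
        backend.password.assert_called_once_with()


if __name__ == "__main__":
    unittest.main()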
[ "_execute(self): try: element = WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True except:", "10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True except: print(\"timedout\", function, selector1, value1) return", "def _execute(self): try: element = WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True", "Click(ActionBase): def _execute(self): try: element = WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return", "EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True except: print(\"timedout\", function, selector1, value1) return False", "WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True except: print(\"timedout\", function, selector1, value1)", "class Click(ActionBase): def _execute(self): try: element = WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click()", "element = WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True except: print(\"timedout\", function,", "= WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True except: print(\"timedout\", function, selector1,", "try: element = WebDriverWait(browser1, 10).until( EC.presence_of_element_located((By.XPATH, selector1)) ) element.click() return True except: print(\"timedout\"," ]
[ "open(input_path,'r') as f: print('Parsing annotation files') for line in f: filename, x, y,", "img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] = path", "{} with open(input_path,'r') as f: print('Parsing annotation files') for line in f: filename,", "and found_bg == False: print('Found class name with special name bg. Will be", "= line.strip().split(',') if test_only and set != 'test': continue if class_name not in", "else int(x2) y2 = 1229 if int(y2) >= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name,", "annotation files') for line in f: filename, x1, y1, x2, y2, class_name, bucket,", "= {} classes_count = {} class_mapping = {} with open(input_path,'r') as f: print('Parsing", "int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)}) all_data = []", "= len(class_mapping) if filename not in all_imgs: all_imgs[filename] = {} path = 'images/'", ">= 1230: continue x2 = 1229 if int(x2) >= 1230 else int(x2) y2", "False all_imgs = {} classes_count = {} class_mapping = {} with open(input_path,'r') as", "def get_data_modified(input_path: str, test_only: bool = False): found_bg = False all_imgs = {}", "{} classes_count = {} class_mapping = {} with open(input_path,'r') as f: print('Parsing annotation", "path = 'images/' + filename img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols)", "negative mining).') found_bg = True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs:", "= val_to_switch return all_data, classes_count, class_mapping def get_data_modified(input_path: str, test_only: bool = False):", "= {} classes_count = {} class_mapping = {} with open(input_path, 'r') as f:", "= {} with open(input_path, 'r') as f: print('Parsing annotation files') for line in", "val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch return all_data,", "all_imgs[filename] = {} path = 'images/' + filename img = cv2.imread(path) img =", "annotation files') for line in f: filename, x, y, class_name, set = line.strip().split(',')", "= path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] =", "Will be treated as a background region (this is usually for hard negative", "f: print('Parsing annotation files') for line in f: filename, x, y, class_name, set", "return all_data, classes_count, class_mapping def get_data_modified(input_path: str, test_only: bool = False): found_bg =", "bg. 
Will be treated as a background region (this is usually for hard", "line in f: filename, x, y, class_name, set = line.strip().split(',') if test_only and", "1229 if int(x2) >= 1230 else int(x2) y2 = 1229 if int(y2) >=", "in all_imgs: all_imgs[filename] = {} path = 'images/' + filename img = cv2.imread(path)", "else: classes_count[class_name] += 1 if class_name not in class_mapping: if class_name == 'bg'", "filename not in all_imgs: path = 'images/' + filename if not os.path.exists(path): continue", "int(x1)) / 2 >= 1230 or int(y1) + (int(y2) - int(y1)) / 2", "or int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)}) all_data =", "class_name not in class_mapping: if class_name == 'bg' and found_bg == False: print('Found", "all_imgs = {} classes_count = {} class_mapping = {} with open(input_path,'r') as f:", "all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set if", "continue if class_name not in classes_count: classes_count[class_name] = 1 else: classes_count[class_name] += 1", "not in class_mapping: if class_name == 'bg' and found_bg == False: print('Found class", "class_mapping = {} with open(input_path,'r') as f: print('Parsing annotation files') for line in", "is last in the list if found_bg: if class_mapping['bg'] != len(class_mapping) - 1:", "with open(input_path, 'r') as f: print('Parsing annotation files') for line in f: filename,", "if int(y2) >= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height, 'x1':", "last in the list if found_bg: if class_mapping['bg'] != len(class_mapping) - 1: key_to_switch", "= img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width']", "class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch return all_data, classes_count, class_mapping def", "classes_count[class_name] = 1 else: classes_count[class_name] += 1 if class_name not in class_mapping: if", "1230 or int(y1) + (int(y2) - int(y1)) / 2 >= 1230: continue x2", "1230: continue x2 = 1229 if int(x2) >= 1230 else int(x2) y2 =", "is usually for hard negative mining).') found_bg = True class_mapping[class_name] = len(class_mapping) if", "all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set if", "1230 or int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)}) all_data", "not in all_imgs: path = 'images/' + filename if not os.path.exists(path): continue img", "class_mapping[key_to_switch] = val_to_switch return all_data, classes_count, class_mapping def get_data_modified(input_path: str, test_only: bool =", "with open(input_path,'r') as f: print('Parsing annotation files') for line in f: filename, x,", "all_imgs[filename]['imageset'] = set if int(x1) + (int(x2) - int(x1)) / 2 >= 1230", "mining).') found_bg = True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: all_imgs[filename]", "len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch return all_data, classes_count, class_mapping def get_data_modified(input_path: str,", "classes_count[class_name] += 1 if class_name not in class_mapping: if class_name == 'bg' and", "y2 = 1229 if int(y2) >= 1230 else int(y2) 
all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket,", "int(x1) + (int(x2) - int(x1)) / 2 >= 1230 or int(y1) + (int(y2)", "open(input_path, 'r') as f: print('Parsing annotation files') for line in f: filename, x1,", "class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch] =", "os def get_data(input_path: str, test_only: bool = False): found_bg = False all_imgs =", "- 1 class_mapping[key_to_switch] = val_to_switch return all_data, classes_count, class_mapping def get_data_modified(input_path: str, test_only:", "= img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height']", "if class_mapping['bg'] != len(class_mapping) - 1: key_to_switch = [key for key in class_mapping.keys()", "1 else: classes_count[class_name] += 1 if class_name not in class_mapping: if class_name ==", "- 1: key_to_switch = [key for key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0]", "1 class_mapping[key_to_switch] = val_to_switch return all_data, classes_count, class_mapping def get_data_modified(input_path: str, test_only: bool", "filename img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] =", "all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set if int(x) >= 1230", "= cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set if int(x1)", "get_data_modified(input_path: str, test_only: bool = False): found_bg = False all_imgs = {} classes_count", "in classes_count: classes_count[class_name] = 1 else: classes_count[class_name] += 1 if class_name not in", "in all_imgs: path = 'images/' + filename if not os.path.exists(path): continue img =", "line in f: filename, x1, y1, x2, y2, class_name, bucket, height, set =", "all_data.append(all_imgs[key]) # make sure the bg class is last in the list if", "== len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch", "int(x2), 'y2': int(y2)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) # make", "y2, class_name, bucket, height, set = line.strip().split(',') if test_only and set != 'test':", "background region (this is usually for hard negative mining).') found_bg = True class_mapping[class_name]", "(rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates']", "class_mapping: if class_name == 'bg' and found_bg == False: print('Found class name with", "= [] all_imgs[filename]['imageset'] = set if int(x) >= 1230 or int(y) >= 1230:", "= True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: path = 'images/'", "x1, y1, x2, y2, class_name, bucket, height, set = line.strip().split(',') if test_only and", "= cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath']", "found_bg: if class_mapping['bg'] != len(class_mapping) - 1: key_to_switch = [key for key in", "x, y, class_name, set = line.strip().split(',') if test_only and set != 'test': continue", "filename, x1, y1, x2, y2, class_name, bucket, height, set = line.strip().split(',') if test_only", "in f: 
filename, x, y, class_name, set = line.strip().split(',') if test_only and set", "int(x2) y2 = 1229 if int(y2) >= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket':", "if class_name not in classes_count: classes_count[class_name] = 1 else: classes_count[class_name] += 1 if", "key_to_switch = [key for key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch =", "= {} class_mapping = {} with open(input_path, 'r') as f: print('Parsing annotation files')", "bool = False): found_bg = False all_imgs = {} classes_count = {} class_mapping", "in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping) -", "class_name not in classes_count: classes_count[class_name] = 1 else: classes_count[class_name] += 1 if class_name", ">= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)}) all_data = [] for", "{} class_mapping = {} with open(input_path,'r') as f: print('Parsing annotation files') for line", "= cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set if int(x)", "bucket, 'height': height, 'x1': int(x1), 'y1': int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data =", "- int(x1)) / 2 >= 1230 or int(y1) + (int(y2) - int(y1)) /", "int(x2) >= 1230 else int(x2) y2 = 1229 if int(y2) >= 1230 else", "classes_count = {} class_mapping = {} with open(input_path,'r') as f: print('Parsing annotation files')", "if class_name == 'bg' and found_bg == False: print('Found class name with special", "= set if int(x1) + (int(x2) - int(x1)) / 2 >= 1230 or", "cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] =", "str, test_only: bool = False): found_bg = False all_imgs = {} classes_count =", ">= 1230 or int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)})", "int(y1) + (int(y2) - int(y1)) / 2 >= 1230: continue x2 = 1229", "in f: filename, x1, y1, x2, y2, class_name, bucket, height, set = line.strip().split(',')", "class_name, bucket, height, set = line.strip().split(',') if test_only and set != 'test': continue", "continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)}) all_data = [] for key in", "2 >= 1230: continue x2 = 1229 if int(x2) >= 1230 else int(x2)", "all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set if int(x1) + (int(x2) - int(x1)) /", "!= 'test': continue if class_name not in classes_count: classes_count[class_name] = 1 else: classes_count[class_name]", "all_imgs: path = 'images/' + filename if not os.path.exists(path): continue img = cv2.imread(path)", "False): found_bg = False all_imgs = {} classes_count = {} class_mapping = {}", "key in all_imgs: all_data.append(all_imgs[key]) # make sure the bg class is last in", "class_mapping def get_data_modified(input_path: str, test_only: bool = False): found_bg = False all_imgs =", "int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key])", "+ (int(x2) - int(x1)) / 2 >= 1230 or int(y1) + (int(y2) -", "/ 2 >= 1230 or int(y1) + (int(y2) - int(y1)) / 2 >=", "key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping)", "print('Parsing annotation 
files') for line in f: filename, x1, y1, x2, y2, class_name,", "True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: path = 'images/' +", "len(class_mapping) if filename not in all_imgs: all_imgs[filename] = {} path = 'images/' +", "files') for line in f: filename, x1, y1, x2, y2, class_name, bucket, height,", "[] for key in all_imgs: all_data.append(all_imgs[key]) # make sure the bg class is", "+ (int(y2) - int(y1)) / 2 >= 1230: continue x2 = 1229 if", "continue img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] =", "= 1229 if int(x2) >= 1230 else int(x2) y2 = 1229 if int(y2)", "= rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set if int(x) >= 1230 or", "name bg. Will be treated as a background region (this is usually for", "import os def get_data(input_path: str, test_only: bool = False): found_bg = False all_imgs", "1230 else int(x2) y2 = 1229 if int(y2) >= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class':", "int(x1), 'y1': int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data = [] for key in", "img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] = {}", "(int(y2) - int(y1)) / 2 >= 1230: continue x2 = 1229 if int(x2)", "y1, x2, y2, class_name, bucket, height, set = line.strip().split(',') if test_only and set", "def get_data(input_path: str, test_only: bool = False): found_bg = False all_imgs = {}", "!= len(class_mapping) - 1: key_to_switch = [key for key in class_mapping.keys() if class_mapping[key]", "== False: print('Found class name with special name bg. Will be treated as", "all_data, classes_count, class_mapping def get_data_modified(input_path: str, test_only: bool = False): found_bg = False", "= 'images/' + filename img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) =", "'x': int(x), 'y': int(y)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) #", "1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)}) all_data = [] for key", "not in classes_count: classes_count[class_name] = 1 else: classes_count[class_name] += 1 if class_name not", "int(y)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) # make sure the", "len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch return", "'y2': int(y2)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) # make sure", "found_bg = True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: all_imgs[filename] =", "+= 1 if class_name not in class_mapping: if class_name == 'bg' and found_bg", ">= 1230 or int(y1) + (int(y2) - int(y1)) / 2 >= 1230: continue", "int(y1)) / 2 >= 1230: continue x2 = 1229 if int(x2) >= 1230", "= set if int(x) >= 1230 or int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name,", "usually for hard negative mining).') found_bg = True class_mapping[class_name] = len(class_mapping) if filename", "= [] for key in all_imgs: all_data.append(all_imgs[key]) # make sure the bg class", "1 if class_name not in class_mapping: if class_name == 'bg' and found_bg ==", "the bg class is last in the list if found_bg: if class_mapping['bg'] !=", "'height': height, 'x1': int(x1), 'y1': int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data = []", "if int(x) >= 1230 or int(y) >= 1230: continue 
all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x),", "= [] all_imgs[filename]['imageset'] = set if int(x1) + (int(x2) - int(x1)) / 2", "class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1", "len(class_mapping) if filename not in all_imgs: path = 'images/' + filename if not", "height, 'x1': int(x1), 'y1': int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data = [] for", "for key in all_imgs: all_data.append(all_imgs[key]) # make sure the bg class is last", "= path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] =", "if filename not in all_imgs: path = 'images/' + filename if not os.path.exists(path):", "if found_bg: if class_mapping['bg'] != len(class_mapping) - 1: key_to_switch = [key for key", "classes_count: classes_count[class_name] = 1 else: classes_count[class_name] += 1 if class_name not in class_mapping:", "found_bg = True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: path =", "continue x2 = 1229 if int(x2) >= 1230 else int(x2) y2 = 1229", "path = 'images/' + filename if not os.path.exists(path): continue img = cv2.imread(path) img", "cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set if int(x) >=", "os.path.exists(path): continue img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]", "all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set if int(x1) + (int(x2)", "{} with open(input_path, 'r') as f: print('Parsing annotation files') for line in f:", "all_imgs[filename] = {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes']", "files') for line in f: filename, x, y, class_name, set = line.strip().split(',') if", "or int(y1) + (int(y2) - int(y1)) / 2 >= 1230: continue x2 =", "class_name, set = line.strip().split(',') if test_only and set != 'test': continue if class_name", "as f: print('Parsing annotation files') for line in f: filename, x1, y1, x2,", "all_imgs: all_data.append(all_imgs[key]) # make sure the bg class is last in the list", "'y1': int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data = [] for key in all_imgs:", "all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset']", "make sure the bg class is last in the list if found_bg: if", "False: print('Found class name with special name bg. 
Will be treated as a", "0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols", "= 1 else: classes_count[class_name] += 1 if class_name not in class_mapping: if class_name", "class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: all_imgs[filename] = {} path =", "= 1229 if int(y2) >= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height':", "for line in f: filename, x, y, class_name, set = line.strip().split(',') if test_only", "= cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width']", "class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: path = 'images/' + filename", "'x2': int(x2), 'y2': int(y2)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) #", "cv2 import os def get_data(input_path: str, test_only: bool = False): found_bg = False", "img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path", "set if int(x1) + (int(x2) - int(x1)) / 2 >= 1230 or int(y1)", "region (this is usually for hard negative mining).') found_bg = True class_mapping[class_name] =", "0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows", "1: key_to_switch = [key for key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch", "if filename not in all_imgs: all_imgs[filename] = {} path = 'images/' + filename", "{} classes_count = {} class_mapping = {} with open(input_path, 'r') as f: print('Parsing", "'bucket': bucket, 'height': height, 'x1': int(x1), 'y1': int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data", "filename, x, y, class_name, set = line.strip().split(',') if test_only and set != 'test':", "int(x) >= 1230 or int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y':", "found_bg == False: print('Found class name with special name bg. Will be treated", "int(x), 'y': int(y)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) # make", "treated as a background region (this is usually for hard negative mining).') found_bg", "[key for key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg']", "set != 'test': continue if class_name not in classes_count: classes_count[class_name] = 1 else:", "get_data(input_path: str, test_only: bool = False): found_bg = False all_imgs = {} classes_count", "len(class_mapping) - 1: key_to_switch = [key for key in class_mapping.keys() if class_mapping[key] ==", "with special name bg. 
Will be treated as a background region (this is", "bucket, height, set = line.strip().split(',') if test_only and set != 'test': continue if", "img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] =", "= len(class_mapping) if filename not in all_imgs: path = 'images/' + filename if", "cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set if int(x1) +", "== 'bg' and found_bg == False: print('Found class name with special name bg.", "class_mapping = {} with open(input_path, 'r') as f: print('Parsing annotation files') for line", "hard negative mining).') found_bg = True class_mapping[class_name] = len(class_mapping) if filename not in", "in class_mapping: if class_name == 'bg' and found_bg == False: print('Found class name", "all_imgs = {} classes_count = {} class_mapping = {} with open(input_path, 'r') as", "'y': int(y)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) # make sure", "= class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch return all_data, classes_count,", "y, class_name, set = line.strip().split(',') if test_only and set != 'test': continue if", "if int(x2) >= 1230 else int(x2) y2 = 1229 if int(y2) >= 1230", "f: filename, x, y, class_name, set = line.strip().split(',') if test_only and set !=", "rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set if int(x) >= 1230 or int(y)", "= rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set if int(x1) + (int(x2) -", "if class_name not in class_mapping: if class_name == 'bg' and found_bg == False:", "if class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch]", "print('Found class name with special name bg. 
Will be treated as a background", "(int(x2) - int(x1)) / 2 >= 1230 or int(y1) + (int(y2) - int(y1))", "rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set if int(x1) + (int(x2) - int(x1))", "'r') as f: print('Parsing annotation files') for line in f: filename, x1, y1,", "import cv2 import os def get_data(input_path: str, test_only: bool = False): found_bg =", "all_imgs[filename]['coordinates'].append({'class': class_name, 'x': int(x), 'y': int(y)}) all_data = [] for key in all_imgs:", "all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) # make sure the bg", "filename if not os.path.exists(path): continue img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols)", "classes_count = {} class_mapping = {} with open(input_path, 'r') as f: print('Parsing annotation", "filename not in all_imgs: all_imgs[filename] = {} path = 'images/' + filename img", "img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows", "all_imgs: all_imgs[filename] = {} path = 'images/' + filename img = cv2.imread(path) img", "'images/' + filename img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2]", "(rows,cols) = img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height']", "x2, y2, class_name, bucket, height, set = line.strip().split(',') if test_only and set !=", "bg class is last in the list if found_bg: if class_mapping['bg'] != len(class_mapping)", "class_mapping['bg'] class_mapping['bg'] = len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch return all_data, classes_count, class_mapping", "= False): found_bg = False all_imgs = {} classes_count = {} class_mapping =", "class_name, 'x': int(x), 'y': int(y)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key])", "set if int(x) >= 1230 or int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class': class_name, 'x':", "all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset']", "test_only: bool = False): found_bg = False all_imgs = {} classes_count = {}", "for hard negative mining).') found_bg = True class_mapping[class_name] = len(class_mapping) if filename not", "= [key for key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg']", "f: filename, x1, y1, x2, y2, class_name, bucket, height, set = line.strip().split(',') if", "mining).') found_bg = True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: path", "# make sure the bg class is last in the list if found_bg:", "[] all_imgs[filename]['imageset'] = set if int(x1) + (int(x2) - int(x1)) / 2 >=", "name with special name bg. 
Will be treated as a background region (this", "= 'images/' + filename if not os.path.exists(path): continue img = cv2.imread(path) img =", "'images/' + filename if not os.path.exists(path): continue img = cv2.imread(path) img = img[0:1280-50,", "path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = [] all_imgs[filename]['imageset'] = set", "height, set = line.strip().split(',') if test_only and set != 'test': continue if class_name", "1229 if int(y2) >= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height,", "val_to_switch return all_data, classes_count, class_mapping def get_data_modified(input_path: str, test_only: bool = False): found_bg", "<filename>keras_frcnn/simple_parser.py import cv2 import os def get_data(input_path: str, test_only: bool = False): found_bg", "class_name == 'bg' and found_bg == False: print('Found class name with special name", "print('Parsing annotation files') for line in f: filename, x, y, class_name, set =", "the list if found_bg: if class_mapping['bg'] != len(class_mapping) - 1: key_to_switch = [key", "as a background region (this is usually for hard negative mining).') found_bg =", "1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height, 'x1': int(x1), 'y1': int(y1),", "= True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: all_imgs[filename] = {}", "else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height, 'x1': int(x1), 'y1': int(y1), 'x2':", "+ filename img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath']", "False all_imgs = {} classes_count = {} class_mapping = {} with open(input_path, 'r')", "in the list if found_bg: if class_mapping['bg'] != len(class_mapping) - 1: key_to_switch =", "- int(y1)) / 2 >= 1230: continue x2 = 1229 if int(x2) >=", "all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height, 'x1': int(x1), 'y1': int(y1), 'x2': int(x2), 'y2':", "int(y2) >= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height, 'x1': int(x1),", "set = line.strip().split(',') if test_only and set != 'test': continue if class_name not", "class is last in the list if found_bg: if class_mapping['bg'] != len(class_mapping) -", "for key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0] val_to_switch = class_mapping['bg'] class_mapping['bg'] =", "{} path = 'images/' + filename img = cv2.imread(path) img = img[0:1280-50, 0:1280-50]", "line.strip().split(',') if test_only and set != 'test': continue if class_name not in classes_count:", "as f: print('Parsing annotation files') for line in f: filename, x, y, class_name,", "= len(class_mapping) - 1 class_mapping[key_to_switch] = val_to_switch return all_data, classes_count, class_mapping def get_data_modified(input_path:", "img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols", "cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] =", "if test_only and set != 'test': continue if class_name not in classes_count: classes_count[class_name]", "'x1': int(x1), 'y1': int(y1), 'x2': int(x2), 'y2': int(y2)}) all_data = [] for key", "2 >= 1230 or int(y1) + 
(int(y2) - int(y1)) / 2 >= 1230:", "+ filename if not os.path.exists(path): continue img = cv2.imread(path) img = img[0:1280-50, 0:1280-50]", "f: print('Parsing annotation files') for line in f: filename, x1, y1, x2, y2,", "list if found_bg: if class_mapping['bg'] != len(class_mapping) - 1: key_to_switch = [key for", "found_bg = False all_imgs = {} classes_count = {} class_mapping = {} with", "a background region (this is usually for hard negative mining).') found_bg = True", "class name with special name bg. Will be treated as a background region", ">= 1230 else int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height, 'x1': int(x1), 'y1':", "= {} path = 'images/' + filename img = cv2.imread(path) img = img[0:1280-50,", "'test': continue if class_name not in classes_count: classes_count[class_name] = 1 else: classes_count[class_name] +=", "classes_count, class_mapping def get_data_modified(input_path: str, test_only: bool = False): found_bg = False all_imgs", "in all_imgs: all_data.append(all_imgs[key]) # make sure the bg class is last in the", "class_mapping['bg'] != len(class_mapping) - 1: key_to_switch = [key for key in class_mapping.keys() if", "(this is usually for hard negative mining).') found_bg = True class_mapping[class_name] = len(class_mapping)", "= img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] =", "not in all_imgs: all_imgs[filename] = {} path = 'images/' + filename img =", "{} class_mapping = {} with open(input_path, 'r') as f: print('Parsing annotation files') for", "'bg' and found_bg == False: print('Found class name with special name bg. Will", "class_name, 'bucket': bucket, 'height': height, 'x1': int(x1), 'y1': int(y1), 'x2': int(x2), 'y2': int(y2)})", "= img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] =", "if not os.path.exists(path): continue img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) =", "special name bg. 
Will be treated as a background region (this is usually", "if int(x1) + (int(x2) - int(x1)) / 2 >= 1230 or int(y1) +", "path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set", "img.shape[:2] all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['coordinates'] = []", "= {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] =", "test_only and set != 'test': continue if class_name not in classes_count: classes_count[class_name] =", "/ 2 >= 1230: continue x2 = 1229 if int(x2) >= 1230 else", "int(y2)}) all_data = [] for key in all_imgs: all_data.append(all_imgs[key]) # make sure the", "img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2] all_imgs[filename] = {} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] =", "= False all_imgs = {} classes_count = {} class_mapping = {} with open(input_path,", "be treated as a background region (this is usually for hard negative mining).')", ">= 1230 else int(x2) y2 = 1229 if int(y2) >= 1230 else int(y2)", "all_imgs[filename]['imageset'] = set if int(x) >= 1230 or int(y) >= 1230: continue all_imgs[filename]['coordinates'].append({'class':", "sure the bg class is last in the list if found_bg: if class_mapping['bg']", "[] all_imgs[filename]['imageset'] = set if int(x) >= 1230 or int(y) >= 1230: continue", "= False all_imgs = {} classes_count = {} class_mapping = {} with open(input_path,'r')", "all_imgs[filename]['coordinates'] = [] all_imgs[filename]['imageset'] = set if int(x) >= 1230 or int(y) >=", "and set != 'test': continue if class_name not in classes_count: classes_count[class_name] = 1", "int(y2) all_imgs[filename]['bboxes'].append({'class': class_name, 'bucket': bucket, 'height': height, 'x1': int(x1), 'y1': int(y1), 'x2': int(x2),", "not os.path.exists(path): continue img = cv2.imread(path) img = img[0:1280-50, 0:1280-50] (rows,cols) = img.shape[:2]", "{} all_imgs[filename]['filepath'] = path all_imgs[filename]['width'] = cols all_imgs[filename]['height'] = rows all_imgs[filename]['bboxes'] = []", "= {} with open(input_path,'r') as f: print('Parsing annotation files') for line in f:", "for line in f: filename, x1, y1, x2, y2, class_name, bucket, height, set", "= {} class_mapping = {} with open(input_path,'r') as f: print('Parsing annotation files') for", "x2 = 1229 if int(x2) >= 1230 else int(x2) y2 = 1229 if", "True class_mapping[class_name] = len(class_mapping) if filename not in all_imgs: all_imgs[filename] = {} path" ]
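Both parsers return the same (all_data, classes_count, class_mapping) triple, so callers differ only in which annotation format they feed in. A hypothetical invocation; the annotation path is illustrative:

# Assumed usage -- 'annotations.txt' is not from the source.
all_data, classes_count, class_mapping = get_data('annotations.txt')
print('%d images, %d classes' % (len(all_data), len(class_mapping)))
for record in all_data[:3]:
    print(record['filepath'], len(record['bboxes']), 'boxes')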
[ "pygame.display.quit() elif event.type == VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF |", "True: for event in pygame.event.get(): if event.type == QUIT: pygame.display.quit() elif event.type ==", "main(): pygame.init() screen = pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen", "(200, 200), HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen = screen.copy() pic = pygame.surface.Surface((50,", "if event.type == QUIT: pygame.display.quit() elif event.type == VIDEORESIZE: screen = pygame.display.set_mode( event.size,", "HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen = screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255,", "pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while True: for event in pygame.event.get(): if event.type", "pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while True: for event in pygame.event.get():", "in pygame.event.get(): if event.type == QUIT: pygame.display.quit() elif event.type == VIDEORESIZE: screen =", "200), HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen = screen.copy() pic = pygame.surface.Surface((50, 50))", "200)) while True: for event in pygame.event.get(): if event.type == QUIT: pygame.display.quit() elif", "HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100)) screen.blit(pygame.transform.scale( fake_screen, screen.get_rect().size), (0,", "50)) pic.fill((255, 100, 200)) while True: for event in pygame.event.get(): if event.type ==", "screen = pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen = screen.copy()", "DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100)) screen.blit(pygame.transform.scale( fake_screen, screen.get_rect().size), (0, 0)) pygame.display.flip()", "def main(): pygame.init() screen = pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF | RESIZABLE)", "| RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100)) screen.blit(pygame.transform.scale( fake_screen, screen.get_rect().size), (0, 0)) pygame.display.flip() main()", "pygame.event.get(): if event.type == QUIT: pygame.display.quit() elif event.type == VIDEORESIZE: screen = pygame.display.set_mode(", "event.size, HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100)) screen.blit(pygame.transform.scale( fake_screen, screen.get_rect().size),", "while True: for event in pygame.event.get(): if event.type == QUIT: pygame.display.quit() elif event.type", "screen = pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100))", "fake_screen = screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while True: for", "= screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while True: for event", "pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100)) screen.blit(pygame.transform.scale( fake_screen,", "pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen = screen.copy() pic =", "QUIT: pygame.display.quit() elif event.type == VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF", "pic.fill((255, 100, 200)) while True: for event in pygame.event.get(): if event.type == QUIT:", "pygame from pygame.locals import * def main(): pygame.init() screen = pygame.display.set_mode( (200, 
200),", "== QUIT: pygame.display.quit() elif event.type == VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE |", "= pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100)) screen.blit(pygame.transform.scale(", "== VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic,", "100, 200)) while True: for event in pygame.event.get(): if event.type == QUIT: pygame.display.quit()", "event.type == QUIT: pygame.display.quit() elif event.type == VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE", "| DOUBLEBUF | RESIZABLE) fake_screen = screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100,", "elif event.type == VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF | RESIZABLE)", "| DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100, 100)) screen.blit(pygame.transform.scale( fake_screen, screen.get_rect().size), (0, 0))", "RESIZABLE) fake_screen = screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while True:", "import * def main(): pygame.init() screen = pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF", "| RESIZABLE) fake_screen = screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while", "screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while True: for event in", "event in pygame.event.get(): if event.type == QUIT: pygame.display.quit() elif event.type == VIDEORESIZE: screen", "for event in pygame.event.get(): if event.type == QUIT: pygame.display.quit() elif event.type == VIDEORESIZE:", "pygame.locals import * def main(): pygame.init() screen = pygame.display.set_mode( (200, 200), HWSURFACE |", "pygame.init() screen = pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen =", "= pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen = screen.copy() pic", "<reponame>flaviolsousa/ping-pong-ia import pygame from pygame.locals import * def main(): pygame.init() screen = pygame.display.set_mode(", "import pygame from pygame.locals import * def main(): pygame.init() screen = pygame.display.set_mode( (200,", "= pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200)) while True: for event in pygame.event.get(): if", "event.type == VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black')", "VIDEORESIZE: screen = pygame.display.set_mode( event.size, HWSURFACE | DOUBLEBUF | RESIZABLE) fake_screen.fill('black') fake_screen.blit(pic, (100,", "DOUBLEBUF | RESIZABLE) fake_screen = screen.copy() pic = pygame.surface.Surface((50, 50)) pic.fill((255, 100, 200))", "* def main(): pygame.init() screen = pygame.display.set_mode( (200, 200), HWSURFACE | DOUBLEBUF |", "from pygame.locals import * def main(): pygame.init() screen = pygame.display.set_mode( (200, 200), HWSURFACE" ]
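# The scale-to-window step above can be isolated into a small reusable
# helper. A minimal sketch, assuming the same pygame API; the helper
# name `blit_scaled` is ours, not part of pygame:
import pygame


def blit_scaled(src, dst):
    # Stretch src over the whole of dst (e.g. the display surface);
    # callers keep drawing in src's fixed coordinate system, so game
    # logic never needs to know the real window size.
    dst.blit(pygame.transform.scale(src, dst.get_rect().size), (0, 0))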
[ "# Ex. 014 c = float(input(\"Digite uma temperatura em °C: \")) print(f\"{c}°C =", "Ex. 014 c = float(input(\"Digite uma temperatura em °C: \")) print(f\"{c}°C = {((9*c)/5)+32:.1f}°F\")" ]
[ "from .views import NodeShow, NodeDetail, SelectNode urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'), path('node/detail/<int:n_id>',", "import path from .views import NodeShow, NodeDetail, SelectNode urlpatterns = [ path('node/', NodeShow.as_view(),", "django.urls import path from .views import NodeShow, NodeDetail, SelectNode urlpatterns = [ path('node/',", "import NodeShow, NodeDetail, SelectNode urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'), path('node/detail/<int:n_id>', NodeDetail.as_view(), name='node-detail'),", "<reponame>dongdawang/ssrmgmt from django.urls import path from .views import NodeShow, NodeDetail, SelectNode urlpatterns =", "path from .views import NodeShow, NodeDetail, SelectNode urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'),", ".views import NodeShow, NodeDetail, SelectNode urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'), path('node/detail/<int:n_id>', NodeDetail.as_view(),", "SelectNode urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'), path('node/detail/<int:n_id>', NodeDetail.as_view(), name='node-detail'), path('node/select/', SelectNode.as_view(), name='node-select')", "from django.urls import path from .views import NodeShow, NodeDetail, SelectNode urlpatterns = [", "NodeShow, NodeDetail, SelectNode urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'), path('node/detail/<int:n_id>', NodeDetail.as_view(), name='node-detail'), path('node/select/',", "NodeDetail, SelectNode urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'), path('node/detail/<int:n_id>', NodeDetail.as_view(), name='node-detail'), path('node/select/', SelectNode.as_view(),", "urlpatterns = [ path('node/', NodeShow.as_view(), name='node-show'), path('node/detail/<int:n_id>', NodeDetail.as_view(), name='node-detail'), path('node/select/', SelectNode.as_view(), name='node-select') ]" ]
[ "states from Utils.logging import get_logger as log import pandas as pd import datetime", "i = 1 for pay_date, pay_amount in zip(item.pay_date, item.amount): if pay_date == '-':", "log import pandas as pd import datetime GETSUMMARY = range(1) class DividendSummary: def", "get_dividend_summary(update, context): ticker = update.message.text user = update.effective_user log().info(\"User %s entered ticker value", "def get_dividend_summary(update, context): ticker = update.message.text user = update.effective_user log().info(\"User %s entered ticker", "user.first_name, ticker) years = 5 share = Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker.", "= update.message.text user = update.effective_user log().info(\"User %s entered ticker value %s.\", user.first_name, ticker)", "s += pd.to_datetime(pay_date).strftime('%d %B') + ': ' + str(pay_amount).replace('SGD', 'SGD ') +'\\n' i", "str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler)", "/start to go back to the main menu\") log().info(\"User %s entered an invalid", "], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user = update.effective_user log().info(\"User", "ConversationHandler from telegram.ext import MessageHandler from telegram.ext import Filters from telegram.ext import CallbackQueryHandler", "telegram.ext import ConversationHandler from telegram.ext import MessageHandler from telegram.ext import Filters from telegram.ext", "from telegram.ext import MessageHandler from telegram.ext import Filters from telegram.ext import CallbackQueryHandler from", "as states from Utils.logging import get_logger as log import pandas as pd import", "= update.effective_user log().info(\"User %s pressed the dividend summary button.\", user.first_name) query = update.callback_query", "(' + str(item.total) + ')</b>' + '\\n' i = 1 for pay_date, pay_amount", "pd.to_datetime(pay_date).strftime('%d %B') + ': ' + str(pay_amount).replace('SGD', 'SGD ') +'\\n' i += 1", "ticker) years = 5 share = Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker. 
Please", "GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker = update.message.text user = update.effective_user log().info(\"User %s", "+ str(pay_amount).replace('SGD', 'SGD ') +'\\n' i += 1 s += '\\n' update.message.reply_text(s, parse_mode='HTML')", "class DividendSummary: def __init__(self, dispatcher): self.__dp = dispatcher self.__handler() def __handler(self): ds_handler =", "ticker symbol (e.g D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker = update.message.text", "import CallbackQueryHandler from Model.share import Share import Controllers.global_states as states from Utils.logging import", "import Share import Controllers.global_states as states from Utils.logging import get_logger as log import", "zip(item.pay_date, item.amount): if pay_date == '-': continue s += pd.to_datetime(pay_date).strftime('%d %B') + ':", "%B') + ': ' + str(pay_amount).replace('SGD', 'SGD ') +'\\n' i += 1 s", "for item in a: s += '<b>' + str(item.year) + ' (' +", "def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')], states={", ") self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user = update.effective_user log().info(\"User %s pressed the", "log().info(\"User %s entered ticker value %s.\", user.first_name, ticker) years = 5 share =", "if pay_date == '-': continue s += pd.to_datetime(pay_date).strftime('%d %B') + ': ' +", "= range(1) class DividendSummary: def __init__(self, dispatcher): self.__dp = dispatcher self.__handler() def __handler(self):", "log().info(\"User %s pressed the dividend summary button.\", user.first_name) query = update.callback_query query.answer() query.edit_message_text(", "an invalid ticker value %s.\", user.first_name, ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year", "in zip(item.pay_date, item.amount): if pay_date == '-': continue s += pd.to_datetime(pay_date).strftime('%d %B') +", "+ str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] )", "return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s = '<b>' + share.name", "'<b>' + share.name + '</b>\\n\\n' for item in a: s += '<b>' +", "entered ticker value %s.\", user.first_name, ticker) years = 5 share = Share(ticker) if", "s = '<b>' + share.name + '</b>\\n\\n' for item in a: s +=", "= share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s = '<b>' + share.name + '</b>\\n\\n' for", "datetime GETSUMMARY = range(1) class DividendSummary: def __init__(self, dispatcher): self.__dp = dispatcher self.__handler()", "text=\"Enter ticker symbol (e.g D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker =", "dispatcher self.__handler() def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) +", "from telegram.ext import ConversationHandler from telegram.ext import MessageHandler from telegram.ext import Filters from", "ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [", "telegram.ext import MessageHandler from telegram.ext import Filters 
from telegram.ext import CallbackQueryHandler from Model.share", "import MessageHandler from telegram.ext import Filters from telegram.ext import CallbackQueryHandler from Model.share import", "%s entered ticker value %s.\", user.first_name, ticker) years = 5 share = Share(ticker)", "+ str(item.year) + ' (' + str(item.total) + ')</b>' + '\\n' i =", "import Controllers.global_states as states from Utils.logging import get_logger as log import pandas as", "GETSUMMARY = range(1) class DividendSummary: def __init__(self, dispatcher): self.__dp = dispatcher self.__handler() def", "query = update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\") return GETSUMMARY @staticmethod", "user = update.effective_user log().info(\"User %s entered ticker value %s.\", user.first_name, ticker) years =", "= update.effective_user log().info(\"User %s entered ticker value %s.\", user.first_name, ticker) years = 5", "= dispatcher self.__handler() def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO)", "'<b>' + str(item.year) + ' (' + str(item.total) + ')</b>' + '\\n' i", "pressed the dividend summary button.\", user.first_name) query = update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker", "(e.g D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker = update.message.text user =", "in a: s += '<b>' + str(item.year) + ' (' + str(item.total) +", "'SGD ') +'\\n' i += 1 s += '\\n' update.message.reply_text(s, parse_mode='HTML') return ConversationHandler.END", "button.\", user.first_name) query = update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\") return", "not share.is_valid: update.message.reply_text(\"Invalid ticker. Please use /start to go back to the main", "states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update,", "share.is_valid: update.message.reply_text(\"Invalid ticker. 
Please use /start to go back to the main menu\")", "+ ')</b>' + '\\n' i = 1 for pay_date, pay_amount in zip(item.pay_date, item.amount):", "log().info(\"User %s entered an invalid ticker value %s.\", user.first_name, ticker) return ConversationHandler.END a", "import get_logger as log import pandas as pd import datetime GETSUMMARY = range(1)", "ticker value %s.\", user.first_name, ticker) years = 5 share = Share(ticker) if not", "a: s += '<b>' + str(item.year) + ' (' + str(item.total) + ')</b>'", "telegram.ext import CallbackQueryHandler from Model.share import Share import Controllers.global_states as states from Utils.logging", "%s.\", user.first_name, ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s =", "def __init__(self, dispatcher): self.__dp = dispatcher self.__handler() def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler(", "+ str(item.total) + ')</b>' + '\\n' i = 1 for pay_date, pay_amount in", "+ '\\n' i = 1 for pay_date, pay_amount in zip(item.pay_date, item.amount): if pay_date", "user.first_name) query = update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\") return GETSUMMARY", "ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s = '<b>' + share.name +", "= update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\") return GETSUMMARY @staticmethod def", "value %s.\", user.first_name, ticker) years = 5 share = Share(ticker) if not share.is_valid:", "a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s = '<b>' + share.name + '</b>\\n\\n'", "item in a: s += '<b>' + str(item.year) + ' (' + str(item.total)", "use /start to go back to the main menu\") log().info(\"User %s entered an", "1 for pay_date, pay_amount in zip(item.pay_date, item.amount): if pay_date == '-': continue s", "from Model.share import Share import Controllers.global_states as states from Utils.logging import get_logger as", "pattern='^' + str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[]", "ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary)", "+ ': ' + str(pay_amount).replace('SGD', 'SGD ') +'\\n' i += 1 s +=", "self.__dp = dispatcher self.__handler() def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' +", "Controllers.global_states as states from Utils.logging import get_logger as log import pandas as pd", "str(pay_amount).replace('SGD', 'SGD ') +'\\n' i += 1 s += '\\n' update.message.reply_text(s, parse_mode='HTML') return", "menu\") log().info(\"User %s entered an invalid ticker value %s.\", user.first_name, ticker) return ConversationHandler.END", "= 1 for pay_date, pay_amount in zip(item.pay_date, item.amount): if pay_date == '-': continue", "Share import Controllers.global_states as states from Utils.logging import get_logger as log import pandas", "value %s.\", user.first_name, ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s", "+= '<b>' + str(item.year) + ' (' + str(item.total) + ')</b>' + 
'\\n'", "')</b>' + '\\n' i = 1 for pay_date, pay_amount in zip(item.pay_date, item.amount): if", "%s.\", user.first_name, ticker) years = 5 share = Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid", "if not share.is_valid: update.message.reply_text(\"Invalid ticker. Please use /start to go back to the", "Model.share import Share import Controllers.global_states as states from Utils.logging import get_logger as log", "the main menu\") log().info(\"User %s entered an invalid ticker value %s.\", user.first_name, ticker)", "share = Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker. Please use /start to go", "entered an invalid ticker value %s.\", user.first_name, ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year,", "import Filters from telegram.ext import CallbackQueryHandler from Model.share import Share import Controllers.global_states as", "context): user = update.effective_user log().info(\"User %s pressed the dividend summary button.\", user.first_name) query", "+ share.name + '</b>\\n\\n' for item in a: s += '<b>' + str(item.year)", "__handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY:", "- years) s = '<b>' + share.name + '</b>\\n\\n' for item in a:", "query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker", "update.message.text user = update.effective_user log().info(\"User %s entered ticker value %s.\", user.first_name, ticker) years", "user.first_name, ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s = '<b>'", "s += '<b>' + str(item.year) + ' (' + str(item.total) + ')</b>' +", "}, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user = update.effective_user log().info(\"User %s", "ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s = '<b>' +", "@staticmethod def get_ticker(update, context): user = update.effective_user log().info(\"User %s pressed the dividend summary", "get_logger as log import pandas as pd import datetime GETSUMMARY = range(1) class", "get_ticker(update, context): user = update.effective_user log().info(\"User %s pressed the dividend summary button.\", user.first_name)", "5 share = Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker. 
Please use /start to", "summary button.\", user.first_name) query = update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\")", "ticker value %s.\", user.first_name, ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years)", "pd import datetime GETSUMMARY = range(1) class DividendSummary: def __init__(self, dispatcher): self.__dp =", "fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user = update.effective_user log().info(\"User %s pressed", "import pandas as pd import datetime GETSUMMARY = range(1) class DividendSummary: def __init__(self,", "update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update,", "Please use /start to go back to the main menu\") log().info(\"User %s entered", "dividend summary button.\", user.first_name) query = update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g", "pandas as pd import datetime GETSUMMARY = range(1) class DividendSummary: def __init__(self, dispatcher):", "str(item.year) + ' (' + str(item.total) + ')</b>' + '\\n' i = 1", "'-': continue s += pd.to_datetime(pay_date).strftime('%d %B') + ': ' + str(pay_amount).replace('SGD', 'SGD ')", "'</b>\\n\\n' for item in a: s += '<b>' + str(item.year) + ' ('", "Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker. Please use /start to go back to", "+ '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod", "continue s += pd.to_datetime(pay_date).strftime('%d %B') + ': ' + str(pay_amount).replace('SGD', 'SGD ') +'\\n'", "entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ],", "for pay_date, pay_amount in zip(item.pay_date, item.amount): if pay_date == '-': continue s +=", "from telegram.ext import Filters from telegram.ext import CallbackQueryHandler from Model.share import Share import", "user = update.effective_user log().info(\"User %s pressed the dividend summary button.\", user.first_name) query =", "return GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker = update.message.text user = update.effective_user log().info(\"User", "Utils.logging import get_logger as log import pandas as pd import datetime GETSUMMARY =", "as log import pandas as pd import datetime GETSUMMARY = range(1) class DividendSummary:", "self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user = update.effective_user log().info(\"User %s pressed the dividend", "item.amount): if pay_date == '-': continue s += pd.to_datetime(pay_date).strftime('%d %B') + ': '", "the dividend summary button.\", user.first_name) query = update.callback_query query.answer() query.edit_message_text( text=\"Enter ticker symbol", "' + str(pay_amount).replace('SGD', 'SGD ') +'\\n' i += 1 s += '\\n' update.message.reply_text(s,", "back to the main menu\") log().info(\"User %s entered an invalid ticker value %s.\",", "to the main menu\") log().info(\"User %s entered an invalid ticker value %s.\", user.first_name,", "'\\n' i = 1 for pay_date, pay_amount in zip(item.pay_date, item.amount): if pay_date ==", 
"share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year - years) s = '<b>' + share.name + '</b>\\n\\n' for item", "__init__(self, dispatcher): self.__dp = dispatcher self.__handler() def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker,", "years = 5 share = Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker. Please use", "pay_date, pay_amount in zip(item.pay_date, item.amount): if pay_date == '-': continue s += pd.to_datetime(pay_date).strftime('%d", "[ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user", "import datetime GETSUMMARY = range(1) class DividendSummary: def __init__(self, dispatcher): self.__dp = dispatcher", "dispatcher): self.__dp = dispatcher self.__handler() def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^'", "@staticmethod def get_dividend_summary(update, context): ticker = update.message.text user = update.effective_user log().info(\"User %s entered", "context): ticker = update.message.text user = update.effective_user log().info(\"User %s entered ticker value %s.\",", "update.effective_user log().info(\"User %s entered ticker value %s.\", user.first_name, ticker) years = 5 share", "+= pd.to_datetime(pay_date).strftime('%d %B') + ': ' + str(pay_amount).replace('SGD', 'SGD ') +'\\n' i +=", "MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user =", "self.__handler() def __handler(self): ds_handler = ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')],", "': ' + str(pay_amount).replace('SGD', 'SGD ') +'\\n' i += 1 s += '\\n'", "= Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker. 
Please use /start to go back", "self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], },", "years) s = '<b>' + share.name + '</b>\\n\\n' for item in a: s", "Filters from telegram.ext import CallbackQueryHandler from Model.share import Share import Controllers.global_states as states", "datetime.datetime.now().year - years) s = '<b>' + share.name + '</b>\\n\\n' for item in", "== '-': continue s += pd.to_datetime(pay_date).strftime('%d %B') + ': ' + str(pay_amount).replace('SGD', 'SGD", "symbol (e.g D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker = update.message.text user", "MessageHandler from telegram.ext import Filters from telegram.ext import CallbackQueryHandler from Model.share import Share", "GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context):", "go back to the main menu\") log().info(\"User %s entered an invalid ticker value", "+ ' (' + str(item.total) + ')</b>' + '\\n' i = 1 for", "from Utils.logging import get_logger as log import pandas as pd import datetime GETSUMMARY", "from telegram.ext import CallbackQueryHandler from Model.share import Share import Controllers.global_states as states from", "= ConversationHandler( entry_points=[CallbackQueryHandler( self.get_ticker, pattern='^' + str(states.DIVIDENDINFO) + '$')], states={ GETSUMMARY: [ MessageHandler(Filters.text,", "to go back to the main menu\") log().info(\"User %s entered an invalid ticker", "share.name + '</b>\\n\\n' for item in a: s += '<b>' + str(item.year) +", "ticker. Please use /start to go back to the main menu\") log().info(\"User %s", "self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def get_ticker(update, context): user = update.effective_user", "import ConversationHandler from telegram.ext import MessageHandler from telegram.ext import Filters from telegram.ext import", "def get_ticker(update, context): user = update.effective_user log().info(\"User %s pressed the dividend summary button.\",", "D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update, context): ticker = update.message.text user = update.effective_user", "%s entered an invalid ticker value %s.\", user.first_name, ticker) return ConversationHandler.END a =", "pay_amount in zip(item.pay_date, item.amount): if pay_date == '-': continue s += pd.to_datetime(pay_date).strftime('%d %B')", "ticker = update.message.text user = update.effective_user log().info(\"User %s entered ticker value %s.\", user.first_name,", "pay_date == '-': continue s += pd.to_datetime(pay_date).strftime('%d %B') + ': ' + str(pay_amount).replace('SGD',", "telegram.ext import Filters from telegram.ext import CallbackQueryHandler from Model.share import Share import Controllers.global_states", "DividendSummary: def __init__(self, dispatcher): self.__dp = dispatcher self.__handler() def __handler(self): ds_handler = ConversationHandler(", "update.message.reply_text(\"Invalid ticker. Please use /start to go back to the main menu\") log().info(\"User", "+ '</b>\\n\\n' for item in a: s += '<b>' + str(item.year) + '", "= 5 share = Share(ticker) if not share.is_valid: update.message.reply_text(\"Invalid ticker. 
Please use /start", "range(1) class DividendSummary: def __init__(self, dispatcher): self.__dp = dispatcher self.__handler() def __handler(self): ds_handler", "str(item.total) + ')</b>' + '\\n' i = 1 for pay_date, pay_amount in zip(item.pay_date,", "query.answer() query.edit_message_text( text=\"Enter ticker symbol (e.g D05)\") return GETSUMMARY @staticmethod def get_dividend_summary(update, context):", "'$')], states={ GETSUMMARY: [ MessageHandler(Filters.text, self.get_dividend_summary) ], }, fallbacks=[] ) self.__dp.add_handler(ds_handler) @staticmethod def", "as pd import datetime GETSUMMARY = range(1) class DividendSummary: def __init__(self, dispatcher): self.__dp", "update.effective_user log().info(\"User %s pressed the dividend summary button.\", user.first_name) query = update.callback_query query.answer()", "main menu\") log().info(\"User %s entered an invalid ticker value %s.\", user.first_name, ticker) return", "%s pressed the dividend summary button.\", user.first_name) query = update.callback_query query.answer() query.edit_message_text( text=\"Enter", "' (' + str(item.total) + ')</b>' + '\\n' i = 1 for pay_date,", "= '<b>' + share.name + '</b>\\n\\n' for item in a: s += '<b>'", "CallbackQueryHandler from Model.share import Share import Controllers.global_states as states from Utils.logging import get_logger", "invalid ticker value %s.\", user.first_name, ticker) return ConversationHandler.END a = share.get_dividend_summary(datetime.datetime.now().year, datetime.datetime.now().year -" ]
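# Wiring this handler into a bot: DividendSummary registers itself on
# the dispatcher it is given. A minimal sketch, assuming the
# python-telegram-bot v12/13 API that this handler appears to target;
# "YOUR-TOKEN" is a placeholder, and run_bot is our name:
from telegram.ext import Updater


def run_bot():
    updater = Updater("YOUR-TOKEN", use_context=True)
    DividendSummary(updater.dispatcher)  # adds the ConversationHandler
    updater.start_polling()
    updater.idle()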
[ "to integers kmers = [ feature_mapping[kmer_tuple] for kmer_tuple in window(str_seq.upper(), kmer_len) if kmer_tuple", "= max + 1 # so we add the max value and remove", "the max value and remove it afterwards # numpy.bincount was found to be", "kmers = [ feature_mapping[kmer_tuple] for kmer_tuple in window(str_seq.upper(), kmer_len) if kmer_tuple in feature_mapping", "kmer_hash = {} counter = 0 for kmer in product(\"ATGC\",repeat=kmer_len): if kmer not", "in reversed(kmer)]) kmer_hash[rev_compl] = counter counter += 1 return kmer_hash, counter def window(seq,n):", "composition_v failStart = 0 if seq_len >= kmer_len: startKmer = str_seq[0:kmer_len].upper() startKmerT =", "for kmer_tuple in window(str_seq.upper(), kmer_len) if kmer_tuple in feature_mapping ] # numpy.bincount returns", "feature_mapping, nr_features = generate_feature_mapping(kmer_len) composition = np.zeros(nr_features,dtype=np.int) start_composition = np.zeros(nr_features,dtype=np.int) with gzip.open(read_file, \"rt\")", "tee(seq,n) for i,el in enumerate(els): for _ in range(i): next(el, None) return zip(*els)", "zip(*els) def _calculate_composition(read_file, kmer_len, length_threshold=25): #Generate kmer dictionary feature_mapping, nr_features = generate_feature_mapping(kmer_len) composition", "if kmer_tuple in feature_mapping ] # numpy.bincount returns an array of size =", "parser.add_argument(\"read_file\", help=\"gzipped fastq read file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed overlap\") parser.add_argument(\"outFileStub\", help=\"stub for", "length_threshold=25): #Generate kmer dictionary feature_mapping, nr_features = generate_feature_mapping(kmer_len) composition = np.zeros(nr_features,dtype=np.int) start_composition =", "1) composition_v = np.bincount(np.array(kmers)) composition_v[-1] -= 1 # Adding pseudo counts before storing", "args = parser.parse_args() #import ipdb; ipdb.set_trace() (feature_mapping, composition, start_composition,failStart) = _calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart))", "+ \",\" + str(kidx) + \",\" + str(composition[kidx]) + \",\" + str(start_composition[kidx]) )", "using collections.Counter kmers.append(nr_features - 1) composition_v = np.bincount(np.array(kmers)) composition_v[-1] -= 1 # Adding", "parser = argparse.ArgumentParser() parser.add_argument(\"read_file\", help=\"gzipped fastq read file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed overlap\")", "composition_v = np.bincount(np.array(kmers)) composition_v[-1] -= 1 # Adding pseudo counts before storing in", "much more efficient than # counting manually or using collections.Counter kmers.append(nr_features - 1)", "return zip(*els) def _calculate_composition(read_file, kmer_len, length_threshold=25): #Generate kmer dictionary feature_mapping, nr_features = generate_feature_mapping(kmer_len)", "= 0 if seq_len >= kmer_len: startKmer = str_seq[0:kmer_len].upper() startKmerT = tuple(startKmer) if", "product, tee from collections import Counter, OrderedDict from Bio import SeqIO def generate_feature_mapping(kmer_len):", "kmer_len, length_threshold=25): #Generate kmer dictionary feature_mapping, nr_features = generate_feature_mapping(kmer_len) composition = np.zeros(nr_features,dtype=np.int) start_composition", "# numpy.bincount returns an array of size = max + 1 # so", "numpy.bincount was found to be much more efficient than # counting manually or", "import gzip import sys import argparse import re import logging import numpy as", "= 
np.bincount(np.array(kmers)) composition_v[-1] -= 1 # Adding pseudo counts before storing in dict", "counter rev_compl = tuple([BASE_COMPLEMENT[x] for x in reversed(kmer)]) kmer_hash[rev_compl] = counter counter +=", "for x in reversed(kmer)]) kmer_hash[rev_compl] = counter counter += 1 return kmer_hash, counter", "import argparse import re import logging import numpy as np import pandas as", "1 return kmer_hash, counter def window(seq,n): els = tee(seq,n) for i,el in enumerate(els):", "{\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {} counter = 0 for kmer in product(\"ATGC\",repeat=kmer_len): if kmer", "not in kmer_hash: kmer_hash[kmer] = counter rev_compl = tuple([BASE_COMPLEMENT[x] for x in reversed(kmer)])", "length_threshold: continue str_seq = str(seq.seq) # Create a list containing all kmers, translated", "# Create a list containing all kmers, translated to integers kmers = [", "and remove it afterwards # numpy.bincount was found to be much more efficient", "= {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {} counter = 0 for kmer in product(\"ATGC\",repeat=kmer_len): if", "import SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {} counter = 0", "#import ipdb; ipdb.set_trace() (feature_mapping, composition, start_composition,failStart) = _calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart)) for k in", "ipdb; ipdb.set_trace() (feature_mapping, composition, start_composition,failStart) = _calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart)) for k in sorted(feature_mapping,", "= 0 for kmer in product(\"ATGC\",repeat=kmer_len): if kmer not in kmer_hash: kmer_hash[kmer] =", "= tee(seq,n) for i,el in enumerate(els): for _ in range(i): next(el, None) return", "max + 1 # so we add the max value and remove it", "tuple(startKmer) if startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return feature_mapping, composition, start_composition, failStart", "0 if seq_len >= kmer_len: startKmer = str_seq[0:kmer_len].upper() startKmerT = tuple(startKmer) if startKmerT", "failStart+=1 return feature_mapping, composition, start_composition, failStart def main(argv): parser = argparse.ArgumentParser() parser.add_argument(\"read_file\", help=\"gzipped", "= counter rev_compl = tuple([BASE_COMPLEMENT[x] for x in reversed(kmer)]) kmer_hash[rev_compl] = counter counter", "counter += 1 return kmer_hash, counter def window(seq,n): els = tee(seq,n) for i,el", "counting manually or using collections.Counter kmers.append(nr_features - 1) composition_v = np.bincount(np.array(kmers)) composition_v[-1] -=", "in sorted(feature_mapping, key=feature_mapping.get): kidx = feature_mapping[k] print(\"\".join(k) + \",\" + str(kidx) + \",\"", "0 for kmer in product(\"ATGC\",repeat=kmer_len): if kmer not in kmer_hash: kmer_hash[kmer] = counter", "counter = 0 for kmer in product(\"ATGC\",repeat=kmer_len): if kmer not in kmer_hash: kmer_hash[kmer]", "-= 1 # Adding pseudo counts before storing in dict composition += composition_v", "= str_seq[0:kmer_len].upper() startKmerT = tuple(startKmer) if startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return", "kmer_tuple in feature_mapping ] # numpy.bincount returns an array of size = max", "else: failStart+=1 return feature_mapping, composition, start_composition, failStart def main(argv): parser = argparse.ArgumentParser() 
parser.add_argument(\"read_file\",", "array of size = max + 1 # so we add the max", "manually or using collections.Counter kmers.append(nr_features - 1) composition_v = np.bincount(np.array(kmers)) composition_v[-1] -= 1", "k in sorted(feature_mapping, key=feature_mapping.get): kidx = feature_mapping[k] print(\"\".join(k) + \",\" + str(kidx) +", "{} counter = 0 for kmer in product(\"ATGC\",repeat=kmer_len): if kmer not in kmer_hash:", "def window(seq,n): els = tee(seq,n) for i,el in enumerate(els): for _ in range(i):", "p from itertools import product, tee from collections import Counter, OrderedDict from Bio", "an array of size = max + 1 # so we add the", "composition_v[-1] -= 1 # Adding pseudo counts before storing in dict composition +=", "tee from collections import Counter, OrderedDict from Bio import SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT", "SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if seq_len<= length_threshold: continue str_seq = str(seq.seq) # Create", "kmer not in kmer_hash: kmer_hash[kmer] = counter rev_compl = tuple([BASE_COMPLEMENT[x] for x in", "more efficient than # counting manually or using collections.Counter kmers.append(nr_features - 1) composition_v", "of size = max + 1 # so we add the max value", "main(argv): parser = argparse.ArgumentParser() parser.add_argument(\"read_file\", help=\"gzipped fastq read file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed", "if startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return feature_mapping, composition, start_composition, failStart def", "# so we add the max value and remove it afterwards # numpy.bincount", "= parser.parse_args() #import ipdb; ipdb.set_trace() (feature_mapping, composition, start_composition,failStart) = _calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart)) for", "\"rt\") as handle: for seq in SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if seq_len<= length_threshold:", "next(el, None) return zip(*els) def _calculate_composition(read_file, kmer_len, length_threshold=25): #Generate kmer dictionary feature_mapping, nr_features", "# counting manually or using collections.Counter kmers.append(nr_features - 1) composition_v = np.bincount(np.array(kmers)) composition_v[-1]", "help=\"stub for output files\") args = parser.parse_args() #import ipdb; ipdb.set_trace() (feature_mapping, composition, start_composition,failStart)", "+ \",\" + str(composition[kidx]) + \",\" + str(start_composition[kidx]) ) if __name__ == \"__main__\":", "def main(argv): parser = argparse.ArgumentParser() parser.add_argument(\"read_file\", help=\"gzipped fastq read file\") parser.add_argument(\"kmer_length\", help=\"kmer length", "in SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if seq_len<= length_threshold: continue str_seq = str(seq.seq) #", "SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {} counter = 0 for", "gzip import sys import argparse import re import logging import numpy as np", "kmer dictionary feature_mapping, nr_features = generate_feature_mapping(kmer_len) composition = np.zeros(nr_features,dtype=np.int) start_composition = np.zeros(nr_features,dtype=np.int) with", "or using collections.Counter kmers.append(nr_features - 1) composition_v = np.bincount(np.array(kmers)) composition_v[-1] -= 1 #", "than # counting manually or using collections.Counter kmers.append(nr_features - 1) composition_v = 
np.bincount(np.array(kmers))", "= tuple(startKmer) if startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return feature_mapping, composition, start_composition,", "efficient than # counting manually or using collections.Counter kmers.append(nr_features - 1) composition_v =", "all kmers, translated to integers kmers = [ feature_mapping[kmer_tuple] for kmer_tuple in window(str_seq.upper(),", "counter counter += 1 return kmer_hash, counter def window(seq,n): els = tee(seq,n) for", "parser.add_argument(\"outFileStub\", help=\"stub for output files\") args = parser.parse_args() #import ipdb; ipdb.set_trace() (feature_mapping, composition,", "import re import logging import numpy as np import pandas as p from", "in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return feature_mapping, composition, start_composition, failStart def main(argv): parser", "Bio import SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {} counter =", "translated to integers kmers = [ feature_mapping[kmer_tuple] for kmer_tuple in window(str_seq.upper(), kmer_len) if", "range(i): next(el, None) return zip(*els) def _calculate_composition(read_file, kmer_len, length_threshold=25): #Generate kmer dictionary feature_mapping,", "OrderedDict from Bio import SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {}", "itertools import product, tee from collections import Counter, OrderedDict from Bio import SeqIO", "str_seq = str(seq.seq) # Create a list containing all kmers, translated to integers", "continue str_seq = str(seq.seq) # Create a list containing all kmers, translated to", "in product(\"ATGC\",repeat=kmer_len): if kmer not in kmer_hash: kmer_hash[kmer] = counter rev_compl = tuple([BASE_COMPLEMENT[x]", "remove it afterwards # numpy.bincount was found to be much more efficient than", "in range(i): next(el, None) return zip(*els) def _calculate_composition(read_file, kmer_len, length_threshold=25): #Generate kmer dictionary", "counts before storing in dict composition += composition_v failStart = 0 if seq_len", "if seq_len >= kmer_len: startKmer = str_seq[0:kmer_len].upper() startKmerT = tuple(startKmer) if startKmerT in", "import Counter, OrderedDict from Bio import SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash", "handle: for seq in SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if seq_len<= length_threshold: continue str_seq", "= str(seq.seq) # Create a list containing all kmers, translated to integers kmers", "with gzip.open(read_file, \"rt\") as handle: for seq in SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if", "composition = np.zeros(nr_features,dtype=np.int) start_composition = np.zeros(nr_features,dtype=np.int) with gzip.open(read_file, \"rt\") as handle: for seq", "storing in dict composition += composition_v failStart = 0 if seq_len >= kmer_len:", "seq_len = len(seq) if seq_len<= length_threshold: continue str_seq = str(seq.seq) # Create a", "startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return feature_mapping, composition, start_composition, failStart def main(argv):", "_calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart)) for k in sorted(feature_mapping, key=feature_mapping.get): kidx = feature_mapping[k] 
print(\"\".join(k) +", "seq_len<= length_threshold: continue str_seq = str(seq.seq) # Create a list containing all kmers,", "seq in SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if seq_len<= length_threshold: continue str_seq = str(seq.seq)", "int(args.kmer_length)) print(str(failStart)) for k in sorted(feature_mapping, key=feature_mapping.get): kidx = feature_mapping[k] print(\"\".join(k) + \",\"", "# numpy.bincount was found to be much more efficient than # counting manually", "for k in sorted(feature_mapping, key=feature_mapping.get): kidx = feature_mapping[k] print(\"\".join(k) + \",\" + str(kidx)", "from itertools import product, tee from collections import Counter, OrderedDict from Bio import", "kmer_hash[rev_compl] = counter counter += 1 return kmer_hash, counter def window(seq,n): els =", "= len(seq) if seq_len<= length_threshold: continue str_seq = str(seq.seq) # Create a list", "+= composition_v failStart = 0 if seq_len >= kmer_len: startKmer = str_seq[0:kmer_len].upper() startKmerT", "argparse.ArgumentParser() parser.add_argument(\"read_file\", help=\"gzipped fastq read file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed overlap\") parser.add_argument(\"outFileStub\", help=\"stub", "def _calculate_composition(read_file, kmer_len, length_threshold=25): #Generate kmer dictionary feature_mapping, nr_features = generate_feature_mapping(kmer_len) composition =", "dictionary feature_mapping, nr_features = generate_feature_mapping(kmer_len) composition = np.zeros(nr_features,dtype=np.int) start_composition = np.zeros(nr_features,dtype=np.int) with gzip.open(read_file,", "help=\"gzipped fastq read file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed overlap\") parser.add_argument(\"outFileStub\", help=\"stub for output", "from collections import Counter, OrderedDict from Bio import SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT =", "in kmer_hash: kmer_hash[kmer] = counter rev_compl = tuple([BASE_COMPLEMENT[x] for x in reversed(kmer)]) kmer_hash[rev_compl]", "import product, tee from collections import Counter, OrderedDict from Bio import SeqIO def", "output files\") args = parser.parse_args() #import ipdb; ipdb.set_trace() (feature_mapping, composition, start_composition,failStart) = _calculate_composition(args.read_file,", "import pandas as p from itertools import product, tee from collections import Counter,", "np.zeros(nr_features,dtype=np.int) start_composition = np.zeros(nr_features,dtype=np.int) with gzip.open(read_file, \"rt\") as handle: for seq in SeqIO.parse(handle,\"fastq\"):", "composition, start_composition,failStart) = _calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart)) for k in sorted(feature_mapping, key=feature_mapping.get): kidx =", "read file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed overlap\") parser.add_argument(\"outFileStub\", help=\"stub for output files\") args", "fastq read file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed overlap\") parser.add_argument(\"outFileStub\", help=\"stub for output files\")", "= [ feature_mapping[kmer_tuple] for kmer_tuple in window(str_seq.upper(), kmer_len) if kmer_tuple in feature_mapping ]", "size = max + 1 # so we add the max value and", "kmer_hash[kmer] = counter rev_compl = tuple([BASE_COMPLEMENT[x] for x in reversed(kmer)]) kmer_hash[rev_compl] = counter", "returns an array of size = max + 1 # so we add", "kmer_hash, counter def window(seq,n): els = tee(seq,n) for i,el in enumerate(els): for 
_", "composition += composition_v failStart = 0 if seq_len >= kmer_len: startKmer = str_seq[0:kmer_len].upper()", "afterwards # numpy.bincount was found to be much more efficient than # counting", "product(\"ATGC\",repeat=kmer_len): if kmer not in kmer_hash: kmer_hash[kmer] = counter rev_compl = tuple([BASE_COMPLEMENT[x] for", "gzip.open(read_file, \"rt\") as handle: for seq in SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if seq_len<=", "= {} counter = 0 for kmer in product(\"ATGC\",repeat=kmer_len): if kmer not in", "x in reversed(kmer)]) kmer_hash[rev_compl] = counter counter += 1 return kmer_hash, counter def", "key=feature_mapping.get): kidx = feature_mapping[k] print(\"\".join(k) + \",\" + str(kidx) + \",\" + str(composition[kidx])", ">= kmer_len: startKmer = str_seq[0:kmer_len].upper() startKmerT = tuple(startKmer) if startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1", "Counter, OrderedDict from Bio import SeqIO def generate_feature_mapping(kmer_len): BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash =", "kmer_hash: kmer_hash[kmer] = counter rev_compl = tuple([BASE_COMPLEMENT[x] for x in reversed(kmer)]) kmer_hash[rev_compl] =", "kmer in product(\"ATGC\",repeat=kmer_len): if kmer not in kmer_hash: kmer_hash[kmer] = counter rev_compl =", "# Adding pseudo counts before storing in dict composition += composition_v failStart =", "reversed(kmer)]) kmer_hash[rev_compl] = counter counter += 1 return kmer_hash, counter def window(seq,n): els", "sys import argparse import re import logging import numpy as np import pandas", "file\") parser.add_argument(\"kmer_length\", help=\"kmer length assumed overlap\") parser.add_argument(\"outFileStub\", help=\"stub for output files\") args =", "els = tee(seq,n) for i,el in enumerate(els): for _ in range(i): next(el, None)", "import numpy as np import pandas as p from itertools import product, tee", "\",\" + str(composition[kidx]) + \",\" + str(start_composition[kidx]) ) if __name__ == \"__main__\": main(sys.argv[1:])", "+= 1 return kmer_hash, counter def window(seq,n): els = tee(seq,n) for i,el in", "in dict composition += composition_v failStart = 0 if seq_len >= kmer_len: startKmer", "overlap\") parser.add_argument(\"outFileStub\", help=\"stub for output files\") args = parser.parse_args() #import ipdb; ipdb.set_trace() (feature_mapping,", "start_composition,failStart) = _calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart)) for k in sorted(feature_mapping, key=feature_mapping.get): kidx = feature_mapping[k]", "= feature_mapping[k] print(\"\".join(k) + \",\" + str(kidx) + \",\" + str(composition[kidx]) + \",\"", "window(str_seq.upper(), kmer_len) if kmer_tuple in feature_mapping ] # numpy.bincount returns an array of", "help=\"kmer length assumed overlap\") parser.add_argument(\"outFileStub\", help=\"stub for output files\") args = parser.parse_args() #import", "in feature_mapping ] # numpy.bincount returns an array of size = max +", "import sys import argparse import re import logging import numpy as np import", "counter def window(seq,n): els = tee(seq,n) for i,el in enumerate(els): for _ in", "BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {} counter = 0 for kmer in product(\"ATGC\",repeat=kmer_len):", "as handle: for seq in SeqIO.parse(handle,\"fastq\"): seq_len = len(seq) if seq_len<= length_threshold: continue", "composition, start_composition, failStart def main(argv): parser = argparse.ArgumentParser() 
parser.add_argument(\"read_file\", help=\"gzipped fastq read file\")", "startKmer = str_seq[0:kmer_len].upper() startKmerT = tuple(startKmer) if startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1", "kmers.append(nr_features - 1) composition_v = np.bincount(np.array(kmers)) composition_v[-1] -= 1 # Adding pseudo counts", "_ in range(i): next(el, None) return zip(*els) def _calculate_composition(read_file, kmer_len, length_threshold=25): #Generate kmer", "ipdb.set_trace() (feature_mapping, composition, start_composition,failStart) = _calculate_composition(args.read_file, int(args.kmer_length)) print(str(failStart)) for k in sorted(feature_mapping, key=feature_mapping.get):", "kmer_len) if kmer_tuple in feature_mapping ] # numpy.bincount returns an array of size", "be much more efficient than # counting manually or using collections.Counter kmers.append(nr_features -", "i,el in enumerate(els): for _ in range(i): next(el, None) return zip(*els) def _calculate_composition(read_file,", "= generate_feature_mapping(kmer_len) composition = np.zeros(nr_features,dtype=np.int) start_composition = np.zeros(nr_features,dtype=np.int) with gzip.open(read_file, \"rt\") as handle:", "we add the max value and remove it afterwards # numpy.bincount was found", "+ 1 # so we add the max value and remove it afterwards", "str(kidx) + \",\" + str(composition[kidx]) + \",\" + str(start_composition[kidx]) ) if __name__ ==", "start_composition = np.zeros(nr_features,dtype=np.int) with gzip.open(read_file, \"rt\") as handle: for seq in SeqIO.parse(handle,\"fastq\"): seq_len", "re import logging import numpy as np import pandas as p from itertools", "startKmerT = tuple(startKmer) if startKmerT in feature_mapping: start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return feature_mapping, composition,", "Create a list containing all kmers, translated to integers kmers = [ feature_mapping[kmer_tuple]", "generate_feature_mapping(kmer_len): BASE_COMPLEMENT = {\"A\":\"T\",\"T\":\"A\",\"G\":\"C\",\"C\":\"G\"} kmer_hash = {} counter = 0 for kmer in", "for i,el in enumerate(els): for _ in range(i): next(el, None) return zip(*els) def", "was found to be much more efficient than # counting manually or using", "before storing in dict composition += composition_v failStart = 0 if seq_len >=", "start_composition[feature_mapping[startKmerT]]+=1 else: failStart+=1 return feature_mapping, composition, start_composition, failStart def main(argv): parser = argparse.ArgumentParser()", "return feature_mapping, composition, start_composition, failStart def main(argv): parser = argparse.ArgumentParser() parser.add_argument(\"read_file\", help=\"gzipped fastq", "window(seq,n): els = tee(seq,n) for i,el in enumerate(els): for _ in range(i): next(el,", "= np.zeros(nr_features,dtype=np.int) start_composition = np.zeros(nr_features,dtype=np.int) with gzip.open(read_file, \"rt\") as handle: for seq in", "len(seq) if seq_len<= length_threshold: continue str_seq = str(seq.seq) # Create a list containing", "kmers, translated to integers kmers = [ feature_mapping[kmer_tuple] for kmer_tuple in window(str_seq.upper(), kmer_len)", "start_composition, failStart def main(argv): parser = argparse.ArgumentParser() parser.add_argument(\"read_file\", help=\"gzipped fastq read file\") parser.add_argument(\"kmer_length\",", "list containing all kmers, translated to integers kmers = [ feature_mapping[kmer_tuple] for kmer_tuple", "found to be much more efficient than # counting manually or using 
import sys
import gzip
import argparse
import re
import logging
import numpy as np
import pandas as p
from itertools import product, tee
from collections import Counter, OrderedDict
from Bio import SeqIO


def generate_feature_mapping(kmer_len):
    BASE_COMPLEMENT = {"A": "T", "T": "A", "G": "C", "C": "G"}
    kmer_hash = {}
    counter = 0
    for kmer in product("ATGC", repeat=kmer_len):
        if kmer not in kmer_hash:
            kmer_hash[kmer] = counter
            rev_compl = tuple([BASE_COMPLEMENT[x] for x in reversed(kmer)])
            kmer_hash[rev_compl] = counter
            counter += 1
    return kmer_hash, counter


def window(seq, n):
    els = tee(seq, n)
    for i, el in enumerate(els):
        for _ in range(i):
            next(el, None)
    return zip(*els)


def _calculate_composition(read_file, kmer_len, length_threshold=25):
    # Generate kmer dictionary
    feature_mapping, nr_features = generate_feature_mapping(kmer_len)
    composition = np.zeros(nr_features, dtype=np.int64)
    start_composition = np.zeros(nr_features, dtype=np.int64)
    failStart = 0
    with gzip.open(read_file, "rt") as handle:
        for seq in SeqIO.parse(handle, "fastq"):
            seq_len = len(seq)
            if seq_len <= length_threshold:
                continue
            str_seq = str(seq.seq)
            # Create a list containing all kmers, translated to integers
            kmers = [
                feature_mapping[kmer_tuple]
                for kmer_tuple in window(str_seq.upper(), kmer_len)
                if kmer_tuple in feature_mapping
            ]
            # numpy.bincount returns an array of size = max + 1,
            # so we add the max value and remove it afterwards.
            # numpy.bincount was found to be much more efficient than
            # counting manually or using collections.Counter
            kmers.append(nr_features - 1)
            composition_v = np.bincount(np.array(kmers))
            composition_v[-1] -= 1
            # Accumulate counts for this read
            composition += composition_v
            if seq_len >= kmer_len:
                startKmer = str_seq[0:kmer_len].upper()
                startKmerT = tuple(startKmer)
                if startKmerT in feature_mapping:
                    start_composition[feature_mapping[startKmerT]] += 1
                else:
                    failStart += 1
    return feature_mapping, composition, start_composition, failStart


def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("read_file", help="gzipped fastq read file")
    parser.add_argument("kmer_length", help="kmer length assumed overlap")
    parser.add_argument("outFileStub", help="stub for output files")
    args = parser.parse_args()
    #import ipdb; ipdb.set_trace()
    (feature_mapping, composition, start_composition, failStart) = _calculate_composition(
        args.read_file, int(args.kmer_length))
    print(str(failStart))
    for k in sorted(feature_mapping, key=feature_mapping.get):
        kidx = feature_mapping[k]
        print("".join(k) + "," + str(kidx) + ","
              + str(composition[kidx]) + "," + str(start_composition[kidx]))


if __name__ == "__main__":
    main(sys.argv[1:])
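# Illustrative sketch, separate from the script above: np.bincount sizes its
# output to max(input) + 1, so appending the sentinel index nr_features - 1
# (and subtracting it back out) guarantees a count vector of fixed length.
# A minimal standalone check with made-up indices:
import numpy as np

nr_features = 6
kmers = [0, 2, 2, 4]             # hypothetical indices from a feature mapping
kmers.append(nr_features - 1)    # sentinel forces output length nr_features
counts = np.bincount(np.array(kmers))
counts[-1] -= 1                  # remove the sentinel's contribution
assert counts.tolist() == [1, 0, 2, 0, 1, 0]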
[ "b. The symmetry of the histogram reflects unbiased estimators. c. The functional form", "enumerate(self.X): self.Y.append(measurer.GeneratePointWithResolution(self.quadcoeff*entry**2+self.m*entry+self.b)) def FitLine(self): self.coeffs = np.polyfit(self.X, self.Y, 1) self.ChiSquare=np.sum((((self.coeffs[0]*self.X+self.coeffs[1])-self.Y)/self.res) ** 2) self.quadcoeffs=np.polyfit(self.X,", "Classical limits also handle systematics in a counterintuitive way, such that a bad", "the Poisson distribution using a Poisson lookup table class MeasurementTaker: def __init__(self, resolution):", "self.res=2 self.X=np.linspace(1,15, 15) self.Y=[] self.x=0 self.residuals=0 self.ChiSquare=0 if self.degree == 1: self.BuildLine() else:", "3. a. I sampled the distribution using the cdf; for reference I included", "error bars. Instead, I transformed to first-order (d/dx log x), using \\sigma_{D,log}=\\sigma_D/D c.", "fit in red, and the quadratic fit in blue. b. The symmetry of", "to be around 1, and this is the result for both fits. *", "range(0, numSamples): self.GenerateSample() def AnalyzeDistro(self, index): y,binEdges = np.histogram(self.samples,bins=10) bincenters = 0.5*(binEdges[1:]+binEdges[:-1]) menStd", "\\mu_t. d. 1/\\mu_t. He justifies that this is invariant over changes of power", "==1: theNdf=12 else: theNdf=13 chispace=np.linspace(0,theNdf*3,1000) chidist=stat.chi2(theNdf,1) plt.plot(chispace/theNdf, chidist.pdf(chispace)) plt.xlabel(\"X^2\") plt.ylabel(\"P\") plt.title(\"Chi-square distribution (ndf", "see a bias in the distribution that favors the left side. I verified", "fail to consider physical limitations on the measured parameter. Classical limits also handle", "bars for log data is not entirely trivial because applying the logarithm literally", "the distribution that favors the left side. I verified this by reducing the", "distribution is 16% (or (100-CL)/2 %). Often constructed with a likelihood function, finding", "symmetry of the histogram reflects unbiased estimators. c. The functional form is: 1/(2^{df/2}\\Gamma(df/2))", "outputs for both cases. Rather than wrestle with masking or reassigning garbage datasets", "plt.xlabel(\"Tau\") plt.ylabel(\"Entries\") plt.title(\"Estimated Tau\") plt.savefig(\"3c_tau_hist_500samples.png\") Taus=[] for i in range(0,500): if i %", "import numpy as np import scipy.stats as stat #Samples from the PDF and", "lookup_x=[] lookup_y=[] cdf=[] maxcdf=0 def GenerateSample(self): randomNumber = random.uniform(theExponential.cdf[0],theExponential.maxcdf) index=-1 if randomNumber <", "estimators. c. The functional form is: 1/(2^{df/2}\\Gamma(df/2)) x^{df/2-1}e^{-x/2} The single parameter, df, is", "plot. The files ending in \"_quad.pdf\" are from the second iteration with a", "usefulness, but the conversion between the two statistics is simple. d. In the", "theLine(degree) Lines.append(line) if j == 1: line.PlotLine(\"2a_line\"+fileEnding) if i == 0: slopes.append(line.coeffs[0]) intercepts.append(line.coeffs[1])", "1. This is also expected. 3. a. 
I sampled the distribution using the", "they must be injected with a prior PDF to construct the posterior from", "plt.errorbar(self.X,self.Y,xerr=0,yerr=2) plt.plot(self.X,self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2]) plt.plot(self.X,self.coeffs[0]*self.X+self.coeffs[1]) plt.xlabel(\"x\") plt.ylabel(\"y\") plt.title(\"The Line\") plt.savefig(title) plt.clf() class theExponential: lookup_x=[] lookup_y=[]", "random.uniform(theExponential.cdf[0],theExponential.maxcdf) index=-1 if randomNumber < theExponential.cdf[0]: index=0 else: for i in range(0,len(theExponential.cdf)-1): if", "from the PDF and computes the mean. #Maps random reals in (0,1) to", "in (0,1) to the Poisson distribution using a Poisson lookup table class MeasurementTaker:", "all results for which the fit failed. ''' import random import math import", "for j in range(0,1000): line = theLine(degree) Lines.append(line) if j == 1: line.PlotLine(\"2a_line\"+fileEnding)", "in range(0,1000): line = theLine(degree) Lines.append(line) if j == 1: line.PlotLine(\"2a_line\"+fileEnding) if i", "chi square will rise. In the case of the quadratic data, the linear", "15) self.Y=[] self.x=0 self.residuals=0 self.ChiSquare=0 if self.degree == 1: self.BuildLine() else: self.BuildLineQuadratic() self.FitLine()", "for reference I included both the plot of the distrubution and the cdf.", "lookup_y=[] cdf=[] maxcdf=0 def GenerateSample(self): randomNumber = random.uniform(theExponential.cdf[0],theExponential.maxcdf) index=-1 if randomNumber < theExponential.cdf[0]:", "comparison I give a normalized reduced Chi2 distribution for df=12 and df=13. Overlaying", "reals in (0,1) to the Poisson distribution using a Poisson lookup table class", "plt.savefig(\"2b_slopes\"+fileEnding) plt.clf() plt.hist(intercepts, bins=100) plt.xlabel(\"b\") plt.ylabel(\"Entries\") plt.title(\"Intercepts histogram\") plt.savefig(\"2b_intercepts\"+fileEnding) plt.clf() if i ==", "=\"+str(theNdf)+\")\") plt.savefig(\"2d_chisq2pdf\"+fileEnding) plt.clf() Taus=[] for i in range(0,500): if i % 100 ==", "width=width, yerr=menStd, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"Entries\") plt.title(str(len(self.samples))+\" exponential samples\") plt.savefig(\"3b_exp_samples.png\") plt.clf() self.logsamples=np.log(y) logcoeffs =", "scipy.stats as stat #Samples from the PDF and computes the mean. #Maps random", "distrubution and the cdf. b. 
Transforming error bars for log data is not", "a counterintuitive way, such that a bad calibration leads to a tighter confidence", "i, entry in enumerate(self.X): self.Y.append(measurer.GeneratePointWithResolution(self.quadcoeff*entry**2+self.m*entry+self.b)) def FitLine(self): self.coeffs = np.polyfit(self.X, self.Y, 1) self.ChiSquare=np.sum((((self.coeffs[0]*self.X+self.coeffs[1])-self.Y)/self.res)", "because of their goodness-of-fit usefulness, but the conversion between the two statistics is", "plt.title(\"Exponential distribution\") plt.savefig(\"3_exponential_dist.png\") plt.clf() plt.plot(theExponential.lookup_x, theExponential.cdf) plt.xlabel(\"x\") plt.ylabel(\"cdf\") plt.title(\"Exponential cdf\") plt.savefig(\"3_exponential_cdf.png\") plt.clf() for", "i ==1: theNdf=12 else: theNdf=13 chispace=np.linspace(0,theNdf*3,1000) chidist=stat.chi2(theNdf,1) plt.plot(chispace/theNdf, chidist.pdf(chispace)) plt.xlabel(\"X^2\") plt.ylabel(\"P\") plt.title(\"Chi-square distribution", "taken from the distribution by a factor of 10 and re-running, giving bins", "by a factor of 10 and re-running, giving bins that are much less", "__init__(self, resolution): self.theResolution=resolution def GeneratePointWithResolution(self, val): point=np.random.normal(loc=val,scale=self.theResolution) return point class theLine: def __init__(self,", "blue, the linear fit in red, and the quadratic fit in blue. b.", "PDF to construct the posterior from the likelihood function. Classical intervals fail to", "1: plt.bar(bincenters, y, width=width, yerr=menStd, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"Entries\") plt.title(str(len(self.samples))+\" exponential samples\") plt.savefig(\"3b_exp_samples.png\") plt.clf()", "0: print(i) exp = theExponential(500) result=exp.AnalyzeDistro(i) if math.isnan(result) == False: Taus.append(result) print(Taus) plt.hist(Taus,", "either 12 or 13. For the linear fit, we have two free parameters", "the posterior PDF for \\mu_t. d. 1/\\mu_t. He justifies that this is invariant", "plt.title(\"The Line\") plt.savefig(title) plt.clf() class theExponential: lookup_x=[] lookup_y=[] cdf=[] maxcdf=0 def GenerateSample(self): randomNumber", "\\mu_t. e. Bayesian methods fail to be objective: they must be injected with", "in the fit. Since we have 15 data points, this is either 12", "10000) theExponential.lookup_y=np.exp(-theExponential.lookup_x) runningAverage=0 for val in theExponential.lookup_y: runningAverage=runningAverage+val theExponential.cdf.append(runningAverage) theExponential.maxcdf=theExponential.cdf[len(theExponential.cdf)-1] plt.clf() print(\"Running...\") plt.plot(theExponential.lookup_x,", "in range(0,500): if i % 100 == 0: print(i) exp = theExponential(500) result=exp.AnalyzeDistro(i)", "mean. #Maps random reals in (0,1) to the Poisson distribution using a Poisson", "conversion between the two statistics is simple. d. In the case of the", "or reassigning garbage datasets post-log, I discarded all results for which the fit", "second iteration with a quadratic dataset. a. The data are shown in blue,", "plt.plot(self.X,self.coeffs[0]*self.X+self.coeffs[1]) plt.xlabel(\"x\") plt.ylabel(\"y\") plt.title(\"The Line\") plt.savefig(title) plt.clf() class theExponential: lookup_x=[] lookup_y=[] cdf=[] maxcdf=0", "that generally people use classical statistics except when it produces things that 'seem'", "The symmetry of the histogram reflects unbiased estimators. c. The functional form is:", "on the measured parameter. 
Classical limits also handle systematics in a counterintuitive way,", "else: self.BuildLineQuadratic() self.FitLine() def BuildLine(self): measurer = MeasurementTaker(2) for i, entry in enumerate(self.X):", "slopes=[] intercepts=[] quads=[] chisqs=[] chisqquads=[] for j in range(0,1000): line = theLine(degree) Lines.append(line)", "data are shown in blue, the linear fit in red, and the quadratic", "be objective: they must be injected with a prior PDF to construct the", "plt.clf() return -1*logcoeffs[0] def __init__(self, nSamples): self.samples=[] self.logbins=[] self.GenerateNSamples(nSamples) theExponential.lookup_x=np.linspace(0, 5, 10000) theExponential.lookup_y=np.exp(-theExponential.lookup_x)", "plt.xlabel(\"a (quadratic coefficient)\") plt.ylabel(\"Entries\") plt.title(\"Quadratic coefficient histogram\") plt.savefig(\"2b_quads\"+fileEnding) plt.clf() plt.hist(chisqs, bins=100) plt.xlabel(\"X^2 /", "plt.xlabel(\"X^2 / ndf\") plt.ylabel(\"Entries\") plt.title(\"Chi-square of linear fit\") plt.savefig(\"2c_chisq\"+fileEnding) plt.clf() plt.hist(chisqquads, bins=100) plt.xlabel(\"X^2", "range(0,500): if i % 100 == 0: print(i) exp = theExponential(50) result=exp.AnalyzeDistro(i) if", "in the distribution that favors the left side. I verified this by reducing", "a. Frequentist confidence intervals do not respect the physical limitations imposed on a", "bins=20) plt.xlabel(\"Tau\") plt.ylabel(\"Entries\") plt.title(\"Estimated Tau\") plt.savefig(\"3c_tau_hist_500samples.png\") Taus=[] for i in range(0,500): if i", "plt.title(\"Estimated Tau\") plt.savefig(\"3c_tau_hist_500samples.png\") Taus=[] for i in range(0,500): if i % 100 ==", "reduced by 1, so if there is overall no noise we can fit", "have two free parameters so df=13; for the quadratic fit with three free", "of freedom in the fit. Since we have 15 data points, this is", "menStd = np.sqrt(y) width = 0.20 plt.clf() if index == 1: plt.bar(bincenters, y,", "val): point=np.random.normal(loc=val,scale=self.theResolution) return point class theLine: def __init__(self, degree): self.quadcoeff=1 self.degree=degree self.m=2 self.b=6", "theExponential(500) result=exp.AnalyzeDistro(i) if math.isnan(result) == False: Taus.append(result) print(Taus) plt.hist(Taus, bins=20) plt.xlabel(\"Tau\") plt.ylabel(\"Entries\") plt.title(\"Estimated", "simple. d. In the case of the linear data, the fit gets worse.", "<NAME> 1. a. Frequentist confidence intervals do not respect the physical limitations imposed", "in blue, the linear fit in red, and the quadratic fit in blue.", "likelihood function, finding where the likelihood reduces by a half. c. We need", "(quadratic coefficient)\") plt.ylabel(\"Entries\") plt.title(\"Quadratic coefficient histogram\") plt.savefig(\"2b_quads\"+fileEnding) plt.clf() plt.hist(chisqs, bins=100) plt.xlabel(\"X^2 / ndf\")", "self.m=2 self.b=6 self.res=2 self.X=np.linspace(1,15, 15) self.Y=[] self.x=0 self.residuals=0 self.ChiSquare=0 if self.degree == 1:", "prior PDF to construct the posterior from the likelihood function. Classical intervals fail", "the plot of the distrubution and the cdf. b. Transforming error bars for", "width = 0.20 plt.clf() if index == 1: plt.bar(bincenters, y, width=width, yerr=menStd, ecolor='g')", "the distribution using the cdf; for reference I included both the plot of", "in \"_quad.pdf\" are from the second iteration with a quadratic dataset. a. 
The", "table class MeasurementTaker: def __init__(self, resolution): self.theResolution=resolution def GeneratePointWithResolution(self, val): point=np.random.normal(loc=val,scale=self.theResolution) return point", "plt.clf() plt.hist(intercepts, bins=100) plt.xlabel(\"b\") plt.ylabel(\"Entries\") plt.title(\"Intercepts histogram\") plt.savefig(\"2b_intercepts\"+fileEnding) plt.clf() if i == 1:", "the histogram reflects unbiased estimators. c. The functional form is: 1/(2^{df/2}\\Gamma(df/2)) x^{df/2-1}e^{-x/2} The", "be around 1, and this is the result for both fits. * For", "else: theNdf=13 chispace=np.linspace(0,theNdf*3,1000) chidist=stat.chi2(theNdf,1) plt.plot(chispace/theNdf, chidist.pdf(chispace)) plt.xlabel(\"X^2\") plt.ylabel(\"P\") plt.title(\"Chi-square distribution (ndf =\"+str(theNdf)+\")\") plt.savefig(\"2d_chisq2pdf\"+fileEnding)", "entirely trivial because applying the logarithm literally yields asymmetric error bars. Instead, I", "to the Poisson distribution using a Poisson lookup table class MeasurementTaker: def __init__(self,", "that we can fit some noise to the new x^2 degree of freedom,", "we can fit some noise to the new x^2 degree of freedom, the", "the tails of the distribution leads to underestimation: that is, we can see", "1) if index == 1: plt.bar(bincenters,self.logsamples,width=width, yerr=menStd/y, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"log Entries\") plt.title(str(len(self.samples))+\" exponential", "plt.plot(theExponential.lookup_x, theExponential.lookup_y) plt.xlabel(\"x\") plt.ylabel(\"$e^{-x}$\") plt.title(\"Exponential distribution\") plt.savefig(\"3_exponential_dist.png\") plt.clf() plt.plot(theExponential.lookup_x, theExponential.cdf) plt.xlabel(\"x\") plt.ylabel(\"cdf\") plt.title(\"Exponential", "exp = theExponential(500) result=exp.AnalyzeDistro(i) if math.isnan(result) == False: Taus.append(result) print(Taus) plt.hist(Taus, bins=20) plt.xlabel(\"Tau\")", "= theExponential(500) result=exp.AnalyzeDistro(i) if math.isnan(result) == False: Taus.append(result) print(Taus) plt.hist(Taus, bins=20) plt.xlabel(\"Tau\") plt.ylabel(\"Entries\")", "BuildLine(self): measurer = MeasurementTaker(2) for i, entry in enumerate(self.X): self.Y.append(measurer.GeneratePointWithResolution(self.m*entry+self.b)) def BuildLineQuadratic(self): measurer", "single parameter, df, is the number of degrees of freedom in the fit.", "the number of samples taken from the distribution by a factor of 10", "The data are shown in blue, the linear fit in red, and the", "for i in range(0,2): fileEnding=0 degree=i+1 if i == 0: fileEnding=\".png\" else: fileEnding=\"_quad.png\"", "fileEnding=0 degree=i+1 if i == 0: fileEnding=\".png\" else: fileEnding=\"_quad.png\" Lines=[] slopes=[] intercepts=[] quads=[]", "i % 100 == 0: print(i) exp = theExponential(50) result=exp.AnalyzeDistro(i) if math.isnan(result) ==", "i, entry in enumerate(self.X): self.Y.append(measurer.GeneratePointWithResolution(self.m*entry+self.b)) def BuildLineQuadratic(self): measurer = MeasurementTaker(2) for i, entry", "doing something very inefficient). From running the experiment 500 times, I can say", "plt.ylabel(\"log Entries\") plt.title(str(len(self.samples))+\" exponential samples\") theFitX=np.linspace(0,5,1000) theFitY=theFitX*logcoeffs[0]+logcoeffs[1] plt.plot(theFitX,theFitY) plt.savefig(\"3b_exp_samples_log.png\") plt.clf() return -1*logcoeffs[0] def", "quadratic plot. 
The files ending in \"_quad.pdf\" are from the second iteration with", "plt.hist(chisqquads, bins=100) plt.xlabel(\"X^2 / ndf\") plt.ylabel(\"Entries\") plt.title(\"Chi-square of quadratic fit\") plt.savefig(\"2d_chisq2\"+fileEnding) plt.clf() theNdf=0", "self.Y.append(measurer.GeneratePointWithResolution(self.quadcoeff*entry**2+self.m*entry+self.b)) def FitLine(self): self.coeffs = np.polyfit(self.X, self.Y, 1) self.ChiSquare=np.sum((((self.coeffs[0]*self.X+self.coeffs[1])-self.Y)/self.res) ** 2) self.quadcoeffs=np.polyfit(self.X, self.Y,2)", "plt.savefig(\"2b_quads\"+fileEnding) plt.clf() plt.hist(chisqs, bins=100) plt.xlabel(\"X^2 / ndf\") plt.ylabel(\"Entries\") plt.title(\"Chi-square of linear fit\") plt.savefig(\"2c_chisq\"+fileEnding)", "so if there is overall no noise we can fit away, then the", "of a mass. b. Typically, that the probability to be found outside the", "Transforming error bars for log data is not entirely trivial because applying the", "parameters so df=13; for the quadratic fit with three free parameters, df=12. We", "are shown in blue, the linear fit in red, and the quadratic fit", "there is overall no noise we can fit away, then the reduced chi", "sampling of the tails of the distribution leads to underestimation: that is, we", "and this is the result for both fits. * For comparison I give", "MeasurementTaker: def __init__(self, resolution): self.theResolution=resolution def GeneratePointWithResolution(self, val): point=np.random.normal(loc=val,scale=self.theResolution) return point class theLine:", "np.polyfit(bincenters, self.logsamples, 1) if index == 1: plt.bar(bincenters,self.logsamples,width=width, yerr=menStd/y, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"log Entries\")", "the quadratic data, the linear fit is abysmal and the quadratic fit is", "import math import matplotlib.pyplot as plt import numpy as np import scipy.stats as", "ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"log Entries\") plt.title(str(len(self.samples))+\" exponential samples\") theFitX=np.linspace(0,5,1000) theFitY=theFitX*logcoeffs[0]+logcoeffs[1] plt.plot(theFitX,theFitY) plt.savefig(\"3b_exp_samples_log.png\") plt.clf() return", "garbage datasets post-log, I discarded all results for which the fit failed. '''", "seems that generally people use classical statistics except when it produces things that", "problem for a quadratic plot. The files ending in \"_quad.pdf\" are from the", "plt.title(\"Intercepts histogram\") plt.savefig(\"2b_intercepts\"+fileEnding) plt.clf() if i == 1: plt.hist(intercepts, bins=100) plt.xlabel(\"a (quadratic coefficient)\")", "i in range(0,500): if i % 100 == 0: print(i) exp = theExponential(500)", "mean and statistical error and use a Bayesian analysis of the detector sensitivity.", "the detector sensitivity. 2. I repeated this entire problem for a quadratic plot.", "not obviously easy, but comparing by-eye they are identical. I plotted reduced chi", "quadratic fit with three free parameters, df=12. We expected the reduced chi square", "well-populated. I attached outputs for both cases. Rather than wrestle with masking or", "== 0: print(i) exp = theExponential(50) result=exp.AnalyzeDistro(i) if math.isnan(result) == False: Taus.append(result) print(Taus)", "fileEnding=\".png\" else: fileEnding=\"_quad.png\" Lines=[] slopes=[] intercepts=[] quads=[] chisqs=[] chisqquads=[] for j in range(0,1000):", "math import matplotlib.pyplot as plt import numpy as np import scipy.stats as stat", "linear fit is abysmal and the quadratic fit is around 1. 
This is", "the linear fit, we have two free parameters so df=13; for the quadratic", "the two statistics is simple. d. In the case of the linear data,", "will rise. In the case of the quadratic data, the linear fit is", "fit is abysmal and the quadratic fit is around 1. This is also", "analysis on the mean and statistical error and use a Bayesian analysis of", "with a prior PDF to construct the posterior from the likelihood function. Classical", "perform classical analysis on the mean and statistical error and use a Bayesian", "from the second iteration with a quadratic dataset. a. The data are shown", "is also expected. 3. a. I sampled the distribution using the cdf; for", "when it produces things that 'seem' wrong, in which case use Bayesian. f.", "the linear data, the fit gets worse. It is difficult to predict what", "theNdf=0 if i ==1: theNdf=12 else: theNdf=13 chispace=np.linspace(0,theNdf*3,1000) chidist=stat.chi2(theNdf,1) plt.plot(chispace/theNdf, chidist.pdf(chispace)) plt.xlabel(\"X^2\") plt.ylabel(\"P\")", "on both sides of the distribution is 16% (or (100-CL)/2 %). Often constructed", "for which the fit failed. ''' import random import math import matplotlib.pyplot as", "the reduced chi square to be around 1, and this is the result", "self.FitLine() def BuildLine(self): measurer = MeasurementTaker(2) for i, entry in enumerate(self.X): self.Y.append(measurer.GeneratePointWithResolution(self.m*entry+self.b)) def", "plt.savefig(\"3b_exp_samples.png\") plt.clf() self.logsamples=np.log(y) logcoeffs = np.polyfit(bincenters, self.logsamples, 1) if index == 1: plt.bar(bincenters,self.logsamples,width=width,", "self.quadcoeff=1 self.degree=degree self.m=2 self.b=6 self.res=2 self.X=np.linspace(1,15, 15) self.Y=[] self.x=0 self.residuals=0 self.ChiSquare=0 if self.degree", "plt.clf() if i == 1: plt.hist(intercepts, bins=100) plt.xlabel(\"a (quadratic coefficient)\") plt.ylabel(\"Entries\") plt.title(\"Quadratic coefficient", "plt.title(\"Chi-square distribution (ndf =\"+str(theNdf)+\")\") plt.savefig(\"2d_chisq2pdf\"+fileEnding) plt.clf() Taus=[] for i in range(0,500): if i", "plt.hist(Taus, bins=20) plt.xlabel(\"Tau\") plt.ylabel(\"Entries\") plt.title(\"Estimated Tau\") plt.savefig(\"3c_tau_hist_500samples.png\") Taus=[] for i in range(0,500): if", "to run this with a large number of statistics (maybe I am doing", "theExponential.lookup_y: runningAverage=runningAverage+val theExponential.cdf.append(runningAverage) theExponential.maxcdf=theExponential.cdf[len(theExponential.cdf)-1] plt.clf() print(\"Running...\") plt.plot(theExponential.lookup_x, theExponential.lookup_y) plt.xlabel(\"x\") plt.ylabel(\"$e^{-x}$\") plt.title(\"Exponential distribution\") plt.savefig(\"3_exponential_dist.png\")", "the conversion between the two statistics is simple. d. In the case of", "yields asymmetric error bars. Instead, I transformed to first-order (d/dx log x), using", "__init__(self, degree): self.quadcoeff=1 self.degree=degree self.m=2 self.b=6 self.res=2 self.X=np.linspace(1,15, 15) self.Y=[] self.x=0 self.residuals=0 self.ChiSquare=0", "df=13. Overlaying them was not obviously easy, but comparing by-eye they are identical.", "Typically, that the probability to be found outside the interval on both sides", "the probability to be found outside the interval on both sides of the", "likelihood function. Classical intervals fail to consider physical limitations on the measured parameter.", "intervals fail to consider physical limitations on the measured parameter. 
Classical limits also", "I am doing something very inefficient). From running the experiment 500 times, I", "plt.ylabel(\"cdf\") plt.title(\"Exponential cdf\") plt.savefig(\"3_exponential_cdf.png\") plt.clf() for i in range(0,2): fileEnding=0 degree=i+1 if i", "Since we have 15 data points, this is either 12 or 13. For", "range(0,500): if i % 100 == 0: print(i) exp = theExponential(500) result=exp.AnalyzeDistro(i) if", "plt.title(str(len(self.samples))+\" exponential samples\") theFitX=np.linspace(0,5,1000) theFitY=theFitX*logcoeffs[0]+logcoeffs[1] plt.plot(theFitX,theFitY) plt.savefig(\"3b_exp_samples_log.png\") plt.clf() return -1*logcoeffs[0] def __init__(self, nSamples):", "in range(0,2): fileEnding=0 degree=i+1 if i == 0: fileEnding=\".png\" else: fileEnding=\"_quad.png\" Lines=[] slopes=[]", "theExponential(50) result=exp.AnalyzeDistro(i) if math.isnan(result) == False: Taus.append(result) print(Taus) plt.hist(Taus, bins=20) plt.xlabel(\"Tau\") plt.ylabel(\"Entries\") plt.title(\"Estimated", "because applying the logarithm literally yields asymmetric error bars. Instead, I transformed to", "2) self.quadcoeffs=np.polyfit(self.X, self.Y,2) self.ChiSquareQuad=np.sum((((self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2])-self.Y)/self.res)**2) def PlotLine(self, title): plt.errorbar(self.X,self.Y,xerr=0,yerr=2) plt.plot(self.X,self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2]) plt.plot(self.X,self.coeffs[0]*self.X+self.coeffs[1]) plt.xlabel(\"x\") plt.ylabel(\"y\") plt.title(\"The", "parameter, df, is the number of degrees of freedom in the fit. Since", "index != -1: self.samples.append(theExponential.lookup_x[index]) def GenerateNSamples(self, numSamples): for i in range(0, numSamples): self.GenerateSample()", "(100-CL)/2 %). Often constructed with a likelihood function, finding where the likelihood reduces", "distribution for df=12 and df=13. Overlaying them was not obviously easy, but comparing", "something very inefficient). 
From running the experiment 500 times, I can say that", "of linear fit\") plt.savefig(\"2c_chisq\"+fileEnding) plt.clf() plt.hist(chisqquads, bins=100) plt.xlabel(\"X^2 / ndf\") plt.ylabel(\"Entries\") plt.title(\"Chi-square of", "class theExponential: lookup_x=[] lookup_y=[] cdf=[] maxcdf=0 def GenerateSample(self): randomNumber = random.uniform(theExponential.cdf[0],theExponential.maxcdf) index=-1 if", "= np.histogram(self.samples,bins=10) bincenters = 0.5*(binEdges[1:]+binEdges[:-1]) menStd = np.sqrt(y) width = 0.20 plt.clf() if", "case of the quadratic data, the linear fit is abysmal and the quadratic", "index): y,binEdges = np.histogram(self.samples,bins=10) bincenters = 0.5*(binEdges[1:]+binEdges[:-1]) menStd = np.sqrt(y) width = 0.20", "yerr=menStd/y, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"log Entries\") plt.title(str(len(self.samples))+\" exponential samples\") theFitX=np.linspace(0,5,1000) theFitY=theFitX*logcoeffs[0]+logcoeffs[1] plt.plot(theFitX,theFitY) plt.savefig(\"3b_exp_samples_log.png\") plt.clf()", "1) self.ChiSquare=np.sum((((self.coeffs[0]*self.X+self.coeffs[1])-self.Y)/self.res) ** 2) self.quadcoeffs=np.polyfit(self.X, self.Y,2) self.ChiSquareQuad=np.sum((((self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2])-self.Y)/self.res)**2) def PlotLine(self, title): plt.errorbar(self.X,self.Y,xerr=0,yerr=2) plt.plot(self.X,self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2]) plt.plot(self.X,self.coeffs[0]*self.X+self.coeffs[1])", "verified this by reducing the number of samples taken from the distribution by", "if index == 1: plt.bar(bincenters,self.logsamples,width=width, yerr=menStd/y, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"log Entries\") plt.title(str(len(self.samples))+\" exponential samples\")", "1: plt.bar(bincenters,self.logsamples,width=width, yerr=menStd/y, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"log Entries\") plt.title(str(len(self.samples))+\" exponential samples\") theFitX=np.linspace(0,5,1000) theFitY=theFitX*logcoeffs[0]+logcoeffs[1] plt.plot(theFitX,theFitY)", "== 1: plt.hist(intercepts, bins=100) plt.xlabel(\"a (quadratic coefficient)\") plt.ylabel(\"Entries\") plt.title(\"Quadratic coefficient histogram\") plt.savefig(\"2b_quads\"+fileEnding) plt.clf()", "GenerateNSamples(self, numSamples): for i in range(0, numSamples): self.GenerateSample() def AnalyzeDistro(self, index): y,binEdges =", "included both the plot of the distrubution and the cdf. b. Transforming error", "# -*- coding: utf-8 -*- ''' <NAME> 1. a. Frequentist confidence intervals do", "people use classical statistics except when it produces things that 'seem' wrong, in", "plt.xlabel(\"Value\") plt.ylabel(\"log Entries\") plt.title(str(len(self.samples))+\" exponential samples\") theFitX=np.linspace(0,5,1000) theFitY=theFitX*logcoeffs[0]+logcoeffs[1] plt.plot(theFitX,theFitY) plt.savefig(\"3b_exp_samples_log.png\") plt.clf() return -1*logcoeffs[0]", "noise to the new x^2 degree of freedom, the X^2 will lower. However,", "bincenters = 0.5*(binEdges[1:]+binEdges[:-1]) menStd = np.sqrt(y) width = 0.20 plt.clf() if index ==", "self.GenerateSample() def AnalyzeDistro(self, index): y,binEdges = np.histogram(self.samples,bins=10) bincenters = 0.5*(binEdges[1:]+binEdges[:-1]) menStd = np.sqrt(y)", "log data is not entirely trivial because applying the logarithm literally yields asymmetric", "However, the ndf has reduced by 1, so if there is overall no", "''' <NAME> 1. a. 
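# For reference, the transform in 3b is plain first-order propagation of
# uncertainty: for y = log D,
#   sigma_y ~= |dy/dD| * sigma_D = sigma_D / D,
# which is where \sigma_{D,log} = \sigma_D / D above comes from.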
import random
import math
import matplotlib.pyplot as plt
import numpy as np
import scipy.stats as stat


# Adds Gaussian measurement noise of fixed resolution to a true value.
class MeasurementTaker:
    def __init__(self, resolution):
        self.theResolution = resolution

    def GeneratePointWithResolution(self, val):
        point = np.random.normal(loc=val, scale=self.theResolution)
        return point


# Builds a noisy linear or quadratic dataset and fits both models to it.
class theLine:
    def __init__(self, degree):
        self.quadcoeff = 1
        self.degree = degree
        self.m = 2
        self.b = 6
        self.res = 2
        self.X = np.linspace(1, 15, 15)
        self.Y = []
        self.x = 0
        self.residuals = 0
        self.ChiSquare = 0
        if self.degree == 1:
            self.BuildLine()
        else:
            self.BuildLineQuadratic()
        self.FitLine()

    def BuildLine(self):
        measurer = MeasurementTaker(2)
        for i, entry in enumerate(self.X):
            self.Y.append(measurer.GeneratePointWithResolution(self.m * entry + self.b))

    def BuildLineQuadratic(self):
        measurer = MeasurementTaker(2)
        for i, entry in enumerate(self.X):
            self.Y.append(measurer.GeneratePointWithResolution(
                self.quadcoeff * entry ** 2 + self.m * entry + self.b))

    def FitLine(self):
        self.coeffs = np.polyfit(self.X, self.Y, 1)
        self.ChiSquare = np.sum(
            (((self.coeffs[0] * self.X + self.coeffs[1]) - self.Y) / self.res) ** 2)
        self.quadcoeffs = np.polyfit(self.X, self.Y, 2)
        self.ChiSquareQuad = np.sum(
            (((self.quadcoeffs[0] * self.X ** 2 + self.quadcoeffs[1] * self.X
               + self.quadcoeffs[2]) - self.Y) / self.res) ** 2)

    def PlotLine(self, title):
        plt.errorbar(self.X, self.Y, xerr=0, yerr=2)
        plt.plot(self.X, self.quadcoeffs[0] * self.X ** 2
                 + self.quadcoeffs[1] * self.X + self.quadcoeffs[2])
        plt.plot(self.X, self.coeffs[0] * self.X + self.coeffs[1])
        plt.xlabel("x")
        plt.ylabel("y")
        plt.title("The Line")
        plt.savefig(title)
        plt.clf()
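# A sketch, not part of the original analysis: to overlay the theoretical
# curve directly on the reduced chi-square histograms (2c/2d), change
# variables to Y = X^2/ndf. If X^2 ~ chi2(ndf), then f_Y(y) = ndf * f_chi2(ndf * y):
def reduced_chi2_pdf(y, ndf):
    # Density of X^2/ndf when X^2 follows a chi-square with ndf degrees of freedom.
    return ndf * stat.chi2(ndf).pdf(ndf * y)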
It is difficult to predict what happens here: if we are", "quads.append(line.quadcoeffs[0]) slopes.append(line.quadcoeffs[1]) intercepts.append(line.quadcoeffs[2]) chisqs.append(line.ChiSquare/13) chisqquads.append(line.ChiSquareQuad/12) plt.hist(slopes, bins=100) plt.xlabel(\"m\") plt.ylabel(\"Entries\") plt.title(\"Slopes histogram\") plt.savefig(\"2b_slopes\"+fileEnding) plt.clf()", "mass. b. Typically, that the probability to be found outside the interval on", "then the reduced chi square will rise. In the case of the quadratic", "self.degree=degree self.m=2 self.b=6 self.res=2 self.X=np.linspace(1,15, 15) self.Y=[] self.x=0 self.residuals=0 self.ChiSquare=0 if self.degree ==", "confidence intervals do not respect the physical limitations imposed on a system, ie", "expected the reduced chi square to be around 1, and this is the", "plt.clf() if index == 1: plt.bar(bincenters, y, width=width, yerr=menStd, ecolor='g') plt.xlabel(\"Value\") plt.ylabel(\"Entries\") plt.title(str(len(self.samples))+\"", "plt.clf() theNdf=0 if i ==1: theNdf=12 else: theNdf=13 chispace=np.linspace(0,theNdf*3,1000) chidist=stat.chi2(theNdf,1) plt.plot(chispace/theNdf, chidist.pdf(chispace)) plt.xlabel(\"X^2\")", "where the likelihood reduces by a half. c. We need a prior PDF", "easy, but comparing by-eye they are identical. I plotted reduced chi squares through", "square to be around 1, and this is the result for both fits.", "number of samples taken from the distribution by a factor of 10 and", "function, finding where the likelihood reduces by a half. c. We need a", "in which case use Bayesian. f. As Cousins did, perform classical analysis on", "For the linear fit, we have two free parameters so df=13; for the", "in range(0, numSamples): self.GenerateSample() def AnalyzeDistro(self, index): y,binEdges = np.histogram(self.samples,bins=10) bincenters = 0.5*(binEdges[1:]+binEdges[:-1])", "the number of degrees of freedom in the fit. Since we have 15", "posterior PDF for \\mu_t. d. 1/\\mu_t. He justifies that this is invariant over", "Bayesian analysis of the detector sensitivity. 2. I repeated this entire problem for", "plt.plot(theExponential.lookup_x, theExponential.cdf) plt.xlabel(\"x\") plt.ylabel(\"cdf\") plt.title(\"Exponential cdf\") plt.savefig(\"3_exponential_cdf.png\") plt.clf() for i in range(0,2): fileEnding=0", "theExponential: lookup_x=[] lookup_y=[] cdf=[] maxcdf=0 def GenerateSample(self): randomNumber = random.uniform(theExponential.cdf[0],theExponential.maxcdf) index=-1 if randomNumber", "a quadratic plot. The files ending in \"_quad.pdf\" are from the second iteration", "j in range(0,1000): line = theLine(degree) Lines.append(line) if j == 1: line.PlotLine(\"2a_line\"+fileEnding) if", "over changes of power of \\mu_t. e. Bayesian methods fail to be objective:", "I transformed to first-order (d/dx log x), using \\sigma_{D,log}=\\sigma_D/D c. 
2. I repeated this entire problem for a quadratic plot. The files ending in "_quad.png" are from the second iteration, with a quadratic dataset.
a. The data are shown in blue, the linear fit in red, and the quadratic fit in blue.
b. The symmetry of the histogram reflects unbiased estimators.
c. The functional form is

       f(x; df) = 1 / (2^{df/2} \Gamma(df/2)) x^{df/2 - 1} e^{-x/2}

The single parameter, df, is the number of degrees of freedom in the fit. Since we have 15 data points, this is either 12 or 13. For the linear fit we have two free parameters, so df = 13; for the quadratic fit, with three free parameters, df = 12. We expected the reduced chi square to be around 1, and this is the result for both fits.
* For comparison I give a normalized reduced Chi2 distribution for df = 12 and df = 13. Overlaying them was not obviously easy, but comparing by eye they are identical. I plotted reduced chi squares throughout because of their goodness-of-fit usefulness, but the conversion between the two statistics is simple (made explicit in the note after d).
d. In the case of the linear data, the fit gets worse. It is difficult to predict what happens here: if we are lucky enough that we can fit some noise with the new x^2 degree of freedom, the X^2 will lower. However, the ndf has been reduced by 1, so if there is overall no noise we can fit away, then the reduced chi square will rise. In the case of the quadratic data, the linear fit is abysmal and the quadratic fit is around 1. This is also expected.
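[Added note for the starred item above: with the reduced statistic z = X^2/ndf, the density transforms as p_z(z) = ndf * p_{X^2}(ndf z); rescaling each curve this way puts the df = 12 and df = 13 distributions on a common reduced axis for the overlay.]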
3.
a. I sampled the distribution using the cdf; for reference I included both the plot of the distribution and of the cdf.
b. Transforming error bars for log data is not entirely trivial, because applying the logarithm literally yields asymmetric error bars. Instead, I transformed to first order (d/dx log x), using \sigma_{D,log} = \sigma_D / D.
c. It takes a rather long time to run this with a large number of statistics (maybe I am doing something very inefficient). From running the experiment 500 times, I can say that poor sampling of the tails of the distribution leads to underestimation: that is, we can see a bias in the distribution that favors the left side. I verified this by reducing the number of samples taken from the distribution by a factor of 10 and re-running, giving bins that are much less well populated. I attached outputs for both cases. Rather than wrestle with masking or reassigning garbage datasets post-log, I discarded all results for which the fit failed.
'''
import random
import math

import matplotlib.pyplot as plt
import numpy as np
import scipy.stats as stat
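# Added aside (illustrating 1b; not part of the original analysis): a minimal
# numerical version of the half-max rule, scanning -2 ln L for the points where
# it rises by 1 above its minimum. The helper name is hypothetical.
def halfmax_interval(x_obs, sigma, mus):
    # -2 ln L up to an additive constant, for a single Gaussian measurement
    nll = ((x_obs - mus) / sigma) ** 2
    inside = mus[nll <= nll.min() + 1.0]
    return inside[0], inside[-1]
# e.g. halfmax_interval(0.0, 1.0, np.linspace(-3, 3, 601)) gives roughly (-1.0, 1.0)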
# Adds Gaussian measurement noise of a fixed resolution to a true value.
class MeasurementTaker:
    def __init__(self, resolution):
        self.theResolution = resolution

    def GeneratePointWithResolution(self, val):
        point = np.random.normal(loc=val, scale=self.theResolution)
        return point
class theLine:
    def __init__(self, degree):
        self.quadcoeff = 1
        self.degree = degree
        self.m = 2
        self.b = 6
        self.res = 2
        self.X = np.linspace(1, 15, 15)
        self.Y = []
        self.x = 0
        self.residuals = 0
        self.ChiSquare = 0
        if self.degree == 1:
            self.BuildLine()
        else:
            self.BuildLineQuadratic()
        self.FitLine()

    def BuildLine(self):
        # y = m*x + b, smeared with the measurement resolution
        measurer = MeasurementTaker(2)
        for i, entry in enumerate(self.X):
            self.Y.append(measurer.GeneratePointWithResolution(self.m * entry + self.b))

    def BuildLineQuadratic(self):
        # y = a*x^2 + m*x + b, smeared with the measurement resolution
        measurer = MeasurementTaker(2)
        for i, entry in enumerate(self.X):
            self.Y.append(measurer.GeneratePointWithResolution(self.quadcoeff * entry ** 2 + self.m * entry + self.b))

    def FitLine(self):
        # Fit both hypotheses to the same dataset and record each chi-square.
        self.coeffs = np.polyfit(self.X, self.Y, 1)
        self.ChiSquare = np.sum((((self.coeffs[0] * self.X + self.coeffs[1]) - self.Y) / self.res) ** 2)
        self.quadcoeffs = np.polyfit(self.X, self.Y, 2)
        self.ChiSquareQuad = np.sum((((self.quadcoeffs[0] * self.X ** 2 + self.quadcoeffs[1] * self.X + self.quadcoeffs[2]) - self.Y) / self.res) ** 2)

    def PlotLine(self, title):
        plt.errorbar(self.X, self.Y, xerr=0, yerr=2)
        plt.plot(self.X, self.quadcoeffs[0] * self.X ** 2 + self.quadcoeffs[1] * self.X + self.quadcoeffs[2])
        plt.plot(self.X, self.coeffs[0] * self.X + self.coeffs[1])
        plt.xlabel("x")
        plt.ylabel("y")
        plt.title("The Line")
        plt.savefig(title)
        plt.clf()
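# Added aside (not in the original script): the goodness-of-fit p-values that
# correspond to the reduced chi-squares discussed in 2c; the helper name is
# hypothetical.
def fit_pvalues(line):
    p_lin = stat.chi2.sf(line.ChiSquare, 13)       # linear fit: ndf = 15 - 2
    p_quad = stat.chi2.sf(line.ChiSquareQuad, 12)  # quadratic fit: ndf = 15 - 3
    return p_lin, p_quad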
# Samples from the PDF and estimates its decay constant tau.
# Maps random reals in (0, maxcdf) to the exponential distribution using a
# lookup-table cdf.
class theExponential:
    lookup_x = []
    lookup_y = []
    cdf = []
    maxcdf = 0

    def GenerateSample(self):
        randomNumber = random.uniform(theExponential.cdf[0], theExponential.maxcdf)
        index = -1
        if randomNumber < theExponential.cdf[0]:
            index = 0
        else:
            # Linear scan for the cdf bin that brackets the random draw.
            for i in range(0, len(theExponential.cdf) - 1):
                if randomNumber > theExponential.cdf[i] and randomNumber < theExponential.cdf[i + 1]:
                    index = i + 1
        if index != -1:
            self.samples.append(theExponential.lookup_x[index])

    def GenerateNSamples(self, numSamples):
        for i in range(0, numSamples):
            self.GenerateSample()

    def AnalyzeDistro(self, index):
        y, binEdges = np.histogram(self.samples, bins=10)
        bincenters = 0.5 * (binEdges[1:] + binEdges[:-1])
        menStd = np.sqrt(y)
        width = 0.20
        plt.clf()
        if index == 1:
            plt.bar(bincenters, y, width=width, yerr=menStd, ecolor='g')
            plt.xlabel("Value")
            plt.ylabel("Entries")
            plt.title(str(len(self.samples)) + " exponential samples")
            plt.savefig("3b_exp_samples.png")
            plt.clf()
        # First-order transformed error bars: sigma_log = sigma_D / D (see 3b).
        self.logsamples = np.log(y)
        logcoeffs = np.polyfit(bincenters, self.logsamples, 1)
        if index == 1:
            plt.bar(bincenters, self.logsamples, width=width, yerr=menStd / y, ecolor='g')
            plt.xlabel("Value")
            plt.ylabel("log Entries")
            plt.title(str(len(self.samples)) + " exponential samples")
            theFitX = np.linspace(0, 5, 1000)
            theFitY = theFitX * logcoeffs[0] + logcoeffs[1]
            plt.plot(theFitX, theFitY)
            plt.savefig("3b_exp_samples_log.png")
            plt.clf()
        # The fitted slope of log(counts) vs x is -1/tau, so -slope estimates
        # 1/tau (equal to tau for the tau = 1 distribution sampled here).
        return -1 * logcoeffs[0]

    def __init__(self, nSamples):
        self.samples = []
        self.logbins = []
        self.GenerateNSamples(nSamples)
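# Added aside (not in the original script): GenerateSample scans the whole cdf
# table per draw, which is the likely slowness noted in 3c. A vectorized sketch
# using binary search over the same tables; the helper name is hypothetical.
def generate_samples_fast(numSamples):
    cdf = np.asarray(theExponential.cdf)
    draws = np.random.uniform(cdf[0], theExponential.maxcdf, numSamples)
    indices = np.clip(np.searchsorted(cdf, draws), 0, len(cdf) - 1)
    return np.asarray(theExponential.lookup_x)[indices]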
theExponential.lookup_x = np.linspace(0, 5, 10000)
theExponential.lookup_y = np.exp(-theExponential.lookup_x)
# Build the (unnormalized) cdf as a running sum of the tabulated pdf.
runningAverage = 0
for val in theExponential.lookup_y:
    runningAverage = runningAverage + val
    theExponential.cdf.append(runningAverage)
theExponential.maxcdf = theExponential.cdf[len(theExponential.cdf) - 1]
plt.clf()
print("Running...")
plt.plot(theExponential.lookup_x, theExponential.lookup_y)
plt.xlabel("x")
plt.ylabel("$e^{-x}$")
plt.title("Exponential distribution")
plt.savefig("3_exponential_dist.png")
plt.clf()
plt.plot(theExponential.lookup_x, theExponential.cdf)
plt.xlabel("x")
plt.ylabel("cdf")
plt.title("Exponential cdf")
plt.savefig("3_exponential_cdf.png")
plt.clf()
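# Added aside (not in the original script): this particular pdf has a
# closed-form inverse cdf, so sampling needs no lookup table at all. A sketch,
# truncated to [0, 5] to match the tables above; the helper name is
# hypothetical.
def sample_exponential_exact(numSamples):
    u = np.random.uniform(0, 1 - np.exp(-5), numSamples)
    return -np.log(1 - u)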
for i in range(0, 2):
    fileEnding = 0
    degree = i + 1
    if i == 0:
        fileEnding = ".png"
    else:
        fileEnding = "_quad.png"
    Lines = []
    slopes = []
    intercepts = []
    quads = []
    chisqs = []
    chisqquads = []
    for j in range(0, 1000):
        line = theLine(degree)
        Lines.append(line)
        if j == 1:
            line.PlotLine("2a_line" + fileEnding)
        if i == 0:
            slopes.append(line.coeffs[0])
            intercepts.append(line.coeffs[1])
        else:
            quads.append(line.quadcoeffs[0])
            slopes.append(line.quadcoeffs[1])
            intercepts.append(line.quadcoeffs[2])
        chisqs.append(line.ChiSquare / 13)
        chisqquads.append(line.ChiSquareQuad / 12)
    plt.hist(slopes, bins=100)
    plt.xlabel("m")
    plt.ylabel("Entries")
    plt.title("Slopes histogram")
    plt.savefig("2b_slopes" + fileEnding)
    plt.clf()
    plt.hist(intercepts, bins=100)
    plt.xlabel("b")
    plt.ylabel("Entries")
    plt.title("Intercepts histogram")
    plt.savefig("2b_intercepts" + fileEnding)
    plt.clf()
    if i == 1:
        # Histogram the quadratic coefficients collected above.
        plt.hist(quads, bins=100)
        plt.xlabel("a (quadratic coefficient)")
        plt.ylabel("Entries")
        plt.title("Quadratic coefficient histogram")
        plt.savefig("2b_quads" + fileEnding)
        plt.clf()
    plt.hist(chisqs, bins=100)
    plt.xlabel("X^2 / ndf")
    plt.ylabel("Entries")
    plt.title("Chi-square of linear fit")
    plt.savefig("2c_chisq" + fileEnding)
    plt.clf()
    plt.hist(chisqquads, bins=100)
    plt.xlabel("X^2 / ndf")
    plt.ylabel("Entries")
    plt.title("Chi-square of quadratic fit")
    plt.savefig("2d_chisq2" + fileEnding)
    plt.clf()
    theNdf = 0
    if i == 1:
        theNdf = 12
    else:
        theNdf = 13
    chispace = np.linspace(0, theNdf * 3, 1000)
    # Reference curve; loc stays at its default of 0 so the pdf starts at X^2 = 0.
    chidist = stat.chi2(theNdf)
    plt.plot(chispace / theNdf, chidist.pdf(chispace))
    plt.xlabel("X^2 / ndf")
    plt.ylabel("P")
    plt.title("Chi-square distribution (ndf = " + str(theNdf) + ")")
    plt.savefig("2d_chisq2pdf" + fileEnding)
    plt.clf()
I repeated this", "line.PlotLine(\"2a_line\"+fileEnding) if i == 0: slopes.append(line.coeffs[0]) intercepts.append(line.coeffs[1]) else: quads.append(line.quadcoeffs[0]) slopes.append(line.quadcoeffs[1]) intercepts.append(line.quadcoeffs[2]) chisqs.append(line.ChiSquare/13) chisqquads.append(line.ChiSquareQuad/12)", "plt.xlabel(\"b\") plt.ylabel(\"Entries\") plt.title(\"Intercepts histogram\") plt.savefig(\"2b_intercepts\"+fileEnding) plt.clf() if i == 1: plt.hist(intercepts, bins=100) plt.xlabel(\"a", "of quadratic fit\") plt.savefig(\"2d_chisq2\"+fileEnding) plt.clf() theNdf=0 if i ==1: theNdf=12 else: theNdf=13 chispace=np.linspace(0,theNdf*3,1000)", "self.ChiSquare=np.sum((((self.coeffs[0]*self.X+self.coeffs[1])-self.Y)/self.res) ** 2) self.quadcoeffs=np.polyfit(self.X, self.Y,2) self.ChiSquareQuad=np.sum((((self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2])-self.Y)/self.res)**2) def PlotLine(self, title): plt.errorbar(self.X,self.Y,xerr=0,yerr=2) plt.plot(self.X,self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2]) plt.plot(self.X,self.coeffs[0]*self.X+self.coeffs[1]) plt.xlabel(\"x\")", "Poisson lookup table class MeasurementTaker: def __init__(self, resolution): self.theResolution=resolution def GeneratePointWithResolution(self, val): point=np.random.normal(loc=val,scale=self.theResolution)", "prior PDF to construct the posterior PDF for \\mu_t. d. 1/\\mu_t. He justifies", "with a likelihood function, finding where the likelihood reduces by a half. c.", "For comparison I give a normalized reduced Chi2 distribution for df=12 and df=13.", "detector sensitivity. 2. I repeated this entire problem for a quadratic plot. The", "self.BuildLineQuadratic() self.FitLine() def BuildLine(self): measurer = MeasurementTaker(2) for i, entry in enumerate(self.X): self.Y.append(measurer.GeneratePointWithResolution(self.m*entry+self.b))", "self.Y, 1) self.ChiSquare=np.sum((((self.coeffs[0]*self.X+self.coeffs[1])-self.Y)/self.res) ** 2) self.quadcoeffs=np.polyfit(self.X, self.Y,2) self.ChiSquareQuad=np.sum((((self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2])-self.Y)/self.res)**2) def PlotLine(self, title): plt.errorbar(self.X,self.Y,xerr=0,yerr=2) plt.plot(self.X,self.quadcoeffs[0]*self.X**2+self.quadcoeffs[1]*self.X+self.quadcoeffs[2])", "wrong, in which case use Bayesian. f. As Cousins did, perform classical analysis", "theNdf=13 chispace=np.linspace(0,theNdf*3,1000) chidist=stat.chi2(theNdf,1) plt.plot(chispace/theNdf, chidist.pdf(chispace)) plt.xlabel(\"X^2\") plt.ylabel(\"P\") plt.title(\"Chi-square distribution (ndf =\"+str(theNdf)+\")\") plt.savefig(\"2d_chisq2pdf\"+fileEnding) plt.clf()", "blue. b. The symmetry of the histogram reflects unbiased estimators. c. The functional", "that are much less well-populated. I attached outputs for both cases. Rather than", "goodness-of-fit usefulness, but the conversion between the two statistics is simple. d. 
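
# The write-up (3b) transforms log-data error bars to first order,
# \sigma_{D,log} = \sigma_D / D. A minimal sketch (my addition, not part of
# the assignment code) comparing that first-order error with the exact
# asymmetric interval log(D +/- sigma_D); the three agree when sigma_D << D:
def _demo_log_errorbars():
    D = np.array([1000.0, 100.0, 10.0])
    sigma_D = np.sqrt(D)        # counting errors
    sigma_log = sigma_D / D     # first-order (delta-method) error on log(D)
    upper = np.log(D + sigma_D) - np.log(D)
    lower = np.log(D) - np.log(D - sigma_D)
    print(sigma_log, upper, lower)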

class MeasurementTaker:
    def __init__(self, resolution):
        self.theResolution = resolution

    def GeneratePointWithResolution(self, val):
        point = np.random.normal(loc=val, scale=self.theResolution)
        return point


class theLine:
    # NOTE: the constants of the true line were not recoverable from the
    # source fragments; the X, m, b and quadcoeff values below are
    # placeholders, only the structure is original.
    def __init__(self, degree):
        self.degree = degree
        self.X = np.linspace(0, 10, 15)  # 15 data points, per the write-up
        self.Y = []
        self.m = 1.0          # placeholder slope
        self.b = 0.0          # placeholder intercept
        self.quadcoeff = 1.0  # placeholder quadratic coefficient
        self.res = 2          # measurement resolution, matching yerr=2 below
        if self.degree == 1:
            self.BuildLine()
        else:
            self.BuildLineQuadratic()
        self.FitLine()

    def BuildLine(self):
        measurer = MeasurementTaker(2)
        for i, entry in enumerate(self.X):
            self.Y.append(measurer.GeneratePointWithResolution(self.m*entry + self.b))

    def BuildLineQuadratic(self):
        measurer = MeasurementTaker(2)
        for i, entry in enumerate(self.X):
            self.Y.append(
                measurer.GeneratePointWithResolution(self.quadcoeff*entry**2 + self.m*entry + self.b)
            )

    def FitLine(self):
        self.coeffs = np.polyfit(self.X, self.Y, 1)
        self.ChiSquare = np.sum((((self.coeffs[0]*self.X + self.coeffs[1]) - self.Y)/self.res) ** 2)
        self.quadcoeffs = np.polyfit(self.X, self.Y, 2)
        self.ChiSquareQuad = np.sum(
            (((self.quadcoeffs[0]*self.X**2 + self.quadcoeffs[1]*self.X + self.quadcoeffs[2]) - self.Y)/self.res) ** 2
        )

    def PlotLine(self, title):
        plt.errorbar(self.X, self.Y, xerr=0, yerr=2)
        plt.plot(self.X, self.quadcoeffs[0]*self.X**2 + self.quadcoeffs[1]*self.X + self.quadcoeffs[2])
        plt.plot(self.X, self.coeffs[0]*self.X + self.coeffs[1])
        plt.xlabel("x")
        plt.ylabel("y")
        plt.savefig(title)
        plt.clf()


# Samples from the PDF and computes the mean.
# Maps random reals in (0, maxcdf) to the exponential distribution using a
# cdf lookup table.
class theExponential:
    lookup_x = []
    lookup_y = []
    cdf = []
    maxcdf = 0

    def GenerateSample(self):
        # Walk the (unnormalized) cdf lookup table to invert a uniform draw.
        randomNumber = random.uniform(0, theExponential.maxcdf)
        index = 0
        if randomNumber < theExponential.cdf[0]:
            index = 0
        else:
            for i in range(0, len(theExponential.cdf)-1):
                if randomNumber > theExponential.cdf[i] and randomNumber < theExponential.cdf[i+1]:
                    index = i + 1
        self.samples.append(theExponential.lookup_x[index])

    def GenerateNSamples(self, numSamples):
        for i in range(0, numSamples):
            self.GenerateSample()

    def AnalyzeDistro(self, index):
        y, binEdges = np.histogram(self.samples, bins=10)
        bincenters = 0.5*(binEdges[1:] + binEdges[:-1])
        menStd = np.sqrt(y)
        width = 0.20
        plt.clf()
        if index == 1:
            plt.bar(bincenters, y, width=width, yerr=menStd, ecolor='g')
            plt.xlabel("Value")
            plt.ylabel("Entries")
            plt.title(str(len(self.samples)) + " exponential samples")
            plt.savefig("3b_exp_samples.png")
            plt.clf()
        self.logsamples = np.log(y)
        logcoeffs = np.polyfit(bincenters, self.logsamples, 1)
        if index == 1:
            # First-order transformed error bars: sigma_log = sigma/D (see 3b).
            plt.bar(bincenters, self.logsamples, width=width, yerr=menStd/y, ecolor='g')
            plt.xlabel("Value")
            plt.ylabel("log Entries")
            plt.title(str(len(self.samples)) + " exponential samples")
            theFitX = np.linspace(0, 5, 1000)
            theFitY = theFitX*logcoeffs[0] + logcoeffs[1]
            plt.plot(theFitX, theFitY)
            plt.savefig("3b_exp_samples_log.png")
            plt.clf()
        return -1*logcoeffs[0]

    def __init__(self, nSamples):
        self.samples = []
        self.logbins = []
        self.GenerateNSamples(nSamples)
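
# Sketch of a vectorized alternative to theExponential.GenerateSample (my
# addition, not the author's code): normalizing the lookup-table cdf and
# using np.searchsorted replaces the per-sample Python loop, which is the
# likely source of the slowness noted in 3c.
def _generate_samples_fast(n):
    cdf = np.array(theExponential.cdf) / theExponential.maxcdf
    u = np.random.uniform(0.0, 1.0, size=n)
    # clip guards the (floating-point) edge case u == cdf[-1]
    indices = np.searchsorted(cdf, u).clip(0, len(cdf) - 1)
    return theExponential.lookup_x[indices]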

theExponential.lookup_x = np.linspace(0, 5, 10000)
theExponential.lookup_y = np.exp(-theExponential.lookup_x)
runningAverage = 0
for val in theExponential.lookup_y:
    runningAverage = runningAverage + val
    theExponential.cdf.append(runningAverage)
theExponential.maxcdf = theExponential.cdf[len(theExponential.cdf)-1]
plt.clf()
print("Running...")

# Problem 3a: the distribution and its cdf, for reference.
plt.plot(theExponential.lookup_x, theExponential.lookup_y)
plt.xlabel("x")
plt.ylabel("$e^{-x}$")
plt.title("Exponential distribution")
plt.savefig("3_exponential_dist.png")
plt.clf()
plt.plot(theExponential.lookup_x, theExponential.cdf)
plt.xlabel("x")
plt.ylabel("cdf")
plt.title("Exponential cdf")
plt.savefig("3_exponential_cdf.png")
plt.clf()

# Problem 2: i == 0 is the linear dataset, i == 1 the quadratic repeat.
for i in range(0, 2):
    fileEnding = 0
    degree = i + 1
    if i == 0:
        fileEnding = ".png"
    else:
        fileEnding = "_quad.png"
    Lines = []
    slopes = []
    intercepts = []
    quads = []
    chisqs = []
    chisqquads = []
    for j in range(0, 1000):
        line = theLine(degree)
        Lines.append(line)
        if j == 1:
            line.PlotLine("2a_line" + fileEnding)
        if i == 0:
            slopes.append(line.coeffs[0])
            intercepts.append(line.coeffs[1])
        else:
            quads.append(line.quadcoeffs[0])
            slopes.append(line.quadcoeffs[1])
            intercepts.append(line.quadcoeffs[2])
        chisqs.append(line.ChiSquare/13)
        chisqquads.append(line.ChiSquareQuad/12)
    plt.hist(slopes, bins=100)
    plt.xlabel("m")
    plt.ylabel("Entries")
    plt.title("Slopes histogram")
    plt.savefig("2b_slopes" + fileEnding)
    plt.clf()
    plt.hist(intercepts, bins=100)
    plt.xlabel("b")
    plt.ylabel("Entries")
    plt.title("Intercepts histogram")
    plt.savefig("2b_intercepts" + fileEnding)
    plt.clf()
    if i == 1:
        plt.hist(quads, bins=100)
        plt.xlabel("a (quadratic coefficient)")
        plt.ylabel("Entries")
        plt.title("Quadratic coefficient histogram")
        plt.savefig("2b_quads" + fileEnding)
        plt.clf()
    plt.hist(chisqs, bins=100)
    plt.xlabel("X^2 / ndf")
    plt.ylabel("Entries")
    plt.title("Chi-square of linear fit")
    plt.savefig("2c_chisq" + fileEnding)
    plt.clf()
    plt.hist(chisqquads, bins=100)
    plt.xlabel("X^2 / ndf")
    plt.ylabel("Entries")
    plt.title("Chi-square of quadratic fit")
    plt.savefig("2d_chisq2" + fileEnding)
    plt.clf()
    theNdf = 0
    if i == 1:
        theNdf = 12
    else:
        theNdf = 13
    chispace = np.linspace(0, theNdf*3, 1000)
    chidist = stat.chi2(theNdf)  # frozen chi-square distribution for the overlay
    plt.plot(chispace/theNdf, chidist.pdf(chispace))
    plt.xlabel("X^2")
    plt.ylabel("P")
    plt.title("Chi-square distribution (ndf =" + str(theNdf) + ")")
    plt.savefig("2d_chisq2pdf" + fileEnding)
    plt.clf()

# Problem 3c: estimate tau 500 times with 500 samples, then with 50 samples.
Taus = []
for i in range(0, 500):
    if i % 100 == 0:
        print(i)
    exp = theExponential(500)
    result = exp.AnalyzeDistro(i)
    if not math.isnan(result):
        Taus.append(result)
print(Taus)
plt.hist(Taus, bins=20)
plt.xlabel("Tau")
plt.ylabel("Entries")
plt.title("Estimated Tau")
plt.savefig("3c_tau_hist_500samples.png")
plt.clf()
Taus = []
for i in range(0, 500):
    if i % 100 == 0:
        print(i)
    exp = theExponential(50)
    result = exp.AnalyzeDistro(i)
    if not math.isnan(result):
        Taus.append(result)
print(Taus)
plt.hist(Taus, bins=20)
plt.xlabel("Tau")
plt.ylabel("Entries")
plt.title("Estimated Tau")
plt.savefig("3c_tau_hist_50samples.png")  # filename assumed by symmetry with the 500-sample run
plt.clf()
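
# The write-up notes that overlaying the chi-square pdf on the reduced
# chi-square histograms "was not obviously easy". One way (my sketch, not
# the original code): if X ~ chi2(ndf), then X/ndf has pdf
# ndf * chi2.pdf(ndf * x, ndf), so the overlay only needs that
# change-of-variables factor and a density-normalized histogram.
def _overlay_reduced_chi2(chisqs, ndf):
    plt.hist(chisqs, bins=100, density=True)
    xs = np.linspace(0.01, 3, 300)
    plt.plot(xs, ndf * stat.chi2.pdf(ndf * xs, ndf))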
[ "return i + 1 # use one more: deltas may be bigger! (this", "= {} hasVariations = False # True if any key has variations for", "not allEqual(masterValues): hasVariations = True elif masterValues[0] == default: # No variations, value", "dicts, coordDefaults, coordConverters, storeBuilder, allowIndividualVarIdx=True, ) dicts = [transform for coord, transform in", "ttf.saveXML(outTTXPath, tables=[\"VarC\"]) if saveWoff2: outWoff2Path = outTTFPath.parent / (outTTFPath.stem + \".woff2\") ttf.flavor =", "VariationModel(allLocations, axisTags) storeBuilder = OnlineVarStoreBuilder(axisTags) for gn in vcData.keys(): components, locations = vcData[gn]", "mapping): for glyphName, components in precompiled.items(): for component in components: for v in", "locIndex, loc in enumerate(locations): allIndex = allLocations.index(loc) sparseMapping[allIndex] = locIndex subModel, mapping =", "( fixedCoord, getToFixedConverterForNumIntBitsForScale, transformToIntConverters, transformDefaults, VARIDX_KEY, ComponentRecord, CoordinateRecord, TransformRecord, ) def precompileAllComponents(vcData, allLocations,", "= [[c[i] for i in mapping] for c in components] precompiledGlyph = precompileVarComponents(", "help=\"write TTX dumps for the VarC table.\" ) parser.add_argument(\"--no-woff2\", action=\"store_true\") parser.add_argument( \"--neutral-only\", action=\"store_true\",", "if -32768 <= minIntVal and maxIntVal <= 32767: return i + 1 #", "coordDefaults, coordConverters, storeBuilder, allowIndividualVarIdx=True, ) dicts = [transform for coord, transform in component]", "+ 1 # use one more: deltas may be bigger! (this is rather", "calcNumIntBitsForScale(dicts) scaleConvert = getToFixedConverterForNumIntBitsForScale(numIntBitsForScale) transformToIntConvertersLocal[\"ScaleX\"] = scaleConvert transformToIntConvertersLocal[\"ScaleY\"] = scaleConvert transformDict = compileDicts(", "= min(minScale, d.get(\"ScaleX\", 0)) minScale = min(minScale, d.get(\"ScaleY\", 0)) maxScale = max(maxScale, d.get(\"ScaleX\",", "not None: # glyph components do not contain data that has to go", "No variations, value is default, skip altogether continue resultDict[k] = dict(value=masterValues[0]) convertedMasterValues[k] =", "action=\"store_true\", help=\"hack: build a pseudo static COLRv1 table, that won't respond to the", "\" \"non-hidden axes\", ) args = parser.parse_args() buildVarC( args.designspace, args.ttf, args.output, args.ttx, not", "= parser.parse_args() buildVarC( args.designspace, args.ttf, args.output, args.ttx, not args.no_woff2, args.neutral_only, ) if __name__", "= max(maxScale, d.get(\"ScaleY\", 0)) return minScale, maxScale def remapVarIdxs(precompiled, mapping): for glyphName, components", "= floatToFixed(minValue, precisionBits) maxIntVal = floatToFixed(maxValue, precisionBits) if -32768 <= minIntVal and maxIntVal", "buildVarC( args.designspace, args.ttf, args.output, args.ttx, not args.no_woff2, args.neutral_only, ) if __name__ == \"__main__\":", "= [axis.axisTag for axis in ttf[\"fvar\"].axes] globalAxisNames = {axisTag for axisTag in axisTags", "variations, value is default, skip altogether continue resultDict[k] = dict(value=masterValues[0]) convertedMasterValues[k] = [dictConverters[k](value)", "mapping[v[VARIDX_KEY]] for v in component.transform.values(): if VARIDX_KEY in v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] def", "None: outTTFPath = ttfPath.parent / (ttfPath.stem + \"-varc\" + ttfPath.suffix) else: outTTFPath =", "for i in range(maxIntBits): precisionBits = 16 - i minIntVal = floatToFixed(minValue, 
precisionBits)", "not contain data that has to go to the 'VarC' table precompiled[gn] =", "= [d.get(k, default) for d in dicts] if not allEqual(masterValues): hasVariations = True", "def precompileVarComponents(glyphName, components, storeBuilder, axisTags): precompiled = [] haveVarCData = False for component", "{k: fixedCoord for k in coordKeys} dicts = [coord for coord, transform in", "False for component in components: coordKeys = sorted({k for coord, transform in component", "in coordKeys} coordConverters = {k: fixedCoord for k in coordKeys} dicts = [coord", "in component for k in coord}) coordDefaults = {k: 0 for k in", "table, that won't respond to the \" \"non-hidden axes\", ) args = parser.parse_args()", "in component.transform.values(): if VARIDX_KEY in v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] def buildVarCTable(ttf, vcData, allLocations):", ") parser.add_argument(\"--no-woff2\", action=\"store_true\") parser.add_argument( \"--neutral-only\", action=\"store_true\", help=\"hack: build a pseudo static COLRv1 table,", "continue base, varIdx = storeBuilder.storeMasters(masterValues) assert base == masterValues[0], (k, base, masterValues) resultDict[k][VARIDX_KEY]", "def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument(\"designspace\", help=\"The VarCo .designspace source\") parser.add_argument(\"ttf\",", "= outTTFPath.parent / (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath) ttf = TTFont(outTTFPath, lazy=True)", "COLRv1 table, that won't respond to the \" \"non-hidden axes\", ) args =", "v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] for v in component.transform.values(): if VARIDX_KEY in v: v[VARIDX_KEY]", "pathlib.Path(ttfPath) if outTTFPath is None: outTTFPath = ttfPath.parent / (ttfPath.stem + \"-varc\" +", "loc in enumerate(locations): allIndex = allLocations.index(loc) sparseMapping[allIndex] = locIndex subModel, mapping = masterModel.getSubModel(sparseMapping)", "is correct assert minValue <= maxValue for i in range(maxIntBits): precisionBits = 16", "<= minIntVal and maxIntVal <= 32767: return i + 1 # use one", "haveVarCData = True precompiled.append( ComponentRecord( CoordinateRecord(coordDict), TransformRecord(transformDict), numIntBitsForScale, ), ) if haveVarCData: return", "import argparse parser = argparse.ArgumentParser() parser.add_argument(\"designspace\", help=\"The VarCo .designspace source\") parser.add_argument(\"ttf\", help=\"The input", "<= maxValue for i in range(maxIntBits): precisionBits = 16 - i minIntVal =", "axisTag[0] != \"V\"} vcFont = VarCoFont(designspacePath) vcData, allLocations, neutralGlyphNames = vcFont.extractVarCoData( globalAxisNames, neutralOnly", "in neutralGlyphNames: del gvarTable.variations[glyphName] buildVarCTable(ttf, vcData, allLocations) if doTTX: outTTXPath = outTTFPath.parent /", "doTTX: outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath) ttf =", "parser.add_argument( \"--ttx\", action=\"store_true\", help=\"write TTX dumps for the VarC table.\" ) parser.add_argument(\"--no-woff2\", action=\"store_true\")", "[transform for coord, transform in component] transformToIntConvertersLocal = dict(transformToIntConverters) numIntBitsForScale = calcNumIntBitsForScale(dicts) scaleConvert", "True precompiled.append( ComponentRecord( CoordinateRecord(coordDict), TransformRecord(transformDict), numIntBitsForScale, ), ) if haveVarCData: return precompiled else:", "varIdx return resultDict def 
calcNumIntBitsForScale(dicts): minScale, maxScale = _calcMinMaxScale(dicts) numIntBits = _calcNumIntBits(minScale, maxScale)", "TTFont(ttfPath) axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes] globalAxisNames = {axisTag for axisTag", "{} hasVariations = False # True if any key has variations for k,", "way, but at least this is correct assert minValue <= maxValue for i", "= {} masterModel = VariationModel(allLocations, axisTags) storeBuilder = OnlineVarStoreBuilder(axisTags) for gn in vcData.keys():", "): resultDict = {} convertedMasterValues = {} hasVariations = False # True if", "else: return None def compileDicts( dicts, dictDefaults, dictConverters, storeBuilder, allowIndividualVarIdx=False ): resultDict =", "storeBuilder.storeMasters(masterValues) assert base == masterValues[0], (k, base, masterValues) resultDict[k][VARIDX_KEY] = varIdx return resultDict", "VarC table.\" ) parser.add_argument(\"--no-woff2\", action=\"store_true\") parser.add_argument( \"--neutral-only\", action=\"store_true\", help=\"hack: build a pseudo static", "k, default in dictDefaults.items(): masterValues = [d.get(k, default) for d in dicts] if", "= getToFixedConverterForNumIntBitsForScale(numIntBitsForScale) transformToIntConvertersLocal[\"ScaleX\"] = scaleConvert transformToIntConvertersLocal[\"ScaleY\"] = scaleConvert transformDict = compileDicts( dicts, transformDefaults,", "\"-varc\" + ttfPath.suffix) else: outTTFPath = pathlib.Path(outTTFPath) ttf = TTFont(ttfPath) axisTags = [axis.axisTag", "and maxIntVal <= 32767: return i + 1 # use one more: deltas", "and allEqual( masterValues ): # TODO: Avoid second allEqual() call? continue base, varIdx", "# glyph components do not contain data that has to go to the", "transform in component] coordDict = compileDicts( dicts, coordDefaults, coordConverters, storeBuilder, allowIndividualVarIdx=True, ) dicts", "minIntVal = floatToFixed(minValue, precisionBits) maxIntVal = floatToFixed(maxValue, precisionBits) if -32768 <= minIntVal and", "allLocations.index(loc) sparseMapping[allIndex] = locIndex subModel, mapping = masterModel.getSubModel(sparseMapping) storeBuilder.setModel(subModel) # reorder master values", "the \" \"non-hidden axes\", ) args = parser.parse_args() buildVarC( args.designspace, args.ttf, args.output, args.ttx,", "masterValues) resultDict[k][VARIDX_KEY] = varIdx return resultDict def calcNumIntBitsForScale(dicts): minScale, maxScale = _calcMinMaxScale(dicts) numIntBits", "outWoff2Path = outTTFPath.parent / (outTTFPath.stem + \".woff2\") ttf.flavor = \"woff2\" ttf.save(outWoff2Path) def main():", "= storeBuilder.storeMasters(masterValues) assert base == masterValues[0], (k, base, masterValues) resultDict[k][VARIDX_KEY] = varIdx return", "parser = argparse.ArgumentParser() parser.add_argument(\"designspace\", help=\"The VarCo .designspace source\") parser.add_argument(\"ttf\", help=\"The input Variable Font\")", "ttfPath = pathlib.Path(ttfPath) if outTTFPath is None: outTTFPath = ttfPath.parent / (ttfPath.stem +", "does not fit in maxBits\") def _calcMinMaxScale(transformDicts): minScale = 0 maxScale = 0", "= ttfPath.parent / (ttfPath.stem + \"-varc\" + ttfPath.suffix) else: outTTFPath = pathlib.Path(outTTFPath) ttf", "do not contain data that has to go to the 'VarC' table precompiled[gn]", "in components] precompiledGlyph = precompileVarComponents( gn, components, storeBuilder, axisTags ) if precompiledGlyph is", "if hasVariations: for k, masterValues in convertedMasterValues.items(): if allowIndividualVarIdx and allEqual( masterValues ):", 
"parser.add_argument(\"designspace\", help=\"The VarCo .designspace source\") parser.add_argument(\"ttf\", help=\"The input Variable Font\") parser.add_argument(\"--output\", help=\"The output", "axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes] globalAxisNames = {axisTag for axisTag in", "= [axis.axisTag for axis in ttf[\"fvar\"].axes] varc_table = ttf[\"VarC\"] = newTable(\"VarC\") varc_table.Version =", "input Variable Font\") parser.add_argument(\"--output\", help=\"The output Variable Font\") parser.add_argument( \"--ttx\", action=\"store_true\", help=\"write TTX", "= sorted({k for coord, transform in component for k in coord}) coordDefaults =", "args = parser.parse_args() buildVarC( args.designspace, args.ttf, args.output, args.ttx, not args.no_woff2, args.neutral_only, ) if", "ValueError(\"value does not fit in maxBits\") def _calcMinMaxScale(transformDicts): minScale = 0 maxScale =", "\".woff2\") ttf.flavor = \"woff2\" ttf.save(outWoff2Path) def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument(\"designspace\",", "return precompiled else: return None def compileDicts( dicts, dictDefaults, dictConverters, storeBuilder, allowIndividualVarIdx=False ):", "= [dictConverters[k](value) for value in masterValues] if hasVariations: for k, masterValues in convertedMasterValues.items():", "!= \"V\"} vcFont = VarCoFont(designspacePath) vcData, allLocations, neutralGlyphNames = vcFont.extractVarCoData( globalAxisNames, neutralOnly )", "buildVarCTable(ttf, vcData, allLocations) if doTTX: outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath,", "= False # True if any key has variations for k, default in", "vcFont.extractVarCoData( globalAxisNames, neutralOnly ) if neutralGlyphNames: gvarTable = ttf[\"gvar\"] for glyphName in neutralGlyphNames:", "is not None: # glyph components do not contain data that has to", "TTX dumps for the VarC table.\" ) parser.add_argument(\"--no-woff2\", action=\"store_true\") parser.add_argument( \"--neutral-only\", action=\"store_true\", help=\"hack:", "_calcNumIntBits(minScale, maxScale) return numIntBits def _calcNumIntBits(minValue, maxValue, maxIntBits=7): # TODO: there must be", "must be a better way, but at least this is correct assert minValue", "max(maxScale, d.get(\"ScaleX\", 0)) maxScale = max(maxScale, d.get(\"ScaleY\", 0)) return minScale, maxScale def remapVarIdxs(precompiled,", "coordConverters = {k: fixedCoord for k in coordKeys} dicts = [coord for coord,", "ttf.save(outTTFPath) ttf = TTFont(outTTFPath, lazy=True) # Load from scratch if doTTX: outTTXPath =", "coordDefaults = {k: 0 for k in coordKeys} coordConverters = {k: fixedCoord for", "in v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] def buildVarCTable(ttf, vcData, allLocations): axisTags = [axis.axisTag for", "if neutralGlyphNames: gvarTable = ttf[\"gvar\"] for glyphName in neutralGlyphNames: del gvarTable.variations[glyphName] buildVarCTable(ttf, vcData,", "transformDefaults, transformToIntConvertersLocal, storeBuilder ) if coordDict or transformDict: haveVarCData = True precompiled.append( ComponentRecord(", "# use one more: deltas may be bigger! 
(this is rather fuzzy) raise", "Font\") parser.add_argument( \"--ttx\", action=\"store_true\", help=\"write TTX dumps for the VarC table.\" ) parser.add_argument(\"--no-woff2\",", "that won't respond to the \" \"non-hidden axes\", ) args = parser.parse_args() buildVarC(", "if any key has variations for k, default in dictDefaults.items(): masterValues = [d.get(k,", "): # TODO: Avoid second allEqual() call? continue base, varIdx = storeBuilder.storeMasters(masterValues) assert", "0)) maxScale = max(maxScale, d.get(\"ScaleY\", 0)) return minScale, maxScale def remapVarIdxs(precompiled, mapping): for", ") def precompileAllComponents(vcData, allLocations, axisTags): precompiled = {} masterModel = VariationModel(allLocations, axisTags) storeBuilder", "ttf[\"VarC\"] = newTable(\"VarC\") varc_table.Version = 0x00010000 precompiled, store = precompileAllComponents(vcData, allLocations, axisTags) mapping", "for d in dicts] if not allEqual(masterValues): hasVariations = True elif masterValues[0] ==", "assert base == masterValues[0], (k, base, masterValues) resultDict[k][VARIDX_KEY] = varIdx return resultDict def", "Load from scratch if doTTX: outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-after.ttx\") ttf.saveXML(outTTXPath,", "floatToFixed(maxValue, precisionBits) if -32768 <= minIntVal and maxIntVal <= 32767: return i +", "args.designspace, args.ttf, args.output, args.ttx, not args.no_woff2, args.neutral_only, ) if __name__ == \"__main__\": main()", "subModel, mapping = masterModel.getSubModel(sparseMapping) storeBuilder.setModel(subModel) # reorder master values according to allLocations components", "return precompiled, storeBuilder.finish() def precompileVarComponents(glyphName, components, storeBuilder, axisTags): precompiled = [] haveVarCData =", "in convertedMasterValues.items(): if allowIndividualVarIdx and allEqual( masterValues ): # TODO: Avoid second allEqual()", "precompiled, storeBuilder.finish() def precompileVarComponents(glyphName, components, storeBuilder, axisTags): precompiled = [] haveVarCData = False", "convertedMasterValues = {} hasVariations = False # True if any key has variations", "component] coordDict = compileDicts( dicts, coordDefaults, coordConverters, storeBuilder, allowIndividualVarIdx=True, ) dicts = [transform", "= compileDicts( dicts, transformDefaults, transformToIntConvertersLocal, storeBuilder ) if coordDict or transformDict: haveVarCData =", "import OnlineVarStoreBuilder from rcjktools.varco import VarCoFont from rcjktools.table_VarC import ( fixedCoord, getToFixedConverterForNumIntBitsForScale, transformToIntConverters,", "compileDicts( dicts, dictDefaults, dictConverters, storeBuilder, allowIndividualVarIdx=False ): resultDict = {} convertedMasterValues = {}", "mapping[v[VARIDX_KEY]] def buildVarCTable(ttf, vcData, allLocations): axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes] varc_table", "maxScale = _calcMinMaxScale(dicts) numIntBits = _calcNumIntBits(minScale, maxScale) return numIntBits def _calcNumIntBits(minValue, maxValue, maxIntBits=7):", "= max(maxScale, d.get(\"ScaleX\", 0)) maxScale = max(maxScale, d.get(\"ScaleY\", 0)) return minScale, maxScale def", "enumerate(locations): allIndex = allLocations.index(loc) sparseMapping[allIndex] = locIndex subModel, mapping = masterModel.getSubModel(sparseMapping) storeBuilder.setModel(subModel) #", "storeBuilder, axisTags ) if precompiledGlyph is not None: # glyph components do not", "tables=[\"VarC\"]) if saveWoff2: outWoff2Path = outTTFPath.parent / (outTTFPath.stem + \".woff2\") ttf.flavor = 
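
# Minimal sketch (illustrative only; the two-master "wght" setup is an
# assumption, not data from this tool) of the storeBuilder protocol that
# compileDicts drives above: storeMasters() returns the default-location
# value plus a variation index into the VarStore being built, and
# store.optimize() later returns the varIdx remapping consumed by
# remapVarIdxs() below.
def _demoStoreMasters():
    axisTags = ["wght"]
    locations = [{}, {"wght": 1.0}]
    builder = OnlineVarStoreBuilder(axisTags)
    builder.setModel(VariationModel(locations, axisTags))
    base, varIdx = builder.storeMasters([100, 160])  # default value, wght=1 value
    store = builder.finish()
    mapping = store.optimize()
    return base, mapping[varIdx]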
\"woff2\"", "maxScale = max(maxScale, d.get(\"ScaleX\", 0)) maxScale = max(maxScale, d.get(\"ScaleY\", 0)) return minScale, maxScale", "continue resultDict[k] = dict(value=masterValues[0]) convertedMasterValues[k] = [dictConverters[k](value) for value in masterValues] if hasVariations:", "sparseMapping = [None] * len(allLocations) for locIndex, loc in enumerate(locations): allIndex = allLocations.index(loc)", "None: # glyph components do not contain data that has to go to", "dicts, dictDefaults, dictConverters, storeBuilder, allowIndividualVarIdx=False ): resultDict = {} convertedMasterValues = {} hasVariations", "buildVarC( designspacePath, ttfPath, outTTFPath, doTTX, saveWoff2, neutralOnly=False ): import pathlib registerCustomTableClass(\"VarC\", \"rcjktools.table_VarC\", \"table_VarC\")", "i minIntVal = floatToFixed(minValue, precisionBits) maxIntVal = floatToFixed(maxValue, precisionBits) if -32768 <= minIntVal", ") if neutralGlyphNames: gvarTable = ttf[\"gvar\"] for glyphName in neutralGlyphNames: del gvarTable.variations[glyphName] buildVarCTable(ttf,", "\"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath) ttf = TTFont(outTTFPath, lazy=True) # Load from scratch if", "in components: for v in component.coord.values(): if VARIDX_KEY in v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]]", "0)) maxScale = max(maxScale, d.get(\"ScaleX\", 0)) maxScale = max(maxScale, d.get(\"ScaleY\", 0)) return minScale,", "for k, masterValues in convertedMasterValues.items(): if allowIndividualVarIdx and allEqual( masterValues ): # TODO:", "glyph components do not contain data that has to go to the 'VarC'", "maxScale = 0 for d in transformDicts: minScale = min(minScale, d.get(\"ScaleX\", 0)) minScale", "max(maxScale, d.get(\"ScaleY\", 0)) return minScale, maxScale def remapVarIdxs(precompiled, mapping): for glyphName, components in", "to allLocations components = [[c[i] for i in mapping] for c in components]", "masterValues in convertedMasterValues.items(): if allowIndividualVarIdx and allEqual( masterValues ): # TODO: Avoid second", "= newTable(\"VarC\") varc_table.Version = 0x00010000 precompiled, store = precompileAllComponents(vcData, allLocations, axisTags) mapping =", "return minScale, maxScale def remapVarIdxs(precompiled, mapping): for glyphName, components in precompiled.items(): for component", "fixedCoord for k in coordKeys} dicts = [coord for coord, transform in component]", "the 'VarC' table precompiled[gn] = precompiledGlyph return precompiled, storeBuilder.finish() def precompileVarComponents(glyphName, components, storeBuilder,", "outTTFPath = pathlib.Path(outTTFPath) ttf = TTFont(ttfPath) axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes]", "resultDict[k][VARIDX_KEY] = varIdx return resultDict def calcNumIntBitsForScale(dicts): minScale, maxScale = _calcMinMaxScale(dicts) numIntBits =", "locIndex subModel, mapping = masterModel.getSubModel(sparseMapping) storeBuilder.setModel(subModel) # reorder master values according to allLocations", "compileDicts( dicts, transformDefaults, transformToIntConvertersLocal, storeBuilder ) if coordDict or transformDict: haveVarCData = True", "axisTags) mapping = store.optimize() remapVarIdxs(precompiled, mapping) varc_table.GlyphData = precompiled varc_table.VarStore = store def", "(outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath) ttf = TTFont(outTTFPath, lazy=True) # Load from", "ttfPath.suffix) else: outTTFPath = pathlib.Path(outTTFPath) ttf = TTFont(ttfPath) axisTags = 
[axis.axisTag for axis", "i + 1 # use one more: deltas may be bigger! (this is", "is default, skip altogether continue resultDict[k] = dict(value=masterValues[0]) convertedMasterValues[k] = [dictConverters[k](value) for value", "allLocations components = [[c[i] for i in mapping] for c in components] precompiledGlyph", "precompileVarComponents(glyphName, components, storeBuilder, axisTags): precompiled = [] haveVarCData = False for component in", "components] precompiledGlyph = precompileVarComponents( gn, components, storeBuilder, axisTags ) if precompiledGlyph is not", "precisionBits) maxIntVal = floatToFixed(maxValue, precisionBits) if -32768 <= minIntVal and maxIntVal <= 32767:", "pathlib.Path(outTTFPath) ttf = TTFont(ttfPath) axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes] globalAxisNames =", "Font\") parser.add_argument(\"--output\", help=\"The output Variable Font\") parser.add_argument( \"--ttx\", action=\"store_true\", help=\"write TTX dumps for", "gn, components, storeBuilder, axisTags ) if precompiledGlyph is not None: # glyph components", "static COLRv1 table, that won't respond to the \" \"non-hidden axes\", ) args", "'VarC' table precompiled[gn] = precompiledGlyph return precompiled, storeBuilder.finish() def precompileVarComponents(glyphName, components, storeBuilder, axisTags):", "= [] haveVarCData = False for component in components: coordKeys = sorted({k for", "precompiled, store = precompileAllComponents(vcData, allLocations, axisTags) mapping = store.optimize() remapVarIdxs(precompiled, mapping) varc_table.GlyphData =", "if VARIDX_KEY in v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] for v in component.transform.values(): if VARIDX_KEY", "[axis.axisTag for axis in ttf[\"fvar\"].axes] varc_table = ttf[\"VarC\"] = newTable(\"VarC\") varc_table.Version = 0x00010000", "0)) minScale = min(minScale, d.get(\"ScaleY\", 0)) maxScale = max(maxScale, d.get(\"ScaleX\", 0)) maxScale =", "k in coord}) coordDefaults = {k: 0 for k in coordKeys} coordConverters =", "for k in coordKeys} dicts = [coord for coord, transform in component] coordDict", "\"--neutral-only\", action=\"store_true\", help=\"hack: build a pseudo static COLRv1 table, that won't respond to", "/ (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath) ttf = TTFont(outTTFPath, lazy=True) # Load", "[None] * len(allLocations) for locIndex, loc in enumerate(locations): allIndex = allLocations.index(loc) sparseMapping[allIndex] =", "better way, but at least this is correct assert minValue <= maxValue for", "vcData, allLocations): axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes] varc_table = ttf[\"VarC\"] =", "registerCustomTableClass from fontTools.varLib.models import VariationModel, allEqual from fontTools.varLib.varStore import OnlineVarStoreBuilder from rcjktools.varco import", "precompiledGlyph is not None: # glyph components do not contain data that has", "ttf[\"gvar\"] for glyphName in neutralGlyphNames: del gvarTable.variations[glyphName] buildVarCTable(ttf, vcData, allLocations) if doTTX: outTTXPath", ") if haveVarCData: return precompiled else: return None def compileDicts( dicts, dictDefaults, dictConverters,", "maxIntBits=7): # TODO: there must be a better way, but at least this", "outTTFPath.parent / (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath) ttf = TTFont(outTTFPath, lazy=True) #", "masterValues = [d.get(k, default) for d in dicts] if not allEqual(masterValues): hasVariations =", "base, masterValues) 
resultDict[k][VARIDX_KEY] = varIdx return resultDict def calcNumIntBitsForScale(dicts): minScale, maxScale = _calcMinMaxScale(dicts)", "ttf[\"fvar\"].axes] globalAxisNames = {axisTag for axisTag in axisTags if axisTag[0] != \"V\"} vcFont", "from fontTools.ttLib import TTFont, newTable, registerCustomTableClass from fontTools.varLib.models import VariationModel, allEqual from fontTools.varLib.varStore", "OnlineVarStoreBuilder from rcjktools.varco import VarCoFont from rcjktools.table_VarC import ( fixedCoord, getToFixedConverterForNumIntBitsForScale, transformToIntConverters, transformDefaults,", "masterModel.getSubModel(sparseMapping) storeBuilder.setModel(subModel) # reorder master values according to allLocations components = [[c[i] for", "help=\"The input Variable Font\") parser.add_argument(\"--output\", help=\"The output Variable Font\") parser.add_argument( \"--ttx\", action=\"store_true\", help=\"write", "= mapping[v[VARIDX_KEY]] def buildVarCTable(ttf, vcData, allLocations): axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes]", "components do not contain data that has to go to the 'VarC' table", "axes\", ) args = parser.parse_args() buildVarC( args.designspace, args.ttf, args.output, args.ttx, not args.no_woff2, args.neutral_only,", "components in precompiled.items(): for component in components: for v in component.coord.values(): if VARIDX_KEY", "rather fuzzy) raise ValueError(\"value does not fit in maxBits\") def _calcMinMaxScale(transformDicts): minScale =", "value in masterValues] if hasVariations: for k, masterValues in convertedMasterValues.items(): if allowIndividualVarIdx and", "d.get(\"ScaleY\", 0)) return minScale, maxScale def remapVarIdxs(precompiled, mapping): for glyphName, components in precompiled.items():", "), ) if haveVarCData: return precompiled else: return None def compileDicts( dicts, dictDefaults,", "precompiled.append( ComponentRecord( CoordinateRecord(coordDict), TransformRecord(transformDict), numIntBitsForScale, ), ) if haveVarCData: return precompiled else: return", "min(minScale, d.get(\"ScaleX\", 0)) minScale = min(minScale, d.get(\"ScaleY\", 0)) maxScale = max(maxScale, d.get(\"ScaleX\", 0))", "for k in coord}) coordDefaults = {k: 0 for k in coordKeys} coordConverters", "vcData, allLocations) if doTTX: outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"])", "in dicts] if not allEqual(masterValues): hasVariations = True elif masterValues[0] == default: #", "go to the 'VarC' table precompiled[gn] = precompiledGlyph return precompiled, storeBuilder.finish() def precompileVarComponents(glyphName,", "= [transform for coord, transform in component] transformToIntConvertersLocal = dict(transformToIntConverters) numIntBitsForScale = calcNumIntBitsForScale(dicts)", "to go to the 'VarC' table precompiled[gn] = precompiledGlyph return precompiled, storeBuilder.finish() def", "minScale, maxScale = _calcMinMaxScale(dicts) numIntBits = _calcNumIntBits(minScale, maxScale) return numIntBits def _calcNumIntBits(minValue, maxValue,", "pseudo static COLRv1 table, that won't respond to the \" \"non-hidden axes\", )", "varc_table.Version = 0x00010000 precompiled, store = precompileAllComponents(vcData, allLocations, axisTags) mapping = store.optimize() remapVarIdxs(precompiled,", "coordKeys = sorted({k for coord, transform in component for k in coord}) coordDefaults", "ComponentRecord( CoordinateRecord(coordDict), TransformRecord(transformDict), numIntBitsForScale, ), ) if haveVarCData: return precompiled 
else: return None", "remapVarIdxs(precompiled, mapping): for glyphName, components in precompiled.items(): for component in components: for v", "== default: # No variations, value is default, skip altogether continue resultDict[k] =", "dict(value=masterValues[0]) convertedMasterValues[k] = [dictConverters[k](value) for value in masterValues] if hasVariations: for k, masterValues", "# TODO: Avoid second allEqual() call? continue base, varIdx = storeBuilder.storeMasters(masterValues) assert base", "v in component.transform.values(): if VARIDX_KEY in v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] def buildVarCTable(ttf, vcData,", "respond to the \" \"non-hidden axes\", ) args = parser.parse_args() buildVarC( args.designspace, args.ttf,", "dictDefaults.items(): masterValues = [d.get(k, default) for d in dicts] if not allEqual(masterValues): hasVariations", "fontTools.varLib.models import VariationModel, allEqual from fontTools.varLib.varStore import OnlineVarStoreBuilder from rcjktools.varco import VarCoFont from", "outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath) ttf = TTFont(outTTFPath,", "= OnlineVarStoreBuilder(axisTags) for gn in vcData.keys(): components, locations = vcData[gn] sparseMapping = [None]", "compileDicts( dicts, coordDefaults, coordConverters, storeBuilder, allowIndividualVarIdx=True, ) dicts = [transform for coord, transform", "outTTFPath, doTTX, saveWoff2, neutralOnly=False ): import pathlib registerCustomTableClass(\"VarC\", \"rcjktools.table_VarC\", \"table_VarC\") ttfPath = pathlib.Path(ttfPath)", "is None: outTTFPath = ttfPath.parent / (ttfPath.stem + \"-varc\" + ttfPath.suffix) else: outTTFPath", "precompiled else: return None def compileDicts( dicts, dictDefaults, dictConverters, storeBuilder, allowIndividualVarIdx=False ): resultDict", "variations for k, default in dictDefaults.items(): masterValues = [d.get(k, default) for d in", "sparseMapping[allIndex] = locIndex subModel, mapping = masterModel.getSubModel(sparseMapping) storeBuilder.setModel(subModel) # reorder master values according", "return numIntBits def _calcNumIntBits(minValue, maxValue, maxIntBits=7): # TODO: there must be a better", "mapping] for c in components] precompiledGlyph = precompileVarComponents( gn, components, storeBuilder, axisTags )", "globalAxisNames = {axisTag for axisTag in axisTags if axisTag[0] != \"V\"} vcFont =", "= {} convertedMasterValues = {} hasVariations = False # True if any key", "= \"woff2\" ttf.save(outWoff2Path) def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument(\"designspace\", help=\"The VarCo", "# TODO: there must be a better way, but at least this is", "in axisTags if axisTag[0] != \"V\"} vcFont = VarCoFont(designspacePath) vcData, allLocations, neutralGlyphNames =", "axisTags) storeBuilder = OnlineVarStoreBuilder(axisTags) for gn in vcData.keys(): components, locations = vcData[gn] sparseMapping", "for value in masterValues] if hasVariations: for k, masterValues in convertedMasterValues.items(): if allowIndividualVarIdx", "if precompiledGlyph is not None: # glyph components do not contain data that", "len(allLocations) for locIndex, loc in enumerate(locations): allIndex = allLocations.index(loc) sparseMapping[allIndex] = locIndex subModel,", "VARIDX_KEY in v: v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] for v in component.transform.values(): if VARIDX_KEY in", "allIndex = allLocations.index(loc) sparseMapping[allIndex] = locIndex subModel, mapping = 
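
# Worked check of the bit budget above (my sketch; the value 1.5 is
# illustrative): a max scale of 1.5 overflows int16 at 16 and 15 fractional
# bits (98304 and 49152) and first fits at 14 bits (24576), so
# _calcNumIntBits(0, 1.5) returns 2 + 1 == 3, the extra integer bit being the
# delta headroom mentioned in the comment.
def _demoNumIntBits():
    assert floatToFixed(1.5, 14) == 24576
    assert _calcNumIntBits(0, 1.5) == 3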

def remapVarIdxs(precompiled, mapping):
    for glyphName, components in precompiled.items():
        for component in components:
            for v in component.coord.values():
                if VARIDX_KEY in v:
                    v[VARIDX_KEY] = mapping[v[VARIDX_KEY]]
            for v in component.transform.values():
                if VARIDX_KEY in v:
                    v[VARIDX_KEY] = mapping[v[VARIDX_KEY]]


def buildVarCTable(ttf, vcData, allLocations):
    axisTags = [axis.axisTag for axis in ttf["fvar"].axes]
    varc_table = ttf["VarC"] = newTable("VarC")
    varc_table.Version = 0x00010000
    precompiled, store = precompileAllComponents(vcData, allLocations, axisTags)
    mapping = store.optimize()
    remapVarIdxs(precompiled, mapping)
    varc_table.GlyphData = precompiled
    varc_table.VarStore = store


def buildVarC(
    designspacePath, ttfPath, outTTFPath, doTTX, saveWoff2, neutralOnly=False
):
    import pathlib

    registerCustomTableClass("VarC", "rcjktools.table_VarC", "table_VarC")
    ttfPath = pathlib.Path(ttfPath)
    if outTTFPath is None:
        outTTFPath = ttfPath.parent / (ttfPath.stem + "-varc" + ttfPath.suffix)
    else:
        outTTFPath = pathlib.Path(outTTFPath)
    ttf = TTFont(ttfPath)

    axisTags = [axis.axisTag for axis in ttf["fvar"].axes]
    globalAxisNames = {axisTag for axisTag in axisTags if axisTag[0] != "V"}

    vcFont = VarCoFont(designspacePath)
    vcData, allLocations, neutralGlyphNames = vcFont.extractVarCoData(
        globalAxisNames, neutralOnly
    )
    if neutralGlyphNames:
        gvarTable = ttf["gvar"]
        for glyphName in neutralGlyphNames:
            del gvarTable.variations[glyphName]

    buildVarCTable(ttf, vcData, allLocations)

    if doTTX:
        outTTXPath = outTTFPath.parent / (outTTFPath.stem + "-before.ttx")
        ttf.saveXML(outTTXPath, tables=["VarC"])

    ttf.save(outTTFPath)

    ttf = TTFont(outTTFPath, lazy=True)  # Load from scratch

    if doTTX:
        outTTXPath = outTTFPath.parent / (outTTFPath.stem + "-after.ttx")
        ttf.saveXML(outTTXPath, tables=["VarC"])

    if saveWoff2:
        outWoff2Path = outTTFPath.parent / (outTTFPath.stem + ".woff2")
        ttf.flavor = "woff2"
        ttf.save(outWoff2Path)


def main():
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("designspace", help="The VarCo .designspace source")
    parser.add_argument("ttf", help="The input Variable Font")
    parser.add_argument("--output", help="The output Variable Font")
    parser.add_argument(
        "--ttx", action="store_true", help="write TTX dumps for the VarC table."
    )
    parser.add_argument("--no-woff2", action="store_true")
    parser.add_argument(
        "--neutral-only",
        action="store_true",
        help="hack: build a pseudo static COLRv1 table, that won't respond to the "
        "non-hidden axes",
    )
    args = parser.parse_args()
    buildVarC(
        args.designspace,
        args.ttf,
        args.output,
        args.ttx,
        not args.no_woff2,
        args.neutral_only,
    )


if __name__ == "__main__":
    main()
precompileVarComponents(glyphName, components, storeBuilder, axisTags): precompiled =", "masterValues[0] == default: # No variations, value is default, skip altogether continue resultDict[k]", "altogether continue resultDict[k] = dict(value=masterValues[0]) convertedMasterValues[k] = [dictConverters[k](value) for value in masterValues] if", "output Variable Font\") parser.add_argument( \"--ttx\", action=\"store_true\", help=\"write TTX dumps for the VarC table.\"", "\"non-hidden axes\", ) args = parser.parse_args() buildVarC( args.designspace, args.ttf, args.output, args.ttx, not args.no_woff2,", "precompileAllComponents(vcData, allLocations, axisTags) mapping = store.optimize() remapVarIdxs(precompiled, mapping) varc_table.GlyphData = precompiled varc_table.VarStore =", "precompileVarComponents( gn, components, storeBuilder, axisTags ) if precompiledGlyph is not None: # glyph", "import VarCoFont from rcjktools.table_VarC import ( fixedCoord, getToFixedConverterForNumIntBitsForScale, transformToIntConverters, transformDefaults, VARIDX_KEY, ComponentRecord, CoordinateRecord,", "for d in transformDicts: minScale = min(minScale, d.get(\"ScaleX\", 0)) minScale = min(minScale, d.get(\"ScaleY\",", "build a pseudo static COLRv1 table, that won't respond to the \" \"non-hidden", "in component] coordDict = compileDicts( dicts, coordDefaults, coordConverters, storeBuilder, allowIndividualVarIdx=True, ) dicts =", "import TTFont, newTable, registerCustomTableClass from fontTools.varLib.models import VariationModel, allEqual from fontTools.varLib.varStore import OnlineVarStoreBuilder", "axisTags ) if precompiledGlyph is not None: # glyph components do not contain", "dicts] if not allEqual(masterValues): hasVariations = True elif masterValues[0] == default: # No", "but at least this is correct assert minValue <= maxValue for i in", "allEqual(masterValues): hasVariations = True elif masterValues[0] == default: # No variations, value is", "for k, default in dictDefaults.items(): masterValues = [d.get(k, default) for d in dicts]", "(this is rather fuzzy) raise ValueError(\"value does not fit in maxBits\") def _calcMinMaxScale(transformDicts):", "in range(maxIntBits): precisionBits = 16 - i minIntVal = floatToFixed(minValue, precisionBits) maxIntVal =", "maxScale def remapVarIdxs(precompiled, mapping): for glyphName, components in precompiled.items(): for component in components:", "fuzzy) raise ValueError(\"value does not fit in maxBits\") def _calcMinMaxScale(transformDicts): minScale = 0", "use one more: deltas may be bigger! 
(this is rather fuzzy) raise ValueError(\"value", "tables=[\"VarC\"]) ttf.save(outTTFPath) ttf = TTFont(outTTFPath, lazy=True) # Load from scratch if doTTX: outTTXPath", "axisTag in axisTags if axisTag[0] != \"V\"} vcFont = VarCoFont(designspacePath) vcData, allLocations, neutralGlyphNames", "-32768 <= minIntVal and maxIntVal <= 32767: return i + 1 # use", "storeBuilder.finish() def precompileVarComponents(glyphName, components, storeBuilder, axisTags): precompiled = [] haveVarCData = False for", "axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes] varc_table = ttf[\"VarC\"] = newTable(\"VarC\") varc_table.Version", "gn in vcData.keys(): components, locations = vcData[gn] sparseMapping = [None] * len(allLocations) for", "numIntBits def _calcNumIntBits(minValue, maxValue, maxIntBits=7): # TODO: there must be a better way,", "designspacePath, ttfPath, outTTFPath, doTTX, saveWoff2, neutralOnly=False ): import pathlib registerCustomTableClass(\"VarC\", \"rcjktools.table_VarC\", \"table_VarC\") ttfPath", "for axis in ttf[\"fvar\"].axes] varc_table = ttf[\"VarC\"] = newTable(\"VarC\") varc_table.Version = 0x00010000 precompiled,", "if coordDict or transformDict: haveVarCData = True precompiled.append( ComponentRecord( CoordinateRecord(coordDict), TransformRecord(transformDict), numIntBitsForScale, ),", "fontTools.misc.fixedTools import floatToFixed from fontTools.ttLib import TTFont, newTable, registerCustomTableClass from fontTools.varLib.models import VariationModel,", "values according to allLocations components = [[c[i] for i in mapping] for c", "minValue <= maxValue for i in range(maxIntBits): precisionBits = 16 - i minIntVal", "contain data that has to go to the 'VarC' table precompiled[gn] = precompiledGlyph", "axis in ttf[\"fvar\"].axes] globalAxisNames = {axisTag for axisTag in axisTags if axisTag[0] !=", "scratch if doTTX: outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-after.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) if", "convertedMasterValues[k] = [dictConverters[k](value) for value in masterValues] if hasVariations: for k, masterValues in", "components = [[c[i] for i in mapping] for c in components] precompiledGlyph =", "if outTTFPath is None: outTTFPath = ttfPath.parent / (ttfPath.stem + \"-varc\" + ttfPath.suffix)", "maxValue for i in range(maxIntBits): precisionBits = 16 - i minIntVal = floatToFixed(minValue,", "masterValues ): # TODO: Avoid second allEqual() call? continue base, varIdx = storeBuilder.storeMasters(masterValues)", "def _calcNumIntBits(minValue, maxValue, maxIntBits=7): # TODO: there must be a better way, but", "= TTFont(ttfPath) axisTags = [axis.axisTag for axis in ttf[\"fvar\"].axes] globalAxisNames = {axisTag for", "= ttf[\"VarC\"] = newTable(\"VarC\") varc_table.Version = 0x00010000 precompiled, store = precompileAllComponents(vcData, allLocations, axisTags)", "[] haveVarCData = False for component in components: coordKeys = sorted({k for coord,", "c in components] precompiledGlyph = precompileVarComponents( gn, components, storeBuilder, axisTags ) if precompiledGlyph", ".designspace source\") parser.add_argument(\"ttf\", help=\"The input Variable Font\") parser.add_argument(\"--output\", help=\"The output Variable Font\") parser.add_argument(", "neutralOnly=False ): import pathlib registerCustomTableClass(\"VarC\", \"rcjktools.table_VarC\", \"table_VarC\") ttfPath = pathlib.Path(ttfPath) if outTTFPath is", "in maxBits\") def _calcMinMaxScale(transformDicts): minScale = 0 maxScale = 0 for d in", "call? 
continue base, varIdx = storeBuilder.storeMasters(masterValues) assert base == masterValues[0], (k, base, masterValues)", "precompiled varc_table.VarStore = store def buildVarC( designspacePath, ttfPath, outTTFPath, doTTX, saveWoff2, neutralOnly=False ):", "in component] transformToIntConvertersLocal = dict(transformToIntConverters) numIntBitsForScale = calcNumIntBitsForScale(dicts) scaleConvert = getToFixedConverterForNumIntBitsForScale(numIntBitsForScale) transformToIntConvertersLocal[\"ScaleX\"] =", "axisTags if axisTag[0] != \"V\"} vcFont = VarCoFont(designspacePath) vcData, allLocations, neutralGlyphNames = vcFont.extractVarCoData(", "allLocations) if doTTX: outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-before.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) ttf.save(outTTFPath)", "varc_table.VarStore = store def buildVarC( designspacePath, ttfPath, outTTFPath, doTTX, saveWoff2, neutralOnly=False ): import", "precompiled.items(): for component in components: for v in component.coord.values(): if VARIDX_KEY in v:", "16 - i minIntVal = floatToFixed(minValue, precisionBits) maxIntVal = floatToFixed(maxValue, precisionBits) if -32768", "[axis.axisTag for axis in ttf[\"fvar\"].axes] globalAxisNames = {axisTag for axisTag in axisTags if", "component in components: coordKeys = sorted({k for coord, transform in component for k", "dicts, transformDefaults, transformToIntConvertersLocal, storeBuilder ) if coordDict or transformDict: haveVarCData = True precompiled.append(", "= pathlib.Path(ttfPath) if outTTFPath is None: outTTFPath = ttfPath.parent / (ttfPath.stem + \"-varc\"", "for locIndex, loc in enumerate(locations): allIndex = allLocations.index(loc) sparseMapping[allIndex] = locIndex subModel, mapping", "to the \" \"non-hidden axes\", ) args = parser.parse_args() buildVarC( args.designspace, args.ttf, args.output,", "v[VARIDX_KEY] = mapping[v[VARIDX_KEY]] def buildVarCTable(ttf, vcData, allLocations): axisTags = [axis.axisTag for axis in", "precompiled = [] haveVarCData = False for component in components: coordKeys = sorted({k", "allowIndividualVarIdx=True, ) dicts = [transform for coord, transform in component] transformToIntConvertersLocal = dict(transformToIntConverters)", "base, varIdx = storeBuilder.storeMasters(masterValues) assert base == masterValues[0], (k, base, masterValues) resultDict[k][VARIDX_KEY] =", "minScale, maxScale def remapVarIdxs(precompiled, mapping): for glyphName, components in precompiled.items(): for component in", "in ttf[\"fvar\"].axes] globalAxisNames = {axisTag for axisTag in axisTags if axisTag[0] != \"V\"}", "saveWoff2, neutralOnly=False ): import pathlib registerCustomTableClass(\"VarC\", \"rcjktools.table_VarC\", \"table_VarC\") ttfPath = pathlib.Path(ttfPath) if outTTFPath", "transformToIntConverters, transformDefaults, VARIDX_KEY, ComponentRecord, CoordinateRecord, TransformRecord, ) def precompileAllComponents(vcData, allLocations, axisTags): precompiled =", "TODO: Avoid second allEqual() call? 
continue base, varIdx = storeBuilder.storeMasters(masterValues) assert base ==", "(k, base, masterValues) resultDict[k][VARIDX_KEY] = varIdx return resultDict def calcNumIntBitsForScale(dicts): minScale, maxScale =", "/ (outTTFPath.stem + \"-after.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) if saveWoff2: outWoff2Path = outTTFPath.parent / (outTTFPath.stem", "storeBuilder, allowIndividualVarIdx=True, ) dicts = [transform for coord, transform in component] transformToIntConvertersLocal =", "from fontTools.misc.fixedTools import floatToFixed from fontTools.ttLib import TTFont, newTable, registerCustomTableClass from fontTools.varLib.models import", "base == masterValues[0], (k, base, masterValues) resultDict[k][VARIDX_KEY] = varIdx return resultDict def calcNumIntBitsForScale(dicts):", "d.get(\"ScaleY\", 0)) maxScale = max(maxScale, d.get(\"ScaleX\", 0)) maxScale = max(maxScale, d.get(\"ScaleY\", 0)) return", "ttf = TTFont(outTTFPath, lazy=True) # Load from scratch if doTTX: outTTXPath = outTTFPath.parent", "store.optimize() remapVarIdxs(precompiled, mapping) varc_table.GlyphData = precompiled varc_table.VarStore = store def buildVarC( designspacePath, ttfPath,", "resultDict[k] = dict(value=masterValues[0]) convertedMasterValues[k] = [dictConverters[k](value) for value in masterValues] if hasVariations: for", "from scratch if doTTX: outTTXPath = outTTFPath.parent / (outTTFPath.stem + \"-after.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"])", "outTTFPath.parent / (outTTFPath.stem + \"-after.ttx\") ttf.saveXML(outTTXPath, tables=[\"VarC\"]) if saveWoff2: outWoff2Path = outTTFPath.parent /", "TTFont, newTable, registerCustomTableClass from fontTools.varLib.models import VariationModel, allEqual from fontTools.varLib.varStore import OnlineVarStoreBuilder from", "storeBuilder, axisTags): precompiled = [] haveVarCData = False for component in components: coordKeys", "0 for k in coordKeys} coordConverters = {k: fixedCoord for k in coordKeys}", "VarCo .designspace source\") parser.add_argument(\"ttf\", help=\"The input Variable Font\") parser.add_argument(\"--output\", help=\"The output Variable Font\")", "help=\"The output Variable Font\") parser.add_argument( \"--ttx\", action=\"store_true\", help=\"write TTX dumps for the VarC", "axis in ttf[\"fvar\"].axes] varc_table = ttf[\"VarC\"] = newTable(\"VarC\") varc_table.Version = 0x00010000 precompiled, store", "allEqual() call? 
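
# A sketch of what compileDicts() produces, using a hypothetical "Rotation"
# transform key that varies across the masters:
#
#     {"Rotation": {"value": 20, VARIDX_KEY: <index into the var store>}}
#
# Keys that equal their default in every master are omitted entirely; with
# allowIndividualVarIdx set, keys that are constant (but non-default) keep
# only their "value" entry and get no var store index.
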

def calcNumIntBitsForScale(dicts):
    minScale, maxScale = _calcMinMaxScale(dicts)
    numIntBits = _calcNumIntBits(minScale, maxScale)
    return numIntBits


def _calcNumIntBits(minValue, maxValue, maxIntBits=7):
    # TODO: there must be a better way, but at least this is correct
    assert minValue <= maxValue
    for i in range(maxIntBits):
        precisionBits = 16 - i
        minIntVal = floatToFixed(minValue, precisionBits)
        maxIntVal = floatToFixed(maxValue, precisionBits)
        if -32768 <= minIntVal and maxIntVal <= 32767:
            # use one more: deltas may be bigger! (this is rather fuzzy)
            return i + 1
    raise ValueError("value does not fit in maxBits")
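
# Worked example for _calcNumIntBits(): with scales in [-1.5, 1.5], 14
# fraction bits are the most that keep floatToFixed(1.5, precisionBits)
# within int16 (1.5 * 2**14 == 24576 <= 32767), so the loop exits at i == 2
# and 3 integer bits are reported: one more than the values strictly need,
# as headroom for the deltas.
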

def _calcMinMaxScale(transformDicts):
    minScale = 0
    maxScale = 0
    for d in transformDicts:
        minScale = min(minScale, d.get("ScaleX", 0))
        minScale = min(minScale, d.get("ScaleY", 0))
        maxScale = max(maxScale, d.get("ScaleX", 0))
        maxScale = max(maxScale, d.get("ScaleY", 0))
    return minScale, maxScale


def remapVarIdxs(precompiled, mapping):
    for glyphName, components in precompiled.items():
        for component in components:
            for v in component.coord.values():
                if VARIDX_KEY in v:
                    v[VARIDX_KEY] = mapping[v[VARIDX_KEY]]
            for v in component.transform.values():
                if VARIDX_KEY in v:
                    v[VARIDX_KEY] = mapping[v[VARIDX_KEY]]


def buildVarCTable(ttf, vcData, allLocations):
    axisTags = [axis.axisTag for axis in ttf["fvar"].axes]
    varc_table = ttf["VarC"] = newTable("VarC")
    varc_table.Version = 0x00010000
    precompiled, store = precompileAllComponents(vcData, allLocations, axisTags)
    mapping = store.optimize()
    remapVarIdxs(precompiled, mapping)
    varc_table.GlyphData = precompiled
    varc_table.VarStore = store


def buildVarC(
    designspacePath, ttfPath, outTTFPath, doTTX, saveWoff2, neutralOnly=False
):
    import pathlib

    registerCustomTableClass("VarC", "rcjktools.table_VarC", "table_VarC")
    ttfPath = pathlib.Path(ttfPath)
    if outTTFPath is None:
        outTTFPath = ttfPath.parent / (ttfPath.stem + "-varc" + ttfPath.suffix)
    else:
        outTTFPath = pathlib.Path(outTTFPath)
    ttf = TTFont(ttfPath)
    axisTags = [axis.axisTag for axis in ttf["fvar"].axes]
    globalAxisNames = {axisTag for axisTag in axisTags if axisTag[0] != "V"}
    vcFont = VarCoFont(designspacePath)
    vcData, allLocations, neutralGlyphNames = vcFont.extractVarCoData(
        globalAxisNames, neutralOnly
    )
    if neutralGlyphNames:
        gvarTable = ttf["gvar"]
        for glyphName in neutralGlyphNames:
            del gvarTable.variations[glyphName]
    buildVarCTable(ttf, vcData, allLocations)
    if doTTX:
        outTTXPath = outTTFPath.parent / (outTTFPath.stem + "-before.ttx")
        ttf.saveXML(outTTXPath, tables=["VarC"])
    ttf.save(outTTFPath)
    ttf = TTFont(outTTFPath, lazy=True)  # Load from scratch
    if doTTX:
        outTTXPath = outTTFPath.parent / (outTTFPath.stem + "-after.ttx")
        ttf.saveXML(outTTXPath, tables=["VarC"])
    if saveWoff2:
        outWoff2Path = outTTFPath.parent / (outTTFPath.stem + ".woff2")
        ttf.flavor = "woff2"
        ttf.save(outWoff2Path)


def main():
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("designspace", help="The VarCo .designspace source")
    parser.add_argument("ttf", help="The input Variable Font")
    parser.add_argument("--output", help="The output Variable Font")
    parser.add_argument(
        "--ttx", action="store_true", help="write TTX dumps for the VarC table."
    )
    parser.add_argument("--no-woff2", action="store_true")
    parser.add_argument(
        "--neutral-only",
        action="store_true",
        help="hack: build a pseudo static COLRv1 table, that won't respond to the "
        "non-hidden axes",
    )
    args = parser.parse_args()
    buildVarC(
        args.designspace,
        args.ttf,
        args.output,
        args.ttx,
        not args.no_woff2,
        args.neutral_only,
    )


if __name__ == "__main__":
    main()
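
# Usage sketch: how this builder is typically driven. The designspace and
# font paths below are hypothetical placeholders, as is the module file name.
#
#     $ python buildVarC.py MyFont.designspace MyFont-VF.ttf --output MyFont-VarC.ttf --ttx
#
# or, calling into the module directly:
#
#     buildVarC(
#         "MyFont.designspace",  # VarCo designspace source
#         "MyFont-VF.ttf",       # input variable font
#         None,                  # None derives "<input stem>-varc<suffix>"
#         doTTX=False,
#         saveWoff2=True,
#     )
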
[ "port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self, portconf, **filter_kwargs):", "= port_override['portconf_id'] if port_override and 'portconf_id' in port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(),", "portconf def set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx'])", "import find_by_attr, json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override", "set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override:", "port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override and 'portconf_id' in port_override", "portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table,", "port_override['portconf_id'] = portconf['_id'] else: port_override = { 'port_idx': port['port_idx'], 'portconf_id': portconf['_id'] } self.port_overrides.append(port_override)", "port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override", "port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self, portconf, **filter_kwargs): port =", "UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id", "find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override", "port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id']", "find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id'] else: port_override = { 'port_idx': port['port_idx'],", "= find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override and 'portconf_id' in port_override else", "and 'portconf_id' in port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def", "port_override and 'portconf_id' in port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf", "unifi.objects.base import UnifiBaseObject from unifi.helper import find_by_attr, json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs):", "else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self, portconf, **filter_kwargs): port", "unifi.helper import find_by_attr, json_print class 
UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs)", "**filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override and 'portconf_id' in", "find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override and 'portconf_id' in port_override else port['portconf_id']", "in port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self, portconf,", "portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id']", "UnifiBaseObject from unifi.helper import find_by_attr, json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port =", "json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides,", "port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override and 'portconf_id' in port_override else port['portconf_id'] portconf", "_id=portconf_id) return portconf def set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override =", "def set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if", "= find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs)", "return portconf def set_port_profile(self, portconf, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides,", "**filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] =", "port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id'] else: port_override = { 'port_idx': port['port_idx'], 'portconf_id':", "port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id'] else: port_override = {", "find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override and 'portconf_id'", "find_by_attr, json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override =", "**filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if", "if port_override and 'portconf_id' in port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return", "get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, 
port_idx=port['port_idx']) portconf_id = port_override['portconf_id']", "find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id'] else: port_override", "**filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id'] else: port_override =", "'portconf_id' in port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id) return portconf def set_port_profile(self,", "= find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id'] else:", "= find_by_attr(self.port_overrides, port_idx=port['port_idx']) if port_override: port_override['portconf_id'] = portconf['_id'] else: port_override = { 'port_idx':", "if port_override: port_override['portconf_id'] = portconf['_id'] else: port_override = { 'port_idx': port['port_idx'], 'portconf_id': portconf['_id']", "from unifi.objects.base import UnifiBaseObject from unifi.helper import find_by_attr, json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self,", "from unifi.helper import find_by_attr, json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table,", "class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx'])", "def get_port_profile(self, **filter_kwargs): port = find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id =", "import UnifiBaseObject from unifi.helper import find_by_attr, json_print class UnifiDeviceObject(UnifiBaseObject): def get_port_profile(self, **filter_kwargs): port", "= find_by_attr(self.port_table, **filter_kwargs) port_override = find_by_attr(self.port_overrides, port_idx=port['port_idx']) portconf_id = port_override['portconf_id'] if port_override and", "port_override['portconf_id'] if port_override and 'portconf_id' in port_override else port['portconf_id'] portconf = find_by_attr(self.controller.portconf(), _id=portconf_id)", "port_override: port_override['portconf_id'] = portconf['_id'] else: port_override = { 'port_idx': port['port_idx'], 'portconf_id': portconf['_id'] }", "portconf_id = port_override['portconf_id'] if port_override and 'portconf_id' in port_override else port['portconf_id'] portconf =" ]
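get_port_profile prefers a per-port override's portconf_id over the port's own before looking the profile up on the controller. The find_by_attr helper is imported but not shown; a plausible sketch consistent with how it is called here (first matching dict in a list, None when nothing matches) is:

def find_by_attr(items, **filters):
    # Return the first dict whose entries equal every keyword filter.
    for item in items or []:
        if all(item.get(key) == value for key, value in filters.items()):
            return item
    return None

With that reading, set_port_profile mutates a matched override in place and only appends a fresh override dict when none existed for the port.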
[ "#-------------------------------------------------- # for case in xrange(input()): # k = input() # indexes =", "- 1) # print deck #-------------------------------------------------- print 'Case #%d: %s' % (case +", "1)%cards if deck[index] == 0: break for j in xrange(i - 1): while", "in xrange(input()): cards = input() indexes = map(int, raw_input().split()) deck = [0 for", "case in xrange(input()): # k = input() # indexes = map(int, raw_input().split()) #", "# deck.appendleft(card) # print deck # deck.rotate(card - 1) # print deck #--------------------------------------------------", "import deque for case in xrange(input()): cards = input() indexes = map(int, raw_input().split())", "#%d: %s' % (case + 1, ' '.join(str(deck[i - 1]) for i in", "for i in xrange(cards)] index = -1 for i in xrange(1, cards +", "True: index = (index + 1)%cards if deck[index] == 0: break for j", "if deck[index] == 0: break for j in xrange(i - 1): while True:", "in xrange(input()): # k = input() # indexes = map(int, raw_input().split()) # #", "index = -1 for i in xrange(1, cards + 1): while True: index", "deck.rotate(card - 1) # print deck #-------------------------------------------------- print 'Case #%d: %s' % (case", "# for card in xrange(k, 0, -1): # deck.appendleft(card) # print deck #", "= -1 for i in xrange(1, cards + 1): while True: index =", "= input() # indexes = map(int, raw_input().split()) # # deck = deque() #", "= (index + 1)%cards if deck[index] == 0: break deck[index] = i #--------------------------------------------------", "# deck = deque() # for card in xrange(k, 0, -1): # deck.appendleft(card)", "for card in xrange(k, 0, -1): # deck.appendleft(card) # print deck # deck.rotate(card", "0: break for j in xrange(i - 1): while True: index = (index", "0, -1): # deck.appendleft(card) # print deck # deck.rotate(card - 1) # print", "'Case #%d: %s' % (case + 1, ' '.join(str(deck[i - 1]) for i", "# for case in xrange(input()): # k = input() # indexes = map(int,", "= (index + 1)%cards if deck[index] == 0: break for j in xrange(i", "+ 1): while True: index = (index + 1)%cards if deck[index] == 0:", "print deck # deck.rotate(card - 1) # print deck #-------------------------------------------------- print 'Case #%d:", "j in xrange(i - 1): while True: index = (index + 1)%cards if", "deck[index] = i #-------------------------------------------------- # for case in xrange(input()): # k = input()", "map(int, raw_input().split()) deck = [0 for i in xrange(cards)] index = -1 for", "deck = [0 for i in xrange(cards)] index = -1 for i in", "xrange(input()): cards = input() indexes = map(int, raw_input().split()) deck = [0 for i", "(index + 1)%cards if deck[index] == 0: break for j in xrange(i -", "in xrange(i - 1): while True: index = (index + 1)%cards if deck[index]", "deck[index] == 0: break for j in xrange(i - 1): while True: index", "deck.appendleft(card) # print deck # deck.rotate(card - 1) # print deck #-------------------------------------------------- print", "index = (index + 1)%cards if deck[index] == 0: break for j in", "deck[index] == 0: break deck[index] = i #-------------------------------------------------- # for case in xrange(input()):", "indexes = map(int, raw_input().split()) # # deck = deque() # for card in", "+ 1)%cards if deck[index] == 0: break for j in xrange(i - 1):", "break deck[index] = i #-------------------------------------------------- # for case in xrange(input()): # k =", "input() # indexes = map(int, raw_input().split()) # # deck = deque() # for", "cards = input() indexes = 
map(int, raw_input().split()) deck = [0 for i in", "%s' % (case + 1, ' '.join(str(deck[i - 1]) for i in indexes[1:]))", "in xrange(k, 0, -1): # deck.appendleft(card) # print deck # deck.rotate(card - 1)", "for case in xrange(input()): cards = input() indexes = map(int, raw_input().split()) deck =", "True: index = (index + 1)%cards if deck[index] == 0: break deck[index] =", "i #-------------------------------------------------- # for case in xrange(input()): # k = input() # indexes", "-1): # deck.appendleft(card) # print deck # deck.rotate(card - 1) # print deck", "input() indexes = map(int, raw_input().split()) deck = [0 for i in xrange(cards)] index", "raw_input().split()) deck = [0 for i in xrange(cards)] index = -1 for i", "-1 for i in xrange(1, cards + 1): while True: index = (index", "deck #-------------------------------------------------- print 'Case #%d: %s' % (case + 1, ' '.join(str(deck[i -", "break for j in xrange(i - 1): while True: index = (index +", "xrange(cards)] index = -1 for i in xrange(1, cards + 1): while True:", "i in xrange(1, cards + 1): while True: index = (index + 1)%cards", "xrange(1, cards + 1): while True: index = (index + 1)%cards if deck[index]", "if deck[index] == 0: break deck[index] = i #-------------------------------------------------- # for case in", "0: break deck[index] = i #-------------------------------------------------- # for case in xrange(input()): # k", "print 'Case #%d: %s' % (case + 1, ' '.join(str(deck[i - 1]) for", "= [0 for i in xrange(cards)] index = -1 for i in xrange(1,", "(index + 1)%cards if deck[index] == 0: break deck[index] = i #-------------------------------------------------- #", "1) # print deck #-------------------------------------------------- print 'Case #%d: %s' % (case + 1,", "deque for case in xrange(input()): cards = input() indexes = map(int, raw_input().split()) deck", "i in xrange(cards)] index = -1 for i in xrange(1, cards + 1):", "xrange(i - 1): while True: index = (index + 1)%cards if deck[index] ==", "xrange(k, 0, -1): # deck.appendleft(card) # print deck # deck.rotate(card - 1) #", "map(int, raw_input().split()) # # deck = deque() # for card in xrange(k, 0,", "card in xrange(k, 0, -1): # deck.appendleft(card) # print deck # deck.rotate(card -", "= input() indexes = map(int, raw_input().split()) deck = [0 for i in xrange(cards)]", "== 0: break for j in xrange(i - 1): while True: index =", "index = (index + 1)%cards if deck[index] == 0: break deck[index] = i", "collections import deque for case in xrange(input()): cards = input() indexes = map(int,", "for i in xrange(1, cards + 1): while True: index = (index +", "deque() # for card in xrange(k, 0, -1): # deck.appendleft(card) # print deck", "in xrange(cards)] index = -1 for i in xrange(1, cards + 1): while", "== 0: break deck[index] = i #-------------------------------------------------- # for case in xrange(input()): #", "raw_input().split()) # # deck = deque() # for card in xrange(k, 0, -1):", "for case in xrange(input()): # k = input() # indexes = map(int, raw_input().split())", "indexes = map(int, raw_input().split()) deck = [0 for i in xrange(cards)] index =", "in xrange(1, cards + 1): while True: index = (index + 1)%cards if", "# k = input() # indexes = map(int, raw_input().split()) # # deck =", "deck # deck.rotate(card - 1) # print deck #-------------------------------------------------- print 'Case #%d: %s'", "# print deck # deck.rotate(card - 1) # print deck #-------------------------------------------------- print 'Case", "case in xrange(input()): cards 
= input() indexes = map(int, raw_input().split()) deck = [0", "# deck.rotate(card - 1) # print deck #-------------------------------------------------- print 'Case #%d: %s' %", "= i #-------------------------------------------------- # for case in xrange(input()): # k = input() #", "1)%cards if deck[index] == 0: break deck[index] = i #-------------------------------------------------- # for case", "# print deck #-------------------------------------------------- print 'Case #%d: %s' % (case + 1, '", "while True: index = (index + 1)%cards if deck[index] == 0: break deck[index]", "k = input() # indexes = map(int, raw_input().split()) # # deck = deque()", "xrange(input()): # k = input() # indexes = map(int, raw_input().split()) # # deck", "1): while True: index = (index + 1)%cards if deck[index] == 0: break", "[0 for i in xrange(cards)] index = -1 for i in xrange(1, cards", "= map(int, raw_input().split()) # # deck = deque() # for card in xrange(k,", "#-------------------------------------------------- print 'Case #%d: %s' % (case + 1, ' '.join(str(deck[i - 1])", "+ 1)%cards if deck[index] == 0: break deck[index] = i #-------------------------------------------------- # for", "= map(int, raw_input().split()) deck = [0 for i in xrange(cards)] index = -1", "- 1): while True: index = (index + 1)%cards if deck[index] == 0:", "print deck #-------------------------------------------------- print 'Case #%d: %s' % (case + 1, ' '.join(str(deck[i", "for j in xrange(i - 1): while True: index = (index + 1)%cards", "= deque() # for card in xrange(k, 0, -1): # deck.appendleft(card) # print", "deck = deque() # for card in xrange(k, 0, -1): # deck.appendleft(card) #", "# indexes = map(int, raw_input().split()) # # deck = deque() # for card", "cards + 1): while True: index = (index + 1)%cards if deck[index] ==", "from collections import deque for case in xrange(input()): cards = input() indexes =", "# # deck = deque() # for card in xrange(k, 0, -1): #", "while True: index = (index + 1)%cards if deck[index] == 0: break for" ]
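The placement loop advances to the next empty slot and then skips i - 1 further empty slots before writing card i; the commented deque variant rehearses the same trick with rotations instead. The same logic as a self-contained Python 3 function (a direct port of the loop above, not new behaviour):

def place_cards(cards):
    deck = [0] * cards
    index = -1
    for i in range(1, cards + 1):
        # Advance to the next empty slot.
        while True:
            index = (index + 1) % cards
            if deck[index] == 0:
                break
        # Skip i - 1 additional empty slots.
        for _ in range(i - 1):
            while True:
                index = (index + 1) % cards
                if deck[index] == 0:
                    break
        deck[index] = i
    return deck

print(place_cards(5))  # [1, 4, 2, 3, 5]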
[ "\"Bearer %s\" % token, } state_off ={ \"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\":", "={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, } # URL base a ser", "#print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers)", "{ \"Authorization\": \"Bearer %s\" % token, } state_off ={ \"power\": \"off\", \"color\": \"blue", "a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4", "todas as lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2, data={}, headers=headers) #print(scenes.text)", "0.5, } state1 ={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5, } state2 ={", "\"brightness\": 0.5, } state1 ={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5, } state2", "url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET -", "\"rgb:0,140,251\", \"brightness\": 1, } # URL base a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all'", "= requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate =", "={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5, } state2 ={ \"power\": \"on\", \"color\":", "\"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer %s\" % token, } state_off ={ \"power\":", "\"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5, } state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\",", "headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251},", "state1 ={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5, } state2 ={ \"power\": \"on\",", "# \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer %s\" % token, } state_off", "headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5)", "requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate", "data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off,", "#print(activate.text) ''' cycles=0 while cycles == 0: sleep(2) t_power = requests.post(url_4,headers=headers) print(t_power.text) '''", "#sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, 
headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text)", "\"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5, } state1 ={ \"power\": \"on\", \"color\": \"yellow\",", "} # URL base a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes'", "= requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate =", "state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, } # URL base a", "headers = { \"Authorization\": \"Bearer %s\" % token, } state_off ={ \"power\": \"off\",", "\"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer %s\" % token, }", "#print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2,", "= requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\":", "\"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5, } state1 ={ \"power\": \"on\", \"color\":", "time import sleep token = \"<KEY>\" # ID da minha lampada: # \"id\":", "\"Authorization\": \"Bearer %s\" % token, } state_off ={ \"power\": \"off\", \"color\": \"blue saturation:0.5\",", "'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista todas as lampadas response = requests.get(url_1,headers=headers) print(response.text)", "\"yellow\", \"brightness\": 0.5, } state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, }", "#activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while cycles == 0: sleep(2)", "GET - lista todas as lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2,", "url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista todas as lampadas response =", "\"on\", \"color\": \"yellow\", \"brightness\": 0.5, } state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\":", "= \"<KEY>\" # ID da minha lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\",", "={ \"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5, } state1 ={ \"power\": \"on\",", "acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle'", "requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate", "= { \"Authorization\": \"Bearer %s\" % token, } state_off ={ \"power\": \"off\", \"color\":", "#t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', 
data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate", "saturation:0.5\", \"brightness\": 0.5, } state1 ={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5, }", "251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers)", "#sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while cycles == 0:", "minha lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer", "url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista todas as", "= 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request", "1, } # URL base a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 =", "= requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0", "token, } state_off ={ \"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5, } state1", "headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers)", "- lista todas as lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2, data={},", "# URL base a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3", "URL base a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 =", "da minha lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\":", "url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' #", "= 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista", "\"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, } # URL base a ser acessada", "\"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer %s\" % token, } state_off ={", "\"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer %s\" % token,", "import sleep token = \"<KEY>\" # ID da minha lampada: # \"id\": \"d0<PASSWORD>502164d\",", "% token, } state_off ={ \"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5, }", "} state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, } # URL base", "lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes = 
requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power =", "requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while cycles == 0: sleep(2) t_power =", "state_off ={ \"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5, } state1 ={ \"power\":", "requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\":", "Request GET - lista todas as lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes =", "headers=headers) #print(activate.text) ''' cycles=0 while cycles == 0: sleep(2) t_power = requests.post(url_4,headers=headers) print(t_power.text)", "\"<KEY>\" # ID da minha lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers", "'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET", "data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate =", "\"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state',", "} state1 ={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5, } state2 ={ \"power\":", "ID da minha lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = {", "import requests from time import sleep token = \"<KEY>\" # ID da minha", "'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista todas", "data=state1, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while cycles", "lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer %s\"", "\"brightness\": 0.5, } state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, } #", "base a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state'", "# \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers = { \"Authorization\": \"Bearer %s\" %", "\"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1,", "requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate = 
requests.put('https://api.lifx.com/v1/lights/d073d502164d/state',", "as lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power", "ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2 = 'https://api.lifx.com/v1/scenes' url_3 = 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 =", "data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while cycles == 0: sleep(2) t_power = requests.post(url_4,headers=headers)", "= 'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista todas as lampadas", "sleep token = \"<KEY>\" # ID da minha lampada: # \"id\": \"d0<PASSWORD>502164d\", #", "lista todas as lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2, data={}, headers=headers)", "%s\" % token, } state_off ={ \"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5,", "response = requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers)", "#activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) '''", "= requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while cycles == 0: sleep(2) t_power", "<reponame>rsilveira79/Utils-python<filename>lifx_rest.py import requests from time import sleep token = \"<KEY>\" # ID da", "from time import sleep token = \"<KEY>\" # ID da minha lampada: #", "\"color\": \"blue saturation:0.5\", \"brightness\": 0.5, } state1 ={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\":", "headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while cycles ==", "\"color\": \"rgb:0,140,251\", \"brightness\": 1, } # URL base a ser acessada url_1 =", "requests from time import sleep token = \"<KEY>\" # ID da minha lampada:", "token = \"<KEY>\" # ID da minha lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\":", "\"color\": \"yellow\", \"brightness\": 0.5, } state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1,", "\"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, } # URL base a ser acessada url_1", "\"brightness\": 1, } # URL base a ser acessada url_1 = 'https://api.lifx.com/v1/lights/all' url_2", "= requests.get(url_1,headers=headers) print(response.text) #scenes = requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text)", "= requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5)", "requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state_off, headers=headers) #print(activate.text) ''' cycles=0 while", "print(response.text) 
#scenes = requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate =", "0.5, } state2 ={ \"power\": \"on\", \"color\": \"rgb:0,140,251\", \"brightness\": 1, } # URL", "#activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers) #sleep(5) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state1, headers=headers) #sleep(5) #activate", "activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data=state2, headers=headers)", "= 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista todas as lampadas response = requests.get(url_1,headers=headers)", "} state_off ={ \"power\": \"off\", \"color\": \"blue saturation:0.5\", \"brightness\": 0.5, } state1 ={", "data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\":", "requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state', data={\"power\": \"on\",\"color\": \"rgb:128,128,128\",\"brightness\": 251}, headers=headers) #activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state',", "'https://api.lifx.com/v1/lights/d073d502164d/state' url_4 = 'https://api.lifx.com/v1/lights/d073d502164d/toggle' # Request GET - lista todas as lampadas response", "# Request GET - lista todas as lampadas response = requests.get(url_1,headers=headers) print(response.text) #scenes", "# ID da minha lampada: # \"id\": \"d0<PASSWORD>502164d\", # \"uuid\": \"021063bb-1cae-416b-bbff-3dbe5cc22a35\", headers =", "\"blue saturation:0.5\", \"brightness\": 0.5, } state1 ={ \"power\": \"on\", \"color\": \"yellow\", \"brightness\": 0.5,", "#scenes = requests.get(url_2, data={}, headers=headers) #print(scenes.text) #t_power = requests.post(url_4,headers=headers) #print(t_power.text) activate = requests.put('https://api.lifx.com/v1/lights/d073d502164d/state'," ]
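One caveat on the live PUT above: the LIFX HTTP API documents brightness as a float between 0.0 and 1.0, so "brightness": 251 looks like a leftover from a 0-255 scale and may be rejected. A small helper over the same endpoint, keeping the payload in range (set_state is an illustrative name, not part of the script):

import requests

def set_state(light_id, token, power="on", color="white", brightness=1.0):
    # PUT /v1/lights/:selector/state, authenticated with a Bearer token.
    url = "https://api.lifx.com/v1/lights/%s/state" % light_id
    headers = {"Authorization": "Bearer %s" % token}
    payload = {"power": power, "color": color, "brightness": brightness}
    return requests.put(url, data=payload, headers=headers)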
[ "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "in range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_settings_environment(self): env = self._create_env({'steps': '5'}) env.start(episode=0,", "License is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "objects. self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape': () }) def test_lab2d_action_spec(self): env =", "dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'}) return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None): settings", "{ 'dtype': np.dtype('int32'), 'shape': (3,) }) self.assertEqual( env.observation_spec('VIEW4'), { 'dtype': np.dtype('int64'), 'shape': (4,)", "'5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError): env.write_property('steps', 'mouse') if __name__ == '__main__': absltest.main()", "def test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env = self._create_env()", "self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _,", "def test_lab2d_events_cleared_after_advance_not_read(self): env = self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events())", "self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key')", "test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance()", "1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for", "-5, 'max': 5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0)", "this file except in compliance with the License. 
# You may obtain a", "range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_settings_environment(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0)", "env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _, reward = env.advance() self.assertEqual(reward, 2) def test_lab2d_act_continuous(self): env", "def test_lab2d_act_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.act_discrete([0]) with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError):", "def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError): env.write_property('steps', 'mouse')", "self._create_env() env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2]) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'),", "test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _, reward = env.advance()", "'5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for _ in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING)", "self.assertEqual(self._create_env().name(), 'dmlab2d') def test_lab2d_observation_names(self): env = self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i", "'3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for _ in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED)", "ANY KIND, either express or implied. # See the License for the specific", "env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'), 'shape': (2,) }) self.assertEqual( env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'), 'shape':", "self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing') with self.assertRaises(KeyError): env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env =", "'examples/level_api'}) return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None): settings = extra_settings.copy()", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "'min': 0, 'max': 4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'), { 'min': -5, 'max':", "self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i in range(1, 6)]) def test_lab2d_observation_spec(self): env", "self.assertEmpty(env.events()) def test_lab2d_observe(self): env = self._create_env() env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2])", "self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12,", "env = self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self):", "'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'), 'shape': (2,) })", "def test_lab2d_observation_spec(self): env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,) })", "env = self._create_env({'steps': '2'}) env.start(0, 0) self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) with self.assertRaises(RuntimeError): env.advance()", "self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps',", "dmlab2d from dmlab2d import runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self): lab2d = dmlab2d.Lab2d(runfiles_helper.find(),", "= self._create_env({'steps': '5'}) env.start(episode=0, seed=0) for _ in range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED)", "OF ANY KIND, either express or implied. 
# See the License for the", "'10') def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError): env.write_property('steps',", "from absl.testing import absltest from dm_env import test_utils import numpy as np import", "test_lab2d_act_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.act_discrete([0]) with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text([''])", "np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13]) def test_lab2d_act_text(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view", "self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0) for _ in range(9):", "2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0) for", "self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError): env.write_property('steps', 'mouse') if __name__ == '__main__':", "test_lab2d_bad_action_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env", "}) self.assertEqual( env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'), 'shape': (3,) }) self.assertEqual( env.observation_spec('VIEW4'), { 'dtype':", "settings) def test_lab2d_environment_name(self): self.assertEqual(self._create_env().name(), 'dmlab2d') def test_lab2d_observation_names(self): env = self._create_env() self.assertEqual(env.observation_names(), ['VIEW' +", "'5'}) properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3')", "def test_lab2d_bad_observation_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env()", "env.start(episode=0, seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env = self._create_env()", "4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0) for _ in", "13]) def test_lab2d_act_text(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5') self.assertEqual(view,", "env = self._create_env() env.start(episode=0, seed=0) events = env.events() self.assertLen(events, 1) event_name, observations =", "self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0, seed=0) events = env.events()", "1]) def test_lab2d_advance_after_episode_ends(self): env = self._create_env({'steps': '2'}) env.start(0, 0) self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED)", "'shape': (4,) }) # Text is stored in objects. 
self.assertEqual( env.observation_spec('VIEW5'), { 'dtype':", "i in range(1, 6)]) def test_lab2d_observation_spec(self): env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype':", "= env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3')", "np.testing.assert_array_equal(observations[0], [1, 2, 3]) def test_lab2d_events_cleared_after_advance_not_read(self): env = self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(), 1)", "= dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'}) return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None):", "dm_env import test_utils import numpy as np import dmlab2d from dmlab2d import runfiles_helper", "self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i in range(1, 6)]) def test_lab2d_observation_spec(self): env =", "np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13]) def test_lab2d_act_text(self): env", "= self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0, seed=0) events =", "env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _, reward = env.advance() self.assertEqual(reward,", "self.assertRaises(ValueError): env.act_discrete([0, 1]) with self.assertRaises(ValueError): env.act_continuous([0, 1]) def test_lab2d_advance_after_episode_ends(self): env = self._create_env({'steps': '2'})", "np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2]) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'),", "in range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties", "np.dtype('O'), 'shape': () }) def test_lab2d_action_spec(self): env = self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'),", "1) np.testing.assert_array_equal(observations[0], [1, 2, 3]) def test_lab2d_events_cleared_after_advance_not_read(self): env = self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(),", "b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0) for _ in range(9): self.assertEqual(env.advance()[0],", "env = self._create_env() env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2]) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2,", "}) self.assertEqual( env.observation_spec('VIEW4'), { 'dtype': np.dtype('int64'), 'shape': (4,) }) # Text is stored", "'shape': () }) def test_lab2d_action_spec(self): env = self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), {", "test_lab2d_observation_names(self): env = 
self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i in range(1, 6)])", "env.action_continuous_spec('OBSERVATION_ACT'), { 'min': -5, 'max': 5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env =", "seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _, reward = env.advance() self.assertEqual(reward, 2) def test_lab2d_act_continuous(self): env =", "# Copyright 2019 The DMLab2D Authors. # # Licensed under the Apache License,", "self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) with self.assertRaises(RuntimeError): env.advance() def test_lab2d_missing_properties(self): env = self._create_env({'steps': '5'})", "b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self): env = self._create_env() with", "= self._create_env() env.start(episode=0, seed=0) for _ in range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "env = self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env =", "self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env = self._create_env() env.start(episode=0, seed=0)", "self._create_env() env.start(0, 0) with self.assertRaises(ValueError): env.act_discrete([0, 1]) with self.assertRaises(ValueError): env.act_continuous([0, 1]) def test_lab2d_advance_after_episode_ends(self):", "'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(), settings) def test_lab2d_environment_name(self): self.assertEqual(self._create_env().name(), 'dmlab2d') def test_lab2d_observation_names(self): env = self._create_env()", "Authors. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "_ in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'})", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "'dtype': np.dtype('double'), 'shape': (2,) }) self.assertEqual( env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'), 'shape': (3,) })", "'5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view = env.observation('VIEW5')", "= self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env = self._create_env()", "required by applicable law or agreed to in writing, software # distributed under", "= self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i in range(1, 6)]) def test_lab2d_observation_spec(self):", "\"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "(4,) }) # Text is stored in objects. 
self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'),", "runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self): lab2d = dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'}) return dmlab2d.Environment(lab2d,", "= self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'), {", "applicable law or agreed to in writing, software # distributed under the License", "env = self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError):", "import dmlab2d from dmlab2d import runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self): lab2d =", "self._create_env({'steps': '5'}) with self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing') with self.assertRaises(KeyError): env.write_property('missing', '10') def", "(2,) }) self.assertEqual( env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'), 'shape': (3,) }) self.assertEqual( env.observation_spec('VIEW4'), {", "seed=0) for _ in range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_settings_environment(self): env =", "with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0, 0)", "or agreed to in writing, software # distributed under the License is distributed", "(3,) }) self.assertEqual( env.observation_spec('VIEW4'), { 'dtype': np.dtype('int64'), 'shape': (4,) }) # Text is", "test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0, 0) with self.assertRaises(ValueError): env.act_discrete([0, 1]) with self.assertRaises(ValueError): env.act_continuous([0,", "is stored in objects. self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape': () }) def", "{ 'min': -5, 'max': 5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env()", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "env = self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i in range(1, 6)]) def", "env.act_discrete([0]) with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0,", "}) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'), { 'min': -5, 'max': 5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT'])", "'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'), 'shape': (2,) }) self.assertEqual( env.observation_spec('VIEW3'),", "import absltest from dm_env import test_utils import numpy as np import dmlab2d from", "self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self): env = self._create_env()", "self.assertLen(events, 1) event_name, observations = events[0] self.assertEqual(event_name, 'start') self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0], [1, 2,", "dmlab2d import runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self): lab2d = dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'})", "'5'}) env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _, reward = env.advance() self.assertEqual(reward, 2) def test_lab2d_act_continuous(self):", "'dmlab2d') def test_lab2d_observation_names(self): env = self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i in", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with", "env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def", "properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'),", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties,", "License. 
# You may obtain a copy of the License at # #", "env.events() self.assertLen(events, 1) event_name, observations = events[0] self.assertEqual(event_name, 'start') self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0], [1,", "def make_object_under_test(self): lab2d = dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'}) return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase):", "env.action_discrete_spec('REWARD_ACT'), { 'min': 0, 'max': 4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'), { 'min':", "test_lab2d_observation_spec(self): env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual(", "env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self): env =", "env = self._create_env() with self.assertRaises(RuntimeError): env.act_discrete([0]) with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def", "with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env = self._create_env() with self.assertRaises(KeyError):", "compliance with the License. # You may obtain a copy of the License", "env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError):", "_ in range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'})", "['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env = self._create_env()", "test_lab2d_act_text(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello'])", "for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import from __future__ import division from __future__ import", "self.assertEqual( env.observation_spec('VIEW4'), { 'dtype': np.dtype('int64'), 'shape': (4,) }) # Text is stored in", "'max': 4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'), { 'min': -5, 'max': 5 })", "{ 'dtype': np.dtype('double'), 'shape': (2,) }) self.assertEqual( env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'), 'shape': (3,)", "env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError): env.write_property('steps', 'mouse') if __name__", "dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0],", "self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape': () }) def test_lab2d_action_spec(self): env = self._create_env()", "seed=0) for _ in range(2): 
self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env =", "def test_lab2d_bad_action_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self):", "the License. \"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import from __future__ import division", "def test_lab2d_environment_name(self): self.assertEqual(self._create_env().name(), 'dmlab2d') def test_lab2d_observation_names(self): env = self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i)", "absltest.TestCase): def make_object_under_test(self): lab2d = dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'}) return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class", "self.assertRaises(RuntimeError): env.advance() def test_lab2d_missing_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError):", "2, 3]) def test_lab2d_events_cleared_after_advance_not_read(self): env = self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1)", "view = env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties = env.list_property('')", "1]) with self.assertRaises(ValueError): env.act_continuous([0, 1]) def test_lab2d_advance_after_episode_ends(self): env = self._create_env({'steps': '2'}) env.start(0, 0)", "absltest from dm_env import test_utils import numpy as np import dmlab2d from dmlab2d", "seed=0) events = env.events() self.assertLen(events, 1) event_name, observations = events[0] self.assertEqual(event_name, 'start') self.assertLen(observations,", "= extra_settings.copy() if extra_settings else {} settings['levelName'] = 'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(), settings) def", "env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13]) def", "extra_settings=None): settings = extra_settings.copy() if extra_settings else {} settings['levelName'] = 'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(),", "software # distributed under the License is distributed on an \"AS-IS\" BASIS, #", "seed=0) for _ in range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env =", "with self.assertRaises(RuntimeError): env.act_discrete([0]) with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self): env =", "is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "distributed under the License is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES", 
"test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env = self._create_env() with", "np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0,", "not use this file except in compliance with the License. # You may", "env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'),", "env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0, 0) with self.assertRaises(ValueError):", "}) def test_lab2d_action_spec(self): env = self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min': 0,", "seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13]) def test_lab2d_act_text(self):", "License, Version 2.0 (the \"License\"); # you may not use this file except", "view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self):", "self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view =", "import absolute_import from __future__ import division from __future__ import print_function from absl.testing import", "1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env = self._create_env() env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'),", "self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties, 1)", "test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE))", "in range(1, 6)]) def test_lab2d_observation_spec(self): env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'),", "[11, 12, 13]) def test_lab2d_act_text(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view =", "env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def", "self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'),", "def test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _, reward =", "# you may not use this file except in 
compliance with the License.", "and # limitations under the License. \"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import", "__future__ import division from __future__ import print_function from absl.testing import absltest from dm_env", "from __future__ import print_function from absl.testing import absltest from dm_env import test_utils import", "def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0, seed=0) events = env.events() self.assertLen(events, 1) event_name,", "agreed to in writing, software # distributed under the License is distributed on", "env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2]) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1,", "reward = env.advance() self.assertEqual(reward, 2) def test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0)", "self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for _ in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def", "(the \"License\"); # you may not use this file except in compliance with", "[1, 2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env", "import test_utils import numpy as np import dmlab2d from dmlab2d import runfiles_helper class", "env.start(episode=0, seed=0) def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0, seed=0) events = env.events() self.assertLen(events,", "def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0) for _ in range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING)", "# Unless required by applicable law or agreed to in writing, software #", "under the License. \"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import from __future__ import", "by applicable law or agreed to in writing, software # distributed under the", "def test_lab2d_action_spec(self): env = self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min': 0, 'max':", "permissions and # limitations under the License. 
\"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import", "def test_lab2d_missing_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing') with", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min': 0, 'max': 4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual(", "env = self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'),", "str(i) for i in range(1, 6)]) def test_lab2d_observation_spec(self): env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'),", "self.assertRaises(RuntimeError): env.act_discrete([0]) with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self): env = self._create_env()", "'5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13])", "def test_lab2d_settings_environment(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) for _ in range(4): self.assertEqual(env.advance()[0],", "with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self):", "dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32')))", "self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def", "env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env =", "file except in compliance with the License. 
# You may obtain a copy", "def test_lab2d_observation_names(self): env = self._create_env() self.assertEqual(env.observation_names(), ['VIEW' + str(i) for i in range(1,", "env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'), 'shape': (3,) }) self.assertEqual( env.observation_spec('VIEW4'), { 'dtype': np.dtype('int64'), 'shape':", "2) def test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3])", "env.start(episode=0, seed=0) events = env.events() self.assertLen(events, 1) event_name, observations = events[0] self.assertEqual(event_name, 'start')", "import runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self): lab2d = dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'}) return", "License for the specific language governing permissions and # limitations under the License.", "for _ in range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_settings_environment(self): env = self._create_env({'steps':", "env.read_property('missing') with self.assertRaises(KeyError): env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError):", "self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) with self.assertRaises(RuntimeError): env.advance() def test_lab2d_missing_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(KeyError):", "with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.act_discrete([0]) with self.assertRaises(RuntimeError):", "dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) env.act_discrete(np.array([2], np.dtype('int32'))) _, reward", "to in writing, software # distributed under the License is distributed on an", "self.assertRaises(KeyError): env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with", "env.act_continuous([0, 1]) def test_lab2d_advance_after_episode_ends(self): env = self._create_env({'steps': '2'}) env.start(0, 0) self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0],", "dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) with self.assertRaises(RuntimeError): env.advance() def test_lab2d_missing_properties(self): env = self._create_env({'steps': '5'}) with", "implied. 
# See the License for the specific language governing permissions and #", "self.assertEqual( env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'), 'shape': (2,) }) self.assertEqual( env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'),", "\"License\"); # you may not use this file except in compliance with the", "test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0, seed=0)", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "env.advance() self.assertEqual(reward, 2) def test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1,", "['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'), { 'min': -5, 'max': 5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self):", "event_name, observations = events[0] self.assertEqual(event_name, 'start') self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0], [1, 2, 3]) def", "range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties =", "self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def test_lab2d_bad_observation_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key')", "{} settings['levelName'] = 'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(), settings) def test_lab2d_environment_name(self): self.assertEqual(self._create_env().name(), 'dmlab2d') def test_lab2d_observation_names(self):", "env.start(0, 0) with self.assertRaises(ValueError): env.act_discrete([0, 1]) with self.assertRaises(ValueError): env.act_continuous([0, 1]) def test_lab2d_advance_after_episode_ends(self): env", "= env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self):", "0) self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) with self.assertRaises(RuntimeError): env.advance() def test_lab2d_missing_properties(self): env = self._create_env({'steps':", "np import dmlab2d from dmlab2d import runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self): lab2d", "events = env.events() self.assertLen(events, 1) event_name, observations = events[0] self.assertEqual(event_name, 'start') self.assertLen(observations, 1)", "[1, 2]) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'')", "self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_settings_environment(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) for _ in", "env.observation_spec('VIEW4'), { 'dtype': np.dtype('int64'), 'shape': (4,) }) # Text is stored in objects.", "seed=0) view = env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello')", 
"env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env = self._create_env() env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'), [1,", "with self.assertRaises(KeyError): env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps')", "seed=0) def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0, seed=0) events = env.events() self.assertLen(events, 1)", "self.assertEqual(reward, 2) def test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2,", "test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key')", "or implied. # See the License for the specific language governing permissions and", "'dtype': np.dtype('int64'), 'shape': (4,) }) # Text is stored in objects. self.assertEqual( env.observation_spec('VIEW5'),", "5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self):", "np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self):", "env = self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0, seed=0) events", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "'5'}) def test_lab2d_bad_action_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.action_discrete_spec('bad_key') with self.assertRaises(KeyError): env.action_continuous_spec('bad_key') def", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError): env.write_property('steps', 'mouse') if", "= self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view", "class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None): settings = extra_settings.copy() if extra_settings else {} settings['levelName']", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "stored in objects. 
self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape': () }) def test_lab2d_action_spec(self):", "an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0, 0) with", "env.start(episode=0, seed=0) for _ in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env", "with self.assertRaises(ValueError): env.act_continuous([0, 1]) def test_lab2d_advance_after_episode_ends(self): env = self._create_env({'steps': '2'}) env.start(0, 0) self.assertEqual(env.advance()[0],", "2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env =", "specific language governing permissions and # limitations under the License. \"\"\"Tests for dmlab2d.dmlab2d.\"\"\"", "{ 'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'), 'shape': (2,)", "# See the License for the specific language governing permissions and # limitations", "env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape': () }) def test_lab2d_action_spec(self): env = self._create_env() self.assertEqual(env.action_discrete_names(),", "def test_lab2d_advance_after_episode_ends(self): env = self._create_env({'steps': '2'}) env.start(0, 0) self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) with", "self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.act_discrete([0]) with", "self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for _ in range(2): self.assertEqual(env.advance()[0],", "self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0)", "12, 13]) def test_lab2d_act_text(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5')", "{'levelName': 'examples/level_api'}) return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None): settings =", "test_lab2d_bad_observation_spec_name(self): env = self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env() with", "self._create_env({'steps': '2'}) env.start(0, 0) self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) with self.assertRaises(RuntimeError): env.advance() def test_lab2d_missing_properties(self):", "the specific language governing permissions and # limitations under the License. 
\"\"\"Tests for", "else {} settings['levelName'] = 'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(), settings) def test_lab2d_environment_name(self): self.assertEqual(self._create_env().name(), 'dmlab2d') def", "self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env =", "= self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min': 0, 'max': 4 }) self.assertEqual(env.action_continuous_names(),", "[1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0)", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "def test_lab2d_properties_environment(self): env = self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps',", "env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError):", "you may not use this file except in compliance with the License. #", "self.assertRaises(ValueError): env.act_continuous([0, 1]) def test_lab2d_advance_after_episode_ends(self): env = self._create_env({'steps': '2'}) env.start(0, 0) self.assertEqual(env.advance()[0], dmlab2d.RUNNING)", "'dtype': np.dtype('int32'), 'shape': (3,) }) self.assertEqual( env.observation_spec('VIEW4'), { 'dtype': np.dtype('int64'), 'shape': (4,) })", "self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for _", "2019 The DMLab2D Authors. # # Licensed under the Apache License, Version 2.0", "env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for _ in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0],", "= self._create_env({'steps': '5'}) with self.assertRaises(ValueError): env.list_property('steps') with self.assertRaises(ValueError): env.write_property('steps', 'mouse') if __name__ ==", "if extra_settings else {} settings['levelName'] = 'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(), settings) def test_lab2d_environment_name(self): self.assertEqual(self._create_env().name(),", "env = self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min': 0, 'max': 4 })", "for the specific language governing permissions and # limitations under the License. \"\"\"Tests", "env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0,", "use this file except in compliance with the License. 
# You may obtain", "= self._create_env() env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2]) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3])", "= self._create_env() with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1')", "self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0, 0) with self.assertRaises(ValueError): env.act_discrete([0, 1])", "np.dtype('int64'), 'shape': (4,) }) # Text is stored in objects. self.assertEqual( env.observation_spec('VIEW5'), {", "1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env = self._create_env() env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW1'),", "with self.assertRaises(KeyError): env.read_property('missing') with self.assertRaises(KeyError): env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps': '5'})", "distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "() }) def test_lab2d_action_spec(self): env = self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min':", "Copyright 2019 The DMLab2D Authors. # # Licensed under the Apache License, Version", "0, 'max': 4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'), { 'min': -5, 'max': 5", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) for _ in range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0],", "range(1, 6)]) def test_lab2d_observation_spec(self): env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape':", "self._create_env() self.assertEqual(env.action_discrete_names(), ['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min': 0, 'max': 4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT'])", "dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_settings_environment(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) for _", "License. 
\"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import from __future__ import division from", "2.0 (the \"License\"); # you may not use this file except in compliance", "3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13]) def test_lab2d_act_text(self): env = self._create_env({'steps': '5'})", "env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env =", "'max': 5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0) def", "settings = extra_settings.copy() if extra_settings else {} settings['levelName'] = 'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(), settings)", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "test_lab2d_events_cleared_after_advance_not_read(self): env = self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def", "env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'),", "env.act_text(['Hello']) env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'})", "# # Unless required by applicable law or agreed to in writing, software", "Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self): lab2d = dmlab2d.Lab2d(runfiles_helper.find(), {'levelName': 'examples/level_api'}) return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0)", "# distributed under the License is distributed on an \"AS-IS\" BASIS, # WITHOUT", "def _create_env(self, extra_settings=None): settings = extra_settings.copy() if extra_settings else {} settings['levelName'] = 'examples/level_api'", "self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError):", "express or implied. # See the License for the specific language governing permissions", "seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env = self._create_env() env.start(episode=0,", "Text is stored in objects. self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape': () })", "}) # Text is stored in objects. 
self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape':", "def test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10])", "np.dtype('double'), 'shape': (2,) }) self.assertEqual( env.observation_spec('VIEW3'), { 'dtype': np.dtype('int32'), 'shape': (3,) }) self.assertEqual(", "env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'), 'shape':", "= self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env", "(1,) }) self.assertEqual( env.observation_spec('VIEW2'), { 'dtype': np.dtype('double'), 'shape': (2,) }) self.assertEqual( env.observation_spec('VIEW3'), {", "either express or implied. # See the License for the specific language governing", "import print_function from absl.testing import absltest from dm_env import test_utils import numpy as", "env.advance() def test_lab2d_missing_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing')", "= self._create_env() with self.assertRaises(RuntimeError): env.act_discrete([0]) with self.assertRaises(RuntimeError): env.act_continuous([0]) with self.assertRaises(RuntimeError): env.act_text(['']) def test_lab2d_act_bad_shape(self):", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "as np import dmlab2d from dmlab2d import runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def make_object_under_test(self):", "[1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13]) def test_lab2d_act_text(self): env =", "# Text is stored in objects. self.assertEqual( env.observation_spec('VIEW5'), { 'dtype': np.dtype('O'), 'shape': ()", "env.act_text(['']) def test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0, 0) with self.assertRaises(ValueError): env.act_discrete([0, 1]) with", "6)]) def test_lab2d_observation_spec(self): env = self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,)", "= self._create_env({'steps': '5'}) env.start(episode=0, seed=0) np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11,", "limitations under the License. 
\"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import from __future__", "test_lab2d_missing_properties(self): env = self._create_env({'steps': '5'}) with self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing') with self.assertRaises(KeyError):", "env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with", "= env.events() self.assertLen(events, 1) event_name, observations = events[0] self.assertEqual(event_name, 'start') self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0],", "with self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing') with self.assertRaises(KeyError): env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env", "= self._create_env({'steps': '5'}) properties = env.list_property('') self.assertLen(properties, 1) self.assertEqual(properties[0], ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5')", "extra_settings.copy() if extra_settings else {} settings['levelName'] = 'examples/level_api' return dmlab2d.Lab2d(runfiles_helper.find(), settings) def test_lab2d_environment_name(self):", "('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE)) self.assertEqual(env.read_property('steps'), '5') env.write_property('steps', '3') self.assertEqual(env.read_property('steps'), '3') env.start(episode=0, seed=0) for _ in", "3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0) for _", "self._create_env({'steps': '5'}) env.start(episode=0, seed=0) for _ in range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def", "__future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing", "env.start(episode=0, seed=0) for _ in range(4): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_properties_environment(self): env", "the License. # You may obtain a copy of the License at #", "numpy as np import dmlab2d from dmlab2d import runfiles_helper class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase): def", "\"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import from __future__ import division from __future__", "= self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.act_discrete([0])", "DMLab2D Authors. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "in writing, software # distributed under the License is distributed on an \"AS-IS\"", "return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None): settings = extra_settings.copy() if", "self._create_env() self.assertEqual( env.observation_spec('VIEW1'), { 'dtype': np.dtype('uint8'), 'shape': (1,) }) self.assertEqual( env.observation_spec('VIEW2'), { 'dtype':", "# limitations under the License. 
\"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__ import absolute_import from", "}) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env", "dmlab2d.Environment(lab2d, lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None): settings = extra_settings.copy() if extra_settings", "= self._create_env() env.start(0, 0) with self.assertRaises(ValueError): env.act_discrete([0, 1]) with self.assertRaises(ValueError): env.act_continuous([0, 1]) def", "range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0)", "b'') env.act_text(['Hello']) env.advance() view = env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing':", "= self._create_env({'steps': '5'}) with self.assertRaises(KeyError): env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing') with self.assertRaises(KeyError): env.write_property('missing', '10')", "3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4]) self.assertEqual(env.observation('VIEW5'), b'') def test_lab2d_ten_steps_terminate_environment(self): env = self._create_env()", "for _ in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env = self._create_env({'steps':", "np.dtype('int32'))) _, reward = env.advance() self.assertEqual(reward, 2) def test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'})", "= events[0] self.assertEqual(event_name, 'start') self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0], [1, 2, 3]) def test_lab2d_events_cleared_after_advance_not_read(self): env", "in range(2): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_act_discrete(self): env = self._create_env({'steps': '5'}) env.start(episode=0,", "'min': -5, 'max': 5 }) self.assertEqual(env.action_text_names(), ['LOG_EVENT']) def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0,", "with the License. # You may obtain a copy of the License at", "The DMLab2D Authors. 
# # Licensed under the Apache License, Version 2.0 (the", "_, reward = env.advance() self.assertEqual(reward, 2) def test_lab2d_act_continuous(self): env = self._create_env({'steps': '5'}) env.start(episode=0,", "test_lab2d_ten_steps_terminate_environment(self): env = self._create_env() env.start(episode=0, seed=0) for _ in range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0],", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "['VIEW' + str(i) for i in range(1, 6)]) def test_lab2d_observation_spec(self): env = self._create_env()", "env.start(episode=0, seed=0) for _ in range(9): self.assertEqual(env.advance()[0], dmlab2d.RUNNING) self.assertEqual(env.advance()[0], dmlab2d.TERMINATED) def test_lab2d_settings_environment(self): env", "self._create_env() env.start(episode=0, seed=0) events = env.events() self.assertLen(events, 1) event_name, observations = events[0] self.assertEqual(event_name,", "self._create_env() env.start(episode=0, seed=0) self.assertLen(env.events(), 1) self.assertLen(env.events(), 1) env.advance() self.assertEmpty(env.events()) def test_lab2d_observe(self): env =", "the License is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'), { 'min': -5, 'max': 5 }) self.assertEqual(env.action_text_names(),", "self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0], [1, 2, 3]) def test_lab2d_events_cleared_after_advance_not_read(self): env = self._create_env() env.start(episode=0, seed=0)", "env.act_continuous([10]) env.advance() np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13]) def test_lab2d_act_text(self): env = self._create_env({'steps': '5'}) env.start(episode=0,", "lab2d.observation_names(), 0) class Dmlab2DTest(absltest.TestCase): def _create_env(self, extra_settings=None): settings = extra_settings.copy() if extra_settings else", "with self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self):", "division from __future__ import print_function from absl.testing import absltest from dm_env import test_utils", "on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "from dm_env import test_utils import numpy as np import dmlab2d from dmlab2d import", "law or agreed to in writing, software # distributed under the License is", "self.assertRaises(KeyError): env.observation_spec('bad_key') def test_lab2d_observe_before_start(self): env = self._create_env() with self.assertRaises(RuntimeError): env.observation('VIEW1') def test_lab2d_act_before_start(self): env", "env.list_property('missing') with self.assertRaises(KeyError): env.read_property('missing') with self.assertRaises(KeyError): env.write_property('missing', '10') def test_lab2d_invalid_ops_properties(self): env = self._create_env({'steps':", "the License for the specific language governing permissions and # limitations under the", "env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5') self.assertEqual(view, b'') env.act_text(['Hello']) env.advance()", "np.testing.assert_array_equal(env.observation('VIEW1'), [1]) np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2]) 
np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3]) np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3,", "'start') self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0], [1, 2, 3]) def test_lab2d_events_cleared_after_advance_not_read(self): env = self._create_env() env.start(episode=0,", "from __future__ import absolute_import from __future__ import division from __future__ import print_function from", "['REWARD_ACT']) self.assertEqual( env.action_discrete_spec('REWARD_ACT'), { 'min': 0, 'max': 4 }) self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT']) self.assertEqual( env.action_continuous_spec('OBSERVATION_ACT'),", "governing permissions and # limitations under the License. \"\"\"Tests for dmlab2d.dmlab2d.\"\"\" from __future__", "def test_lab2d_act_text(self): env = self._create_env({'steps': '5'}) env.start(episode=0, seed=0) view = env.observation('VIEW5') self.assertEqual(view, b'')", "from __future__ import division from __future__ import print_function from absl.testing import absltest from", "under the License is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR", "events[0] self.assertEqual(event_name, 'start') self.assertLen(observations, 1) np.testing.assert_array_equal(observations[0], [1, 2, 3]) def test_lab2d_events_cleared_after_advance_not_read(self): env =", "def test_lab2d_start_environment(self): env = self._create_env() env.start(episode=0, seed=0) def test_lab2d_events_start(self): env = self._create_env() env.start(episode=0,", "= env.observation('VIEW5') self.assertEqual(view, b'Hello') def test_lab2d_invalid_setting(self): with self.assertRaises(ValueError): self._create_env({'missing': '5'}) def test_lab2d_bad_action_spec_name(self): env", "def test_lab2d_act_bad_shape(self): env = self._create_env() env.start(0, 0) with self.assertRaises(ValueError): env.act_discrete([0, 1]) with self.assertRaises(ValueError):", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "dmlab2d.Lab2d(runfiles_helper.find(), settings) def test_lab2d_environment_name(self): self.assertEqual(self._create_env().name(), 'dmlab2d') def test_lab2d_observation_names(self): env = self._create_env() self.assertEqual(env.observation_names(), ['VIEW'", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for dmlab2d.dmlab2d."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from absl.testing import absltest
from dm_env import test_utils
import numpy as np

import dmlab2d
from dmlab2d import runfiles_helper


class Dmlab2dDmEnvTest(test_utils.EnvironmentTestMixin, absltest.TestCase):

  def make_object_under_test(self):
    lab2d = dmlab2d.Lab2d(runfiles_helper.find(),
                          {'levelName': 'examples/level_api'})
    return dmlab2d.Environment(lab2d, lab2d.observation_names(), 0)


class Dmlab2DTest(absltest.TestCase):

  def _create_env(self, extra_settings=None):
    settings = extra_settings.copy() if extra_settings else {}
    settings['levelName'] = 'examples/level_api'
    return dmlab2d.Lab2d(runfiles_helper.find(), settings)

  def test_lab2d_environment_name(self):
    self.assertEqual(self._create_env().name(), 'dmlab2d')

  def test_lab2d_observation_names(self):
    env = self._create_env()
    self.assertEqual(env.observation_names(),
                     ['VIEW' + str(i) for i in range(1, 6)])

  def test_lab2d_observation_spec(self):
    env = self._create_env()
    self.assertEqual(env.observation_spec('VIEW1'), {
        'dtype': np.dtype('uint8'),
        'shape': (1,)
    })
    self.assertEqual(env.observation_spec('VIEW2'), {
        'dtype': np.dtype('double'),
        'shape': (2,)
    })
    self.assertEqual(env.observation_spec('VIEW3'), {
        'dtype': np.dtype('int32'),
        'shape': (3,)
    })
    self.assertEqual(env.observation_spec('VIEW4'), {
        'dtype': np.dtype('int64'),
        'shape': (4,)
    })
    # Text is stored in objects.
    self.assertEqual(env.observation_spec('VIEW5'), {
        'dtype': np.dtype('O'),
        'shape': ()
    })

  def test_lab2d_action_spec(self):
    env = self._create_env()
    self.assertEqual(env.action_discrete_names(), ['REWARD_ACT'])
    self.assertEqual(env.action_discrete_spec('REWARD_ACT'),
                     {'min': 0, 'max': 4})
    self.assertEqual(env.action_continuous_names(), ['OBSERVATION_ACT'])
    self.assertEqual(env.action_continuous_spec('OBSERVATION_ACT'),
                     {'min': -5, 'max': 5})
    self.assertEqual(env.action_text_names(), ['LOG_EVENT'])

  def test_lab2d_start_environment(self):
    env = self._create_env()
    env.start(episode=0, seed=0)

  def test_lab2d_events_start(self):
    env = self._create_env()
    env.start(episode=0, seed=0)
    events = env.events()
    self.assertLen(events, 1)
    event_name, observations = events[0]
    self.assertEqual(event_name, 'start')
    self.assertLen(observations, 1)
    np.testing.assert_array_equal(observations[0], [1, 2, 3])

  def test_lab2d_events_cleared_after_advance_not_read(self):
    env = self._create_env()
    env.start(episode=0, seed=0)
    # Reading events repeatedly without advancing returns the same event.
    self.assertLen(env.events(), 1)
    self.assertLen(env.events(), 1)
    env.advance()
    self.assertEmpty(env.events())

  def test_lab2d_observe(self):
    env = self._create_env()
    env.start(episode=0, seed=0)
    np.testing.assert_array_equal(env.observation('VIEW1'), [1])
    np.testing.assert_array_equal(env.observation('VIEW2'), [1, 2])
    np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3])
    np.testing.assert_array_equal(env.observation('VIEW4'), [1, 2, 3, 4])
    self.assertEqual(env.observation('VIEW5'), b'')

  def test_lab2d_default_environment(self):
    env = self._create_env()
    env.start(episode=0, seed=0)
    for _ in range(9):
      self.assertEqual(env.advance()[0], dmlab2d.RUNNING)
    self.assertEqual(env.advance()[0], dmlab2d.TERMINATED)

  def test_lab2d_settings_environment(self):
    env = self._create_env({'steps': '5'})
    env.start(episode=0, seed=0)
    for _ in range(4):
      self.assertEqual(env.advance()[0], dmlab2d.RUNNING)
    self.assertEqual(env.advance()[0], dmlab2d.TERMINATED)

  def test_lab2d_properties_environment(self):
    env = self._create_env({'steps': '5'})
    properties = env.list_property('')
    self.assertLen(properties, 1)
    self.assertEqual(properties[0],
                     ('steps', dmlab2d.PropertyAttribute.READABLE_WRITABLE))
    self.assertEqual(env.read_property('steps'), '5')
    env.write_property('steps', '3')
    self.assertEqual(env.read_property('steps'), '3')
    env.start(episode=0, seed=0)
    for _ in range(2):
      self.assertEqual(env.advance()[0], dmlab2d.RUNNING)
    self.assertEqual(env.advance()[0], dmlab2d.TERMINATED)

  def test_lab2d_act_discrete(self):
    env = self._create_env({'steps': '5'})
    env.start(episode=0, seed=0)
    env.act_discrete(np.array([2], np.dtype('int32')))
    _, reward = env.advance()
    self.assertEqual(reward, 2)

  def test_lab2d_act_continuous(self):
    env = self._create_env({'steps': '5'})
    env.start(episode=0, seed=0)
    np.testing.assert_array_equal(env.observation('VIEW3'), [1, 2, 3])
    env.act_continuous([10])
    env.advance()
    np.testing.assert_array_equal(env.observation('VIEW3'), [11, 12, 13])

  def test_lab2d_act_text(self):
    env = self._create_env({'steps': '5'})
    env.start(episode=0, seed=0)
    view = env.observation('VIEW5')
    self.assertEqual(view, b'')
    env.act_text(['Hello'])
    env.advance()
    view = env.observation('VIEW5')
    self.assertEqual(view, b'Hello')

  def test_lab2d_invalid_setting(self):
    with self.assertRaises(ValueError):
      self._create_env({'missing': '5'})

  def test_lab2d_bad_action_spec_name(self):
    env = self._create_env()
    with self.assertRaises(KeyError):
      env.action_discrete_spec('bad_key')
    with self.assertRaises(KeyError):
      env.action_continuous_spec('bad_key')

  def test_lab2d_bad_observation_spec_name(self):
    env = self._create_env()
    with self.assertRaises(KeyError):
      env.observation_spec('bad_key')

  def test_lab2d_observe_before_start(self):
    env = self._create_env()
    with self.assertRaises(RuntimeError):
      env.observation('VIEW1')

  def test_lab2d_act_before_start(self):
    env = self._create_env()
    with self.assertRaises(RuntimeError):
      env.act_discrete([0])
    with self.assertRaises(RuntimeError):
      env.act_continuous([0])
    with self.assertRaises(RuntimeError):
      env.act_text([''])

  def test_lab2d_act_bad_shape(self):
    env = self._create_env()
    env.start(0, 0)
    with self.assertRaises(ValueError):
      env.act_discrete([0, 1])
    with self.assertRaises(ValueError):
      env.act_continuous([0, 1])

  def test_lab2d_advance_after_episode_ends(self):
    env = self._create_env({'steps': '2'})
    env.start(0, 0)
    self.assertEqual(env.advance()[0], dmlab2d.RUNNING)
    self.assertEqual(env.advance()[0], dmlab2d.TERMINATED)
    with self.assertRaises(RuntimeError):
      env.advance()

  def test_lab2d_missing_properties(self):
    env = self._create_env({'steps': '5'})
    with self.assertRaises(KeyError):
      env.list_property('missing')
    with self.assertRaises(KeyError):
      env.read_property('missing')
    with self.assertRaises(KeyError):
      env.write_property('missing', '10')

  def test_lab2d_invalid_ops_properties(self):
    env = self._create_env({'steps': '5'})
    with self.assertRaises(ValueError):
      env.list_property('steps')
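
# Illustrative sketch (not part of the test suite above): one way to drive the
# raw `dmlab2d.Lab2d` API end-to-end, using only names exercised by the tests.
# The helper name `_example_episode` is hypothetical; the level name and
# settings mirror `_create_env`. Treat this as a hedged example, not an
# additional API reference.
def _example_episode():
  env = dmlab2d.Lab2d(runfiles_helper.find(),
                      {'levelName': 'examples/level_api', 'steps': '5'})
  env.start(episode=0, seed=0)
  # 'REWARD_ACT' is a discrete action in [0, 4]; acting with 2 yields reward 2
  # on the next advance (see test_lab2d_act_discrete above).
  env.act_discrete(np.array([2], np.dtype('int32')))
  status, reward = env.advance()
  total = reward
  while status == dmlab2d.RUNNING:
    status, reward = env.advance()
    total += reward
  return total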
[ "values passed in are `bytes`). `args` entries are handled by type: 1. `str`", "when the `style` is `\" \"`. # yield flag yield str(value) else: #", ">>> list( ... render_opts( ... { ... 'input': Path(\"/tmp/blah.json\"), ... 'output': Path(\"/dev/null\"), ...", "#2 — flag=value format # # When no other branch has matched, we're", "#1 — space-separated # # _Short_ (single-character) flags and values are _always_ space-", "`rel_to`) through `render_path`. 3. `typing.Mapping` -- understood as options, passed through `render_opts`. 4.", "### Relative Path Examples ### 1. As with positional arguments, `pathlib.Path` option values", "can either be provided as `cwd` or assumed to be the current directory.", "from shutil import rmtree import shlex from functools import wraps import splatlog as", "directory already exists.[/yeah]\", path=path ) else: raise RuntimeError(f\"{path} exists and is NOT a", "are descendants of `rel_to` will be relativized (no `../` transformations). >>> list( ...", "style, is_short, rel_to) def render_args( args: Iterable[Any], *, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort:", "else list(opts.items()) for name, value in items: name_s = str(name) is_short = len(name_s)", "\" \" or (is_short and style != \"\"): # General case #1 —", "shell=True) False \"\"\" return run(*args, check=False, **kwds).returncode == 0 @LOG.inject @prepare_wrap def replace(", "Optional[_TPath] = None, rel_paths: bool = CONFIG.rel_paths, **opts, ) -> List[str]: \"\"\"\\ Prepare", "False \"\"\" return run(*args, check=False, **kwds).returncode == 0 @LOG.inject @prepare_wrap def replace( *cmd,", "for console in (OUT, ERR): console.file.flush() proc_name = basename(cmd[0]) log.debug( \"Replacing current process", "str(value) else: # General case #2 — flag=value format # # When no", "value is `True` # # We emit the bare flag, like `-x` or", "`str` (and/or `bytes`, if any values passed in are `bytes`). `args` entries are", "a bit more common normalization for `get`, `run` etc. \"\"\" @wraps(fn) def _prepare_wrapper(", "Examples: ### Style Examples ### 1. 
By default, `=` is used to separate", "None]: \"\"\"\\ Render `args` to sequence of `str` (and/or `bytes`, if any values", "path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc} directory already exists.[/yeah]\", path=path ) else: raise RuntimeError(f\"{path}", "nature, nested lists work as well: >>> list(render_opts({'blah': [1, 2, [[3], 4], 5]}))", "Path): yield render_path(arg, rel_to) elif isinstance(arg, Mapping): yield from render_opts( arg, long_prefix=opts_long_prefix, style=opts_style,", "opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_paths: bool = CONFIG.rel_paths, **opts,", "if opts is None: return # Sort key/value pairs if needed items =", "dir_present(path: Path, desc: Optional[str] = None, log=LOG): if desc is None: desc =", "Optional[Path] = None, ) -> Generator[Union[str, bytes], None, None]: \"\"\"\\ Render `args` to", "_prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd, log=LOG, format: Optional[str] =", "= fmt(path) if path.exists(): log.info(f\"[holup]Removing {name}...[/holup]\", path=path) if path.is_dir(): rmtree(path) else: os.remove(path) else:", "to the working directory, which can either be provided as `cwd` or assumed", "= subprocess.check_output(cmd, **opts) if format is None: return output elif format == \"strip\":", "from _iter_opt(flag, item, style, is_short) elif style == \" \" or (is_short and", "a `list` or `tuple` # # We handle these by emitting the option", "Path(cwd) cmd = prepare( *args, cwd=cwd, opts_long_prefix=opts_long_prefix, opts_sort=opts_sort, opts_style=opts_style, rel_paths=rel_paths, ) return fn(*cmd,", "cwd = Path(cwd) if rel_paths is True: rel_to = Path.cwd() if cwd is", "`str()`. \"\"\" for arg in args: if isinstance(arg, (str, bytes)): yield arg elif", "= Path.cwd() if cwd is None else cwd else: rel_to = None return", "separate \"long options\" and their values, while \"short options\" (single-character options) are always", "prepare( ... \"kubectl\", ... {\"namespace\": \"blah\"}, ... \"logs\", ... {\"follow\": True}, ... \"some-pod\",", "if env is None: if isabs(cmd[0]): os.execv(cmd[0], cmd) else: os.execvp(proc_name, cmd) else: if", "Short opt with a list (or tuple) value: >>> list(render_opts({'x': [1, 2, 3]}))", "emitting the option multiples times, once for each # inner value. # for", "= None, cwd: Optional[Union[str, Path]] = None, ) -> NoReturn: # https://docs.python.org/3.9/library/os.html#os.execl for", "through `render_path`. 3. `typing.Mapping` -- understood as options, passed through `render_opts`. 4. `typing.Iterable`", "`format`\", format=format, expected=[None, \"json\"]) return output @LOG.inject @prepare_wrap def run( *cmd, log=LOG, check:", "# _Short_ (single-character) flags and values are _always_ space- # sparated. # #", "`\" \"`. # yield flag yield str(value) else: # General case #2 —", "None, None]: \"\"\"\\ Render `args` to sequence of `str` (and/or `bytes`, if any", "`rel_to` will be relativized (no `../` transformations). >>> list( ... render_opts( ... {", "= sorted(opts.items()) if sort else list(opts.items()) for name, value in items: name_s =", "yield f\"{flag}{style}{value}\" def render_opts( opts: TOpts, *, long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, sort: bool", "# commands if opts is None: return # Sort key/value pairs if needed", "`typing.Iterable` -- recurred into. 5. 
Other -- converted to a string with `str()`.", "render_path(arg, rel_to) elif isinstance(arg, Mapping): yield from render_opts( arg, long_prefix=opts_long_prefix, style=opts_style, sort=opts_sort, rel_to=rel_to,", "'--namespace=blah', 'logs', '--follow', 'some-pod'] \"\"\" # Normalize str cwd path to Path if", "path=path) @LOG.inject def dir_present(path: Path, desc: Optional[str] = None, log=LOG): if desc is", "DEFAULT_OPTS_SORT = True CompletedProcess = subprocess.CompletedProcess def render_path(path: Path, rel_to: Optional[Path]) -> str:", "from render_args( arg, opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else: yield str(arg) def prepare(", "return subprocess.run( cmd, check=check, input=file.read(), **opts, ) else: return subprocess.run(cmd, check=check, input=input, **opts)", "test(\"false\", shell=True) False \"\"\" return run(*args, check=False, **kwds).returncode == 0 @LOG.inject @prepare_wrap def", "converted to a string with `str()`. \"\"\" for arg in args: if isinstance(arg,", "import os from os.path import isabs, basename import subprocess from pathlib import Path", "bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] = None, ) ->", "style == \" \" or (is_short and style != \"\"): # General case", "if path.exists(): log.info(f\"[holup]Removing {name}...[/holup]\", path=path) if path.is_dir(): rmtree(path) else: os.remove(path) else: log.info(f\"[yeah]{name} already", "= Union[Path, str] CONFIG = CFG.clavier.sh LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\"", "2})) ['-a', '1', '--bee=2'] 2. Use space-separated option names and values: >>> list(render_opts({'blah':", "— value is `True` # # We emit the bare flag, like `-x`", "str): cwd = Path(cwd) if rel_paths is True: rel_to = Path.cwd() if cwd", "with a list (or tuple) value: >>> list(render_opts({'x': [1, 2, 3]})) ['-x', '1',", "prepare( *args, cwd=cwd, opts_long_prefix=opts_long_prefix, opts_sort=opts_sort, opts_style=opts_style, rel_paths=rel_paths, ) return fn(*cmd, cwd=cwd, encoding=encoding, **opts)", ">>> test(\"false\", shell=True) False \"\"\" return run(*args, check=False, **kwds).returncode == 0 @LOG.inject @prepare_wrap", "cwd = Path(cwd) cmd = prepare( *args, cwd=cwd, opts_long_prefix=opts_long_prefix, opts_sort=opts_sort, opts_style=opts_style, rel_paths=rel_paths, )", "\"\"\"Private helper for `iter_opts`.\"\"\" if isinstance(value, Path): value = render_path(value, rel_to) if value", "log.info(f\"[holup]Removing {name}...[/holup]\", path=path) if path.is_dir(): rmtree(path) else: os.remove(path) else: log.info(f\"[yeah]{name} already absent.[/yeah]\", path=path)", "-> Generator[str, None, None]: \"\"\" Render a mapping of option names to values", "value is None or value is False: # Special case #1 — value", "disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd, log=LOG, format: Optional[str] = None, **opts, )", "CONFIG.rel_paths, **opts, ) -> List[str]: \"\"\"\\ Prepare `args` to be passed `subprocess.run` or", "with `prepare` -> `shlex.join`. Returned string _should_ be suitable for pasting in a", "case #1 — space-separated # # _Short_ (single-character) flags and values are _always_", "## >>> prepare( ... \"kubectl\", ... {\"namespace\": \"blah\"}, ... \"logs\", ... {\"follow\": True},", "— flag=value format # # When no other branch has matched, we're left", "whether or not it succeeds (has `subprocess.CompletedProcess.returncode` equal to `0`). 
>>> test(\"true\", shell=True)", "def file_absent(path: Path, name: Optional[str] = None, log=LOG): if name is None: name", "@LOG.inject def dir_present(path: Path, desc: Optional[str] = None, log=LOG): if desc is None:", "console in (OUT, ERR): console.file.flush() proc_name = basename(cmd[0]) log.debug( \"Replacing current process with", "if any values passed in are `bytes`). `args` entries are handled by type:", "import isabs, basename import subprocess from pathlib import Path import json from shutil", "test(\"true\", shell=True) True >>> test(\"false\", shell=True) False \"\"\" return run(*args, check=False, **kwds).returncode ==", "normalization for `get`, `run` etc. \"\"\" @wraps(fn) def _prepare_wrapper( *args, cwd: Optional[_TPath] =", "\")) ['--blah', '1', '--meh', '2'] 3. Use a single `-` prefix on long", "as logging from .cfg import CFG from .io import OUT, ERR, fmt, fmt_cmd", "will be relativized (no `../` transformations). >>> list( ... render_opts( ... { ...", "inner value. # for item in value: yield from _iter_opt(flag, item, style, is_short)", "else: raise RuntimeError(f\"{path} exists and is NOT a directory\") else: log.info(f\"[holup]Creating {desc} directory...[/holup]\",", "recurred into. 5. Other -- converted to a string with `str()`. \"\"\" for", "render_args( args: Iterable[Any], *, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style:", "with `str()`. \"\"\" for arg in args: if isinstance(arg, (str, bytes)): yield arg", ">>> list(render_opts({'blah': [1, 2, 3]})) ['--blah=1', '--blah=2', '--blah=3'] 3. Due to the recursive,", "\"\"\"\\ Prepare `args` to be passed `subprocess.run` or similar functions. Contextualizes the relative", "else cwd else: rel_to = None return list(render_args(args, rel_to=rel_to, **opts)) def join(*args, **opts)", "`prepare` -> `shlex.join`. Returned string _should_ be suitable for pasting in a shell.", "_iter_opt(flag, value, style, is_short, rel_to) def render_args( args: Iterable[Any], *, opts_long_prefix: TOptsLongPrefix =", "os.execvp(proc_name, cmd) else: if isabs(cmd[0]): os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name, cmd, env) @LOG.inject", "... { ... 'input': Path(\"/tmp/blah.json\"), ... 'output': Path(\"/dev/null\"), ... }, ... rel_to=Path(\"/tmp\") ...", "= Literal[\"--\", \"-\"] _TPath = Union[Path, str] CONFIG = CFG.clavier.sh LOG = logging.getLogger(__name__)", "_iter_opt( flag: str, value: Any, style: TOptsStyle, is_short: bool, rel_to: Optional[Path] = None,", "def prepare_wrap(fn: Callable) -> Callable: \"\"\"\\ Decorator helper to run `prepare` and do", "2, 3]})) ['-x', '1', '-x', '2', '-x', '3'] 2. Long opt with a", "current directory. Relative path conversion is controlled by the `rel_paths` flag. ## Examples", "to Path if isinstance(cwd, str): cwd = Path(cwd) if rel_paths is True: rel_to", "@prepare_wrap def get( *cmd, log=LOG, format: Optional[str] = None, **opts, ) -> Any:", "log=LOG): if desc is None: desc = fmt(path) if path.exists(): if path.is_dir(): log.debug(", "equal to `0`). >>> test(\"true\", shell=True) True >>> test(\"false\", shell=True) False \"\"\" return", "if cwd is not None: os.chdir(cwd) if env is None: if isabs(cmd[0]): os.execv(cmd[0],", "\"\"\"\\ Render `args` to a single string with `prepare` -> `shlex.join`. Returned string", "`pathlib.Path` option values can be rendered relative to a `rel_to` directory. 
Only paths", "file_absent(path: Path, name: Optional[str] = None, log=LOG): if name is None: name =", "Union[None, str, bytes, Path] = None, **opts, ) -> CompletedProcess: log.info( \"Running system", "output...\", cmd=fmt_cmd(cmd), format=format, **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output = subprocess.check_output(cmd, **opts) if format", "CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_paths: bool = CONFIG.rel_paths,", "bit more common normalization for `get`, `run` etc. \"\"\" @wraps(fn) def _prepare_wrapper( *args,", "_Short_ (single-character) flags and values are _always_ space- # sparated. # # _All_", "lists work as well: >>> list(render_opts({'blah': [1, 2, [[3], 4], 5]})) ['--blah=1', '--blah=2',", "'logs', '--follow', 'some-pod'] \"\"\" # Normalize str cwd path to Path if isinstance(cwd,", "`typing.Mapping` -- understood as options, passed through `render_opts`. 4. `typing.Iterable` -- recurred into.", "Mapping[Any, Any] TOptsStyle = Literal[\"=\", \" \", \"\"] TOptsLongPrefix = Literal[\"--\", \"-\"] _TPath", "str: if rel_to is None: return str(path) return str(path.relative_to(rel_to)) def _iter_opt( flag: str,", "case #3 — value is a `list` or `tuple` # # We handle", "... ) ... ) ['--input=blah.json', '--output=/dev/null'] \"\"\" # Handle `None` as a legit", "None: name = fmt(path) if path.exists(): log.info(f\"[holup]Removing {name}...[/holup]\", path=path) if path.is_dir(): rmtree(path) else:", "list (or tuple) value: >>> list(render_opts({'blah': [1, 2, 3]})) ['--blah=1', '--blah=2', '--blah=3'] 3.", "'1', '-x', '2', '-x', '3'] 2. Long opt with a list (or tuple)", "encoding=encoding, **opts) return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd, log=LOG,", "str(path.relative_to(rel_to)) def _iter_opt( flag: str, value: Any, style: TOptsStyle, is_short: bool, rel_to: Optional[Path]", "exists.[/yeah]\", path=path ) else: raise RuntimeError(f\"{path} exists and is NOT a directory\") else:", "= prepare( *args, cwd=cwd, opts_long_prefix=opts_long_prefix, opts_sort=opts_sort, opts_style=opts_style, rel_paths=rel_paths, ) return fn(*cmd, cwd=cwd, encoding=encoding,", "cwd=cwd, ) if cwd is not None: os.chdir(cwd) if env is None: if", "log=LOG, # Used, but defaulted in `prepare_cmd`, so needs to be accepted here", "# Normalize str cwd path to Path if isinstance(cwd, str): cwd = Path(cwd)", "isinstance(cwd, str): cwd = Path(cwd) cmd = prepare( *args, cwd=cwd, opts_long_prefix=opts_long_prefix, opts_sort=opts_sort, opts_style=opts_style,", "run(*args, check=False, **kwds).returncode == 0 @LOG.inject @prepare_wrap def replace( *cmd, log=LOG, # Used,", "= None, ) -> Generator[Union[str, bytes], None, None]: \"\"\"\\ Render `args` to sequence", "# When no other branch has matched, we're left with `=`-separated flag #", "are always separate tokens from their values: >>> list(render_opts({\"a\": 1, \"bee\": 2})) ['-a',", "... render_opts( ... { ... 'input': Path(\"/tmp/blah.json\"), ... 'output': Path(\"/dev/null\"), ... }, ...", "f\"{flag}{style}{value}\" def render_opts( opts: TOpts, *, long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, sort: bool =", "-- recurred into. 5. Other -- converted to a string with `str()`. \"\"\"", "['--blah=1', '--blah=2', '--blah=3', '--blah=4', '--blah=5'] Neat, huh?! ### Relative Path Examples ### 1.", "emit the bare flag, like `-x` or `--blah`. 
# yield flag elif isinstance(value,", "import OUT, ERR, fmt, fmt_cmd TOpts = Mapping[Any, Any] TOptsStyle = Literal[\"=\", \"", "list(render_opts({'blah': [1, 2, [[3], 4], 5]})) ['--blah=1', '--blah=2', '--blah=3', '--blah=4', '--blah=5'] Neat, huh?!", "into. 5. Other -- converted to a string with `str()`. \"\"\" for arg", "rel_to) def render_args( args: Iterable[Any], *, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool =", "is controlled by the `rel_paths` flag. ## Examples ## >>> prepare( ... \"kubectl\",", "True >>> test(\"false\", shell=True) False \"\"\" return run(*args, check=False, **kwds).returncode == 0 @LOG.inject", "flag = f\"-{name_s}\" if is_short else f\"{long_prefix}{name_s}\" yield from _iter_opt(flag, value, style, is_short,", "# # We emit the bare flag, like `-x` or `--blah`. # yield", "opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else: yield str(arg) def prepare( *args, cwd: Optional[_TPath] =", ") else: yield str(arg) def prepare( *args, cwd: Optional[_TPath] = None, rel_paths: bool", "here encoding: Optional[str] = None, env: Optional[Mapping] = None, cwd: Optional[Union[str, Path]] =", "# # When no other branch has matched, we're left with `=`-separated flag", "TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path]", "-> Generator[Union[str, bytes], None, None]: \"\"\"\\ Render `args` to sequence of `str` (and/or", "str cwd path to Path if isinstance(cwd, str): cwd = Path(cwd) cmd =", "`rel_paths` flag. ## Examples ## >>> prepare( ... \"kubectl\", ... {\"namespace\": \"blah\"}, ...", "suitable for pasting in a shell. ## Parameters ## Same as `prepare`. \"\"\"", "or not it succeeds (has `subprocess.CompletedProcess.returncode` equal to `0`). >>> test(\"true\", shell=True) True", "to a single string with `prepare` -> `shlex.join`. Returned string _should_ be suitable", "if is_short else f\"{long_prefix}{name_s}\" yield from _iter_opt(flag, value, style, is_short, rel_to) def render_args(", "transformations). >>> list( ... render_opts( ... { ... 'input': Path(\"/tmp/blah.json\"), ... 'output': Path(\"/dev/null\"),", "value. # for item in value: yield from _iter_opt(flag, item, style, is_short) elif", "shlex from functools import wraps import splatlog as logging from .cfg import CFG", "passed `subprocess.run` or similar functions. Contextualizes the relative path capabilities of `render_args` and", "list(render_args(args, rel_to=rel_to, **opts)) def join(*args, **opts) -> str: \"\"\"\\ Render `args` to a", "Literal[\"=\", \" \", \"\"] TOptsLongPrefix = Literal[\"--\", \"-\"] _TPath = Union[Path, str] CONFIG", ">>> test(\"true\", shell=True) True >>> test(\"false\", shell=True) False \"\"\" return run(*args, check=False, **kwds).returncode", "= CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] = None, ) -> Generator[Union[str,", "def test(*args, **kwds) -> bool: \"\"\"\\ Run a command and return whether or", "= None, log=LOG): if desc is None: desc = fmt(path) if path.exists(): if", "_TPath = Union[Path, str] CONFIG = CFG.clavier.sh LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle =", "\"json\"]) return output @LOG.inject @prepare_wrap def run( *cmd, log=LOG, check: bool = True,", "Prepare `args` to be passed `subprocess.run` or similar functions. Contextualizes the relative path", "{\"namespace\": \"blah\"}, ... \"logs\", ... {\"follow\": True}, ... \"some-pod\", ... 
) ['kubectl', '--namespace=blah',", "each # inner value. # for item in value: yield from _iter_opt(flag, item,", "are space-separated when the `style` is `\" \"`. # yield flag yield str(value)", "directory\") else: log.info(f\"[holup]Creating {desc} directory...[/holup]\", path=path) os.makedirs(path) if __name__ == \"__main__\": import doctest", "output elif format == \"strip\": return output.strip() elif format == \"json\": return json.loads(output)", "from _iter_opt(flag, value, style, is_short, rel_to) def render_args( args: Iterable[Any], *, opts_long_prefix: TOptsLongPrefix", "2. Use space-separated option names and values: >>> list(render_opts({'blah': 1, 'meh': 2}, style=\"", "so needs to be accepted here encoding: Optional[str] = None, env: Optional[Mapping] =", "-> Any: log.debug( \"Getting system command output...\", cmd=fmt_cmd(cmd), format=format, **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output", "len(name_s) == 1 flag = f\"-{name_s}\" if is_short else f\"{long_prefix}{name_s}\" yield from _iter_opt(flag,", "opts is None: return # Sort key/value pairs if needed items = sorted(opts.items())", "env is None: if isabs(cmd[0]): os.execv(cmd[0], cmd) else: os.execvp(proc_name, cmd) else: if isabs(cmd[0]):", "bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_paths: bool = CONFIG.rel_paths, **opts, ):", "\"some-pod\", ... ) ['kubectl', '--namespace=blah', 'logs', '--follow', 'some-pod'] \"\"\" # Normalize str cwd", "format == \"strip\": return output.strip() elif format == \"json\": return json.loads(output) else: log.warn(\"Unknown", "style you sometimes see: >>> list(render_opts({'x': 123, 'y': 456}, style=\"\")) ['-x123', '-y456'] ###", "run( *cmd, log=LOG, check: bool = True, input: Union[None, str, bytes, Path] =", "rmtree(path) else: os.remove(path) else: log.info(f\"[yeah]{name} already absent.[/yeah]\", path=path) @LOG.inject def dir_present(path: Path, desc:", "# Special case #3 — value is a `list` or `tuple` # #", "is_short else f\"{long_prefix}{name_s}\" yield from _iter_opt(flag, value, style, is_short, rel_to) def render_args( args:", "shlex.join(prepare(*args, **opts)) def prepare_wrap(fn: Callable) -> Callable: \"\"\"\\ Decorator helper to run `prepare`", "with positional arguments, `pathlib.Path` option values can be rendered relative to a `rel_to`", "as options, passed through `render_opts`. 4. `typing.Iterable` -- recurred into. 5. Other --", "isinstance(value, Path): value = render_path(value, rel_to) if value is None or value is", "import Path import json from shutil import rmtree import shlex from functools import", "list(render_opts({'x': 123, 'y': 456}, style=\"\")) ['-x123', '-y456'] ### List Value Examples ### 1.", "is None or value is False: # Special case #1 — value is", "value: yield from _iter_opt(flag, item, style, is_short) elif style == \" \" or", "space-separated # # _Short_ (single-character) flags and values are _always_ space- # sparated.", "and values are _always_ space- # sparated. # # _All_ flags and values", "if path.is_dir(): rmtree(path) else: os.remove(path) else: log.info(f\"[yeah]{name} already absent.[/yeah]\", path=path) @LOG.inject def dir_present(path:", "True, input: Union[None, str, bytes, Path] = None, **opts, ) -> CompletedProcess: log.info(", "cmd=fmt_cmd(cmd), format=format, **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output = subprocess.check_output(cmd, **opts) if format is", "-- passed through. 2. 
`pathlib.Path` -- passed (along with `rel_to`) through `render_path`. 3.", "rel_paths: bool = CONFIG.rel_paths, **opts, ) -> List[str]: \"\"\"\\ Prepare `args` to be", "'1', '--meh', '2'] 3. Use a single `-` prefix on long options (\"X", "item, style, is_short) elif style == \" \" or (is_short and style !=", "CompletedProcess: log.info( \"Running system command...\", cmd=fmt_cmd(cmd), **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.run if isinstance(input, Path):", "\"no-separator\" style you sometimes see: >>> list(render_opts({'x': 123, 'y': 456}, style=\"\")) ['-x123', '-y456']", "a single `-` prefix on long options (\"X toolkit\" style): >>> list(render_opts({'blah': 1,", "\"\"\" return run(*args, check=False, **kwds).returncode == 0 @LOG.inject @prepare_wrap def replace( *cmd, log=LOG,", "'--bee=2'] 2. Use space-separated option names and values: >>> list(render_opts({'blah': 1, 'meh': 2},", "to be accepted here encoding: Optional[str] = None, env: Optional[Mapping] = None, cwd:", "else f\"{long_prefix}{name_s}\" yield from _iter_opt(flag, value, style, is_short, rel_to) def render_args( args: Iterable[Any],", "rel_to: Optional[Path] = None, ) -> Generator[str, None, None]: \"\"\"Private helper for `iter_opts`.\"\"\"", "`cwd` or assumed to be the current directory. Relative path conversion is controlled", "'--follow', 'some-pod'] \"\"\" # Normalize str cwd path to Path if isinstance(cwd, str):", "LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT = True CompletedProcess = subprocess.CompletedProcess", "do a bit more common normalization for `get`, `run` etc. \"\"\" @wraps(fn) def", "List Value Examples ### 1. Short opt with a list (or tuple) value:", "values are _always_ space- # sparated. # # _All_ flags and values are", "= None return list(render_args(args, rel_to=rel_to, **opts)) def join(*args, **opts) -> str: \"\"\"\\ Render", "functions. Contextualizes the relative path capabilities of `render_args` and `render_opts` to the working", "(or tuple) value: >>> list(render_opts({'blah': [1, 2, 3]})) ['--blah=1', '--blah=2', '--blah=3'] 3. Due", "branch has matched, we're left with `=`-separated flag # and value. # yield", "yield from _iter_opt(flag, item, style, is_short) elif style == \" \" or (is_short", "# # We omit these entirely. # pass elif value is True: #", "**opts, ) -> CompletedProcess: log.info( \"Running system command...\", cmd=fmt_cmd(cmd), **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.run", "matched, we're left with `=`-separated flag # and value. # yield f\"{flag}{style}{value}\" def", "omit these entirely. # pass elif value is True: # Special case #2", "names to values to a (yielded) sequence of strings. Examples: ### Style Examples", "-> str: if rel_to is None: return str(path) return str(path.relative_to(rel_to)) def _iter_opt( flag:", "'-x', '2', '-x', '3'] 2. Long opt with a list (or tuple) value:", "Path, name: Optional[str] = None, log=LOG): if name is None: name = fmt(path)", "items: name_s = str(name) is_short = len(name_s) == 1 flag = f\"-{name_s}\" if", "pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd, log=LOG, format: Optional[str] = None, **opts,", "1. 
By default, `=` is used to separate \"long options\" and their values,", "to separate \"long options\" and their values, while \"short options\" (single-character options) are", "bool = CONFIG.rel_paths, **opts, ): # Normalize str cwd path to Path if", "is_short: bool, rel_to: Optional[Path] = None, ) -> Generator[str, None, None]: \"\"\"Private helper", "When no other branch has matched, we're left with `=`-separated flag # and", "https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output = subprocess.check_output(cmd, **opts) if format is None: return output elif format", "encoding=\"utf-8\") as file: return subprocess.run( cmd, check=check, input=file.read(), **opts, ) else: return subprocess.run(cmd,", "def render_opts( opts: TOpts, *, long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, sort: bool = CONFIG.opts.sort,", "### 1. As with positional arguments, `pathlib.Path` option values can be rendered relative", "#2 — value is `True` # # We emit the bare flag, like", "prefix on long options (\"X toolkit\" style): >>> list(render_opts({'blah': 1, 'meh': 2}, long_prefix='-'))", "passed (along with `rel_to`) through `render_path`. 3. `typing.Mapping` -- understood as options, passed", "= None, encoding: Optional[str] = CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool =", ">>> list(render_opts({'x': [1, 2, 3]})) ['-x', '1', '-x', '2', '-x', '3'] 2. Long", "is NOT a directory\") else: log.info(f\"[holup]Creating {desc} directory...[/holup]\", path=path) os.makedirs(path) if __name__ ==", "def replace( *cmd, log=LOG, # Used, but defaulted in `prepare_cmd`, so needs to", "**opts, ): # Normalize str cwd path to Path if isinstance(cwd, str): cwd", ".io import OUT, ERR, fmt, fmt_cmd TOpts = Mapping[Any, Any] TOptsStyle = Literal[\"=\",", "prepare( *args, cwd: Optional[_TPath] = None, rel_paths: bool = CONFIG.rel_paths, **opts, ) ->", ") # https://docs.python.org/3.8/library/subprocess.html#subprocess.run if isinstance(input, Path): with input.open(\"r\", encoding=\"utf-8\") as file: return subprocess.run(", "return str(path.relative_to(rel_to)) def _iter_opt( flag: str, value: Any, style: TOptsStyle, is_short: bool, rel_to:", "Neat, huh?! ### Relative Path Examples ### 1. As with positional arguments, `pathlib.Path`", "return fn(*cmd, cwd=cwd, encoding=encoding, **opts) return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def", "value is `None` or `False` # # We omit these entirely. # pass", "Run a command and return whether or not it succeeds (has `subprocess.CompletedProcess.returncode` equal", "# _All_ flags and values are space-separated when the `style` is `\" \"`.", "output = subprocess.check_output(cmd, **opts) if format is None: return output elif format ==", "# https://docs.python.org/3.8/library/subprocess.html#subprocess.run if isinstance(input, Path): with input.open(\"r\", encoding=\"utf-8\") as file: return subprocess.run( cmd,", "\"\"\" Render a mapping of option names to values to a (yielded) sequence", "return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd, log=LOG, format: Optional[str]", "== \" \" or (is_short and style != \"\"): # General case #1", "list( ... render_opts( ... { ... 'input': Path(\"/tmp/blah.json\"), ... 'output': Path(\"/dev/null\"), ... },", "`rel_to` directory. 
Only paths that are descendants of `rel_to` will be relativized (no", "str(path) return str(path.relative_to(rel_to)) def _iter_opt( flag: str, value: Any, style: TOptsStyle, is_short: bool,", "desc: Optional[str] = None, log=LOG): if desc is None: desc = fmt(path) if", "common normalization for `get`, `run` etc. \"\"\" @wraps(fn) def _prepare_wrapper( *args, cwd: Optional[_TPath]", "= f\"-{name_s}\" if is_short else f\"{long_prefix}{name_s}\" yield from _iter_opt(flag, value, style, is_short, rel_to)", "else: rel_to = None return list(render_args(args, rel_to=rel_to, **opts)) def join(*args, **opts) -> str:", "or `False` # # We omit these entirely. # pass elif value is", ") # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output = subprocess.check_output(cmd, **opts) if format is None: return output", "if desc is None: desc = fmt(path) if path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc}", "#3 — value is a `list` or `tuple` # # We handle these", "option names to values to a (yielded) sequence of strings. Examples: ### Style", "name = fmt(path) if path.exists(): log.info(f\"[holup]Removing {name}...[/holup]\", path=path) if path.is_dir(): rmtree(path) else: os.remove(path)", "style: TOptsStyle, is_short: bool, rel_to: Optional[Path] = None, ) -> Generator[str, None, None]:", "we're left with `=`-separated flag # and value. # yield f\"{flag}{style}{value}\" def render_opts(", "case #2 — flag=value format # # When no other branch has matched,", "if isabs(cmd[0]): os.execv(cmd[0], cmd) else: os.execvp(proc_name, cmd) else: if isabs(cmd[0]): os.execve(cmd[0], cmd, env)", "format == \"json\": return json.loads(output) else: log.warn(\"Unknown `format`\", format=format, expected=[None, \"json\"]) return output", "if isinstance(input, Path): with input.open(\"r\", encoding=\"utf-8\") as file: return subprocess.run( cmd, check=check, input=file.read(),", "directory. Only paths that are descendants of `rel_to` will be relativized (no `../`", "`-x` or `--blah`. # yield flag elif isinstance(value, (list, tuple)): # Special case", "path to Path if isinstance(cwd, str): cwd = Path(cwd) cmd = prepare( *args,", "in value: yield from _iter_opt(flag, item, style, is_short) elif style == \" \"", "replace( *cmd, log=LOG, # Used, but defaulted in `prepare_cmd`, so needs to be", "@prepare_wrap def replace( *cmd, log=LOG, # Used, but defaulted in `prepare_cmd`, so needs", "is True: rel_to = Path.cwd() if cwd is None else cwd else: rel_to", "= CFG.clavier.sh LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT = True CompletedProcess", "opt with a list (or tuple) value: >>> list(render_opts({'x': [1, 2, 3]})) ['-x',", "4], 5]})) ['--blah=1', '--blah=2', '--blah=3', '--blah=4', '--blah=5'] Neat, huh?! ### Relative Path Examples", "entries are handled by type: 1. `str` and `bytes` -- passed through. 2.", "@LOG.inject @prepare_wrap def run( *cmd, log=LOG, check: bool = True, input: Union[None, str,", "!= \"\"): # General case #1 — space-separated # # _Short_ (single-character) flags", "'--output=/dev/null'] \"\"\" # Handle `None` as a legit value, making life easier on", "3]})) ['--blah=1', '--blah=2', '--blah=3'] 3. 
Due to the recursive, yield-centric nature, nested lists", "isinstance(cwd, str): cwd = Path(cwd) if rel_paths is True: rel_to = Path.cwd() if", "`render_opts` to the working directory, which can either be provided as `cwd` or", "pathlib import Path import json from shutil import rmtree import shlex from functools", "style != \"\"): # General case #1 — space-separated # # _Short_ (single-character)", "None, **opts, ) -> Any: log.debug( \"Getting system command output...\", cmd=fmt_cmd(cmd), format=format, **opts,", "defaulted in `prepare_cmd`, so needs to be accepted here encoding: Optional[str] = None,", "space-separated when the `style` is `\" \"`. # yield flag yield str(value) else:", "render_args( arg, opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else: yield str(arg) def prepare( *args,", "is_short, rel_to) def render_args( args: Iterable[Any], *, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool", "['--input=blah.json', '--output=/dev/null'] \"\"\" # Handle `None` as a legit value, making life easier", "(str, bytes)): yield arg elif isinstance(arg, Path): yield render_path(arg, rel_to) elif isinstance(arg, Mapping):", "None, env: Optional[Mapping] = None, cwd: Optional[Union[str, Path]] = None, ) -> NoReturn:", "# yield flag yield str(value) else: # General case #2 — flag=value format", "Generator[str, None, None]: \"\"\"Private helper for `iter_opts`.\"\"\" if isinstance(value, Path): value = render_path(value,", "cwd: Optional[_TPath] = None, encoding: Optional[str] = CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort:", "Render a mapping of option names to values to a (yielded) sequence of", "NOT a directory\") else: log.info(f\"[holup]Creating {desc} directory...[/holup]\", path=path) os.makedirs(path) if __name__ == \"__main__\":", "CONFIG.rel_paths, **opts, ): # Normalize str cwd path to Path if isinstance(cwd, str):", "NoReturn: # https://docs.python.org/3.9/library/os.html#os.execl for console in (OUT, ERR): console.file.flush() proc_name = basename(cmd[0]) log.debug(", "once for each # inner value. # for item in value: yield from", "... \"kubectl\", ... {\"namespace\": \"blah\"}, ... \"logs\", ... {\"follow\": True}, ... \"some-pod\", ...", "flags and values are space-separated when the `style` is `\" \"`. # yield", "5]})) ['--blah=1', '--blah=2', '--blah=3', '--blah=4', '--blah=5'] Neat, huh?! 
### Relative Path Examples ###", "already exists.[/yeah]\", path=path ) else: raise RuntimeError(f\"{path} exists and is NOT a directory\")", "rel_to: Optional[Path]) -> str: if rel_to is None: return str(path) return str(path.relative_to(rel_to)) def", "= render_path(value, rel_to) if value is None or value is False: # Special", "# Special case #1 — value is `None` or `False` # # We", "import rmtree import shlex from functools import wraps import splatlog as logging from", "\"short options\" (single-character options) are always separate tokens from their values: >>> list(render_opts({\"a\":", "CONFIG.opts.long_prefix, sort: bool = CONFIG.opts.sort, style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] = None,", "bytes, Path] = None, **opts, ) -> CompletedProcess: log.info( \"Running system command...\", cmd=fmt_cmd(cmd),", "Mapping): yield from render_opts( arg, long_prefix=opts_long_prefix, style=opts_style, sort=opts_sort, rel_to=rel_to, ) elif isinstance(arg, Iterable):", "= CONFIG.opts.style, rel_to: Optional[Path] = None, ) -> Generator[str, None, None]: \"\"\" Render", "**kwds) -> bool: \"\"\"\\ Run a command and return whether or not it", "Only paths that are descendants of `rel_to` will be relativized (no `../` transformations).", "list(opts.items()) for name, value in items: name_s = str(name) is_short = len(name_s) ==", "is None else cwd else: rel_to = None return list(render_args(args, rel_to=rel_to, **opts)) def", "str] CONFIG = CFG.clavier.sh LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT =", "tuple)): # Special case #3 — value is a `list` or `tuple` #", "(and/or `bytes`, if any values passed in are `bytes`). `args` entries are handled", "os.execv(cmd[0], cmd) else: os.execvp(proc_name, cmd) else: if isabs(cmd[0]): os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name,", "1, \"bee\": 2})) ['-a', '1', '--bee=2'] 2. Use space-separated option names and values:", "value in items: name_s = str(name) is_short = len(name_s) == 1 flag =", "similar functions. Contextualizes the relative path capabilities of `render_args` and `render_opts` to the", "Render `args` to sequence of `str` (and/or `bytes`, if any values passed in", "times, once for each # inner value. # for item in value: yield", "options) are always separate tokens from their values: >>> list(render_opts({\"a\": 1, \"bee\": 2}))", ">>> list(render_opts({'blah': 1, 'meh': 2}, style=\" \")) ['--blah', '1', '--meh', '2'] 3. Use", "for `iter_opts`.\"\"\" if isinstance(value, Path): value = render_path(value, rel_to) if value is None", "of option names to values to a (yielded) sequence of strings. Examples: ###", "`tuple` # # We handle these by emitting the option multiples times, once", "path.is_dir(): log.debug( f\"[yeah]{desc} directory already exists.[/yeah]\", path=path ) else: raise RuntimeError(f\"{path} exists and", "fmt_cmd TOpts = Mapping[Any, Any] TOptsStyle = Literal[\"=\", \" \", \"\"] TOptsLongPrefix =", "https://docs.python.org/3.9/library/os.html#os.execl for console in (OUT, ERR): console.file.flush() proc_name = basename(cmd[0]) log.debug( \"Replacing current", "rel_to is None: return str(path) return str(path.relative_to(rel_to)) def _iter_opt( flag: str, value: Any,", "understood as options, passed through `render_opts`. 4. `typing.Iterable` -- recurred into. 5. 
Other", "long_prefix=opts_long_prefix, style=opts_style, sort=opts_sort, rel_to=rel_to, ) elif isinstance(arg, Iterable): yield from render_args( arg, opts_long_prefix=opts_long_prefix,", "opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else: yield str(arg) def prepare( *args, cwd: Optional[_TPath]", "-- converted to a string with `str()`. \"\"\" for arg in args: if", "(has `subprocess.CompletedProcess.returncode` equal to `0`). >>> test(\"true\", shell=True) True >>> test(\"false\", shell=True) False", "3]})) ['-x', '1', '-x', '2', '-x', '3'] 2. Long opt with a list", "): # Normalize str cwd path to Path if isinstance(cwd, str): cwd =", "'3'] 2. Long opt with a list (or tuple) value: >>> list(render_opts({'blah': [1,", "# General case #2 — flag=value format # # When no other branch", "opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_to:", "— value is a `list` or `tuple` # # We handle these by", "Optional[str] = CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle", "1. As with positional arguments, `pathlib.Path` option values can be rendered relative to", "is `\" \"`. # yield flag yield str(value) else: # General case #2", "f\"[yeah]{desc} directory already exists.[/yeah]\", path=path ) else: raise RuntimeError(f\"{path} exists and is NOT", "join(*args, **opts) -> str: \"\"\"\\ Render `args` to a single string with `prepare`", "Decorator helper to run `prepare` and do a bit more common normalization for", "if rel_paths is True: rel_to = Path.cwd() if cwd is None else cwd", "`run` etc. \"\"\" @wraps(fn) def _prepare_wrapper( *args, cwd: Optional[_TPath] = None, encoding: Optional[str]", "'output': Path(\"/dev/null\"), ... }, ... rel_to=Path(\"/tmp\") ... ) ... ) ['--input=blah.json', '--output=/dev/null'] \"\"\"", "rel_to=rel_to, ) elif isinstance(arg, Iterable): yield from render_args( arg, opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to,", "of `str` (and/or `bytes`, if any values passed in are `bytes`). `args` entries", "log.debug( f\"[yeah]{desc} directory already exists.[/yeah]\", path=path ) else: raise RuntimeError(f\"{path} exists and is", "rel_paths=rel_paths, ) return fn(*cmd, cwd=cwd, encoding=encoding, **opts) return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject", "list(render_opts({'blah': 1, 'meh': 2}, style=\" \")) ['--blah', '1', '--meh', '2'] 3. Use a", "= logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT = True CompletedProcess = subprocess.CompletedProcess def", "**opts) if format is None: return output elif format == \"strip\": return output.strip()", "not it succeeds (has `subprocess.CompletedProcess.returncode` equal to `0`). >>> test(\"true\", shell=True) True >>>", "is None: if isabs(cmd[0]): os.execv(cmd[0], cmd) else: os.execvp(proc_name, cmd) else: if isabs(cmd[0]): os.execve(cmd[0],", "directory, which can either be provided as `cwd` or assumed to be the", "**opts) @LOG.inject def test(*args, **kwds) -> bool: \"\"\"\\ Run a command and return", "2}, style=\" \")) ['--blah', '1', '--meh', '2'] 3. Use a single `-` prefix", "'-x', '3'] 2. 
Long opt with a list (or tuple) value: >>> list(render_opts({'blah':", "\"Running system command...\", cmd=fmt_cmd(cmd), **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.run if isinstance(input, Path): with input.open(\"r\",", "by type: 1. `str` and `bytes` -- passed through. 2. `pathlib.Path` -- passed", "ERR, fmt, fmt_cmd TOpts = Mapping[Any, Any] TOptsStyle = Literal[\"=\", \" \", \"\"]", "log.debug( \"Getting system command output...\", cmd=fmt_cmd(cmd), format=format, **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output =", "{\"follow\": True}, ... \"some-pod\", ... ) ['kubectl', '--namespace=blah', 'logs', '--follow', 'some-pod'] \"\"\" #", "conversion is controlled by the `rel_paths` flag. ## Examples ## >>> prepare( ...", "Path if isinstance(cwd, str): cwd = Path(cwd) cmd = prepare( *args, cwd=cwd, opts_long_prefix=opts_long_prefix,", "be suitable for pasting in a shell. ## Parameters ## Same as `prepare`.", "handled by type: 1. `str` and `bytes` -- passed through. 2. `pathlib.Path` --", ") -> CompletedProcess: log.info( \"Running system command...\", cmd=fmt_cmd(cmd), **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.run if", "Optional[Mapping] = None, cwd: Optional[Union[str, Path]] = None, ) -> NoReturn: # https://docs.python.org/3.9/library/os.html#os.execl", "@LOG.inject @prepare_wrap def get( *cmd, log=LOG, format: Optional[str] = None, **opts, ) ->", "TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] = None, ) -> Generator[str, None, None]: \"\"\"", "`render_args` and `render_opts` to the working directory, which can either be provided as", "values can be rendered relative to a `rel_to` directory. Only paths that are", "of `rel_to` will be relativized (no `../` transformations). >>> list( ... render_opts( ...", "or (is_short and style != \"\"): # General case #1 — space-separated #", "*args, cwd=cwd, opts_long_prefix=opts_long_prefix, opts_sort=opts_sort, opts_style=opts_style, rel_paths=rel_paths, ) return fn(*cmd, cwd=cwd, encoding=encoding, **opts) return", "for name, value in items: name_s = str(name) is_short = len(name_s) == 1", "'1', '--bee=2'] 2. Use space-separated option names and values: >>> list(render_opts({'blah': 1, 'meh':", "Render `args` to a single string with `prepare` -> `shlex.join`. Returned string _should_", "['-x123', '-y456'] ### List Value Examples ### 1. Short opt with a list", "CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] = None,", "... \"some-pod\", ... ) ['kubectl', '--namespace=blah', 'logs', '--follow', 'some-pod'] \"\"\" # Normalize str", "@LOG.inject @prepare_wrap def replace( *cmd, log=LOG, # Used, but defaulted in `prepare_cmd`, so", "be relativized (no `../` transformations). >>> list( ... render_opts( ... { ... 'input':", "= CONFIG.opts.long_prefix, sort: bool = CONFIG.opts.sort, style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] =", "= Mapping[Any, Any] TOptsStyle = Literal[\"=\", \" \", \"\"] TOptsLongPrefix = Literal[\"--\", \"-\"]", "import splatlog as logging from .cfg import CFG from .io import OUT, ERR,", "system command...\", cmd=fmt_cmd(cmd), **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.run if isinstance(input, Path): with input.open(\"r\", encoding=\"utf-8\")", "f\"-{name_s}\" if is_short else f\"{long_prefix}{name_s}\" yield from _iter_opt(flag, value, style, is_short, rel_to) def", "space- # sparated. 

def _iter_opt(
    flag: str,
    value: Any,
    style: TOptsStyle,
    is_short: bool,
    rel_to: Optional[Path] = None,
) -> Generator[str, None, None]:
    """Private helper for `render_opts`."""

    if isinstance(value, Path):
        value = render_path(value, rel_to)

    if value is None or value is False:
        # Special case #1 — value is `None` or `False`
        #
        # We omit these entirely.
        #
        pass
    elif value is True:
        # Special case #2 — value is `True`
        #
        # We emit the bare flag, like `-x` or `--blah`.
        #
        yield flag
    elif isinstance(value, (list, tuple)):
        # Special case #3 — value is a `list` or `tuple`
        #
        # We handle these by emitting the option multiple times, once for
        # each inner value (propagating `rel_to` so nested `Path` values
        # relativize too).
        #
        for item in value:
            yield from _iter_opt(flag, item, style, is_short, rel_to)
    elif style == " " or (is_short and style != ""):
        # General case #1 — space-separated
        #
        # _Short_ (single-character) flags and values are _always_
        # space-separated.
        #
        # _All_ flags and values are space-separated when the `style` is
        # `" "`.
        #
        yield flag
        yield str(value)
    else:
        # General case #2 — flag=value format
        #
        # When no other branch has matched, we're left with the
        # `style`-separated flag and value.
        #
        yield f"{flag}{style}{value}"
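
# Illustrative sketch (comments only, not executed): one example per branch
# above, using the `=` style.
#
#   list(_iter_opt("--blah", None, "=", False))    # -> []            special #1
#   list(_iter_opt("--blah", True, "=", False))    # -> ['--blah']    special #2
#   list(_iter_opt("--blah", [1, 2], "=", False))  # -> ['--blah=1', '--blah=2']
#   list(_iter_opt("-x", 1, "=", True))            # -> ['-x', '1']   general #1
#   list(_iter_opt("--blah", 1, "=", False))       # -> ['--blah=1']  general #2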
\"\"\" @wraps(fn) def _prepare_wrapper( *args, cwd: Optional[_TPath] = None,", "None, ) -> Generator[str, None, None]: \"\"\"Private helper for `iter_opts`.\"\"\" if isinstance(value, Path):", "**opts)) def prepare_wrap(fn: Callable) -> Callable: \"\"\"\\ Decorator helper to run `prepare` and", "return output elif format == \"strip\": return output.strip() elif format == \"json\": return", "json.loads(output) else: log.warn(\"Unknown `format`\", format=format, expected=[None, \"json\"]) return output @LOG.inject @prepare_wrap def run(", "tokens from their values: >>> list(render_opts({\"a\": 1, \"bee\": 2})) ['-a', '1', '--bee=2'] 2.", "'--blah=3'] 3. Due to the recursive, yield-centric nature, nested lists work as well:", "= CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] =", "Special case #2 — value is `True` # # We emit the bare", "elif isinstance(arg, Iterable): yield from render_args( arg, opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else:", "Examples ## >>> prepare( ... \"kubectl\", ... {\"namespace\": \"blah\"}, ... \"logs\", ... {\"follow\":", "long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, sort: bool = CONFIG.opts.sort, style: TOptsStyle = CONFIG.opts.style, rel_to:", "a (yielded) sequence of strings. Examples: ### Style Examples ### 1. By default,", "else: log.info(f\"[holup]Creating {desc} directory...[/holup]\", path=path) os.makedirs(path) if __name__ == \"__main__\": import doctest doctest.testmod()", "Union[Path, str] CONFIG = CFG.clavier.sh LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT", "if format is None: return output elif format == \"strip\": return output.strip() elif", "**opts, ) else: return subprocess.run(cmd, check=check, input=input, **opts) @LOG.inject def test(*args, **kwds) ->", "Path] = None, **opts, ) -> CompletedProcess: log.info( \"Running system command...\", cmd=fmt_cmd(cmd), **opts,", "= None, env: Optional[Mapping] = None, cwd: Optional[Union[str, Path]] = None, ) ->", "Iterable[Any], *, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle =", "desc = fmt(path) if path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc} directory already exists.[/yeah]\", path=path", "Optional[str] = None, log=LOG): if desc is None: desc = fmt(path) if path.exists():", "pairs if needed items = sorted(opts.items()) if sort else list(opts.items()) for name, value", "cmd) else: os.execvp(proc_name, cmd) else: if isabs(cmd[0]): os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name, cmd,", "values are space-separated when the `style` is `\" \"`. # yield flag yield", "None, None]: \"\"\" Render a mapping of option names to values to a", "None: return output elif format == \"strip\": return output.strip() elif format == \"json\":", "be the current directory. Relative path conversion is controlled by the `rel_paths` flag.", "a command and return whether or not it succeeds (has `subprocess.CompletedProcess.returncode` equal to", "... 'output': Path(\"/dev/null\"), ... }, ... rel_to=Path(\"/tmp\") ... ) ... ) ['--input=blah.json', '--output=/dev/null']", "for pasting in a shell. ## Parameters ## Same as `prepare`. 
\"\"\" return", "— space-separated # # _Short_ (single-character) flags and values are _always_ space- #", "desc is None: desc = fmt(path) if path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc} directory", "(single-character options) are always separate tokens from their values: >>> list(render_opts({\"a\": 1, \"bee\":", "to be passed `subprocess.run` or similar functions. Contextualizes the relative path capabilities of", "import subprocess from pathlib import Path import json from shutil import rmtree import", "render_path(value, rel_to) if value is None or value is False: # Special case", "path capabilities of `render_args` and `render_opts` to the working directory, which can either", "huh?! ### Relative Path Examples ### 1. As with positional arguments, `pathlib.Path` option", "Path(\"/tmp/blah.json\"), ... 'output': Path(\"/dev/null\"), ... }, ... rel_to=Path(\"/tmp\") ... ) ... ) ['--input=blah.json',", "\"\"\" @wraps(fn) def _prepare_wrapper( *args, cwd: Optional[_TPath] = None, encoding: Optional[str] = CONFIG.encoding,", "= CONFIG.opts.style, rel_paths: bool = CONFIG.rel_paths, **opts, ): # Normalize str cwd path", "\"json\": return json.loads(output) else: log.warn(\"Unknown `format`\", format=format, expected=[None, \"json\"]) return output @LOG.inject @prepare_wrap", "is_short) elif style == \" \" or (is_short and style != \"\"): #", "\"bee\": 2})) ['-a', '1', '--bee=2'] 2. Use space-separated option names and values: >>>", "Normalize str cwd path to Path if isinstance(cwd, str): cwd = Path(cwd) cmd", "if rel_to is None: return str(path) return str(path.relative_to(rel_to)) def _iter_opt( flag: str, value:", "log=LOG): if name is None: name = fmt(path) if path.exists(): log.info(f\"[holup]Removing {name}...[/holup]\", path=path)", "**opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output = subprocess.check_output(cmd, **opts) if format is None: return", "Path if isinstance(cwd, str): cwd = Path(cwd) if rel_paths is True: rel_to =", "cwd: Optional[Union[str, Path]] = None, ) -> NoReturn: # https://docs.python.org/3.9/library/os.html#os.execl for console in", "flags and values are _always_ space- # sparated. # # _All_ flags and", "encoding: Optional[str] = CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style:", "def dir_present(path: Path, desc: Optional[str] = None, log=LOG): if desc is None: desc", "return subprocess.run(cmd, check=check, input=input, **opts) @LOG.inject def test(*args, **kwds) -> bool: \"\"\"\\ Run", "in `prepare_cmd`, so needs to be accepted here encoding: Optional[str] = None, env:", "None return list(render_args(args, rel_to=rel_to, **opts)) def join(*args, **opts) -> str: \"\"\"\\ Render `args`", "\"\"): # General case #1 — space-separated # # _Short_ (single-character) flags and", "False: # Special case #1 — value is `None` or `False` # #", "Use space-separated option names and values: >>> list(render_opts({'blah': 1, 'meh': 2}, style=\" \"))", "are `bytes`). `args` entries are handled by type: 1. `str` and `bytes` --", "# General case #1 — space-separated # # _Short_ (single-character) flags and values", "... {\"namespace\": \"blah\"}, ... \"logs\", ... {\"follow\": True}, ... \"some-pod\", ... 

def render_args(
    args: Iterable[Any],
    *,
    opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix,
    opts_sort: bool = CONFIG.opts.sort,
    opts_style: TOptsStyle = CONFIG.opts.style,
    rel_to: Optional[Path] = None,
) -> Generator[Union[str, bytes], None, None]:
    """\
    Render `args` to a sequence of `str` (and/or `bytes`, if any values
    passed in are `bytes`).

    `args` entries are handled by type:

    1.  `str` and `bytes` -- passed through.
    2.  `pathlib.Path` -- passed (along with `rel_to`) through `render_path`.
    3.  `typing.Mapping` -- understood as options, passed through
        `render_opts`.
    4.  `typing.Iterable` -- recursed into.
    5.  Other -- converted to a string with `str()`.
    """
    for arg in args:
        if isinstance(arg, (str, bytes)):
            yield arg
        elif isinstance(arg, Path):
            yield render_path(arg, rel_to)
        elif isinstance(arg, Mapping):
            yield from render_opts(
                arg,
                long_prefix=opts_long_prefix,
                style=opts_style,
                sort=opts_sort,
                rel_to=rel_to,
            )
        elif isinstance(arg, Iterable):
            yield from render_args(
                arg,
                opts_long_prefix=opts_long_prefix,
                opts_style=opts_style,
                opts_sort=opts_sort,
                rel_to=rel_to,
            )
        else:
            yield str(arg)
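
# Illustrative sketch (comments only, not executed): mixed argument types
# flatten into a single token stream, assuming the default `--`/`=` option
# config.
#
#   list(render_args(["git", {"work-tree": Path("/repo")}, ["log", 1]]))
#   # -> ['git', '--work-tree=/repo', 'log', '1']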

def prepare(
    *args,
    cwd: Optional[_TPath] = None,
    rel_paths: bool = CONFIG.rel_paths,
    **opts,
) -> List[str]:
    """\
    Prepare `args` to be passed to `subprocess.run` or similar functions.

    Contextualizes the relative path capabilities of `render_args` and
    `render_opts` to the working directory, which can either be provided as
    `cwd` or assumed to be the current directory.

    Relative path conversion is controlled by the `rel_paths` flag.

    ## Examples ##

    >>> prepare(
    ...     "kubectl",
    ...     {"namespace": "blah"},
    ...     "logs",
    ...     {"follow": True},
    ...     "some-pod",
    ... )
    ['kubectl', '--namespace=blah', 'logs', '--follow', 'some-pod']
    """
    # Normalize a str cwd path to a Path
    if isinstance(cwd, str):
        cwd = Path(cwd)
    if rel_paths is True:
        rel_to = Path.cwd() if cwd is None else cwd
    else:
        rel_to = None
    return list(render_args(args, rel_to=rel_to, **opts))
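
# Illustrative sketch (comments only, not executed): with `rel_paths=True`,
# `Path` args render relative to `cwd` (a hypothetical /repo checkout here).
#
#   prepare("ls", Path("/repo/src"), cwd="/repo", rel_paths=True)
#   # -> ['ls', 'src']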
\"\"\" for arg in args:", "to sequence of `str` (and/or `bytes`, if any values passed in are `bytes`).", "or value is False: # Special case #1 — value is `None` or", "def _prepare_wrapper( *args, cwd: Optional[_TPath] = None, encoding: Optional[str] = CONFIG.encoding, opts_long_prefix: TOptsLongPrefix", "check=False, **kwds).returncode == 0 @LOG.inject @prepare_wrap def replace( *cmd, log=LOG, # Used, but", "command and return whether or not it succeeds (has `subprocess.CompletedProcess.returncode` equal to `0`).", "opts_style=opts_style, rel_paths=rel_paths, ) return fn(*cmd, cwd=cwd, encoding=encoding, **opts) return _prepare_wrapper # pylint: disable=redefined-builtin", "relative to a `rel_to` directory. Only paths that are descendants of `rel_to` will", "_always_ space- # sparated. # # _All_ flags and values are space-separated when", "**opts, ) -> Any: log.debug( \"Getting system command output...\", cmd=fmt_cmd(cmd), format=format, **opts, )", "= str(name) is_short = len(name_s) == 1 flag = f\"-{name_s}\" if is_short else", "bool, rel_to: Optional[Path] = None, ) -> Generator[str, None, None]: \"\"\"Private helper for", "1. `str` and `bytes` -- passed through. 2. `pathlib.Path` -- passed (along with", "Path): with input.open(\"r\", encoding=\"utf-8\") as file: return subprocess.run( cmd, check=check, input=file.read(), **opts, )", "(along with `rel_to`) through `render_path`. 3. `typing.Mapping` -- understood as options, passed through", "be provided as `cwd` or assumed to be the current directory. Relative path", "log.debug( \"Replacing current process with system command...\", cmd=fmt_cmd(cmd), env=env, cwd=cwd, ) if cwd", ") return fn(*cmd, cwd=cwd, encoding=encoding, **opts) return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap", "\" or (is_short and style != \"\"): # General case #1 — space-separated", "3. Use a single `-` prefix on long options (\"X toolkit\" style): >>>", "None, log=LOG): if desc is None: desc = fmt(path) if path.exists(): if path.is_dir():", "'y': 456}, style=\"\")) ['-x123', '-y456'] ### List Value Examples ### 1. 
Short opt", "a directory\") else: log.info(f\"[holup]Creating {desc} directory...[/holup]\", path=path) os.makedirs(path) if __name__ == \"__main__\": import", "elif format == \"strip\": return output.strip() elif format == \"json\": return json.loads(output) else:", "def _iter_opt( flag: str, value: Any, style: TOptsStyle, is_short: bool, rel_to: Optional[Path] =", "return str(path) return str(path.relative_to(rel_to)) def _iter_opt( flag: str, value: Any, style: TOptsStyle, is_short:", "CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style,", "recursive, yield-centric nature, nested lists work as well: >>> list(render_opts({'blah': [1, 2, [[3],", "output.strip() elif format == \"json\": return json.loads(output) else: log.warn(\"Unknown `format`\", format=format, expected=[None, \"json\"])", "**opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.run if isinstance(input, Path): with input.open(\"r\", encoding=\"utf-8\") as file: return", "# https://docs.python.org/3.9/library/os.html#os.execl for console in (OUT, ERR): console.file.flush() proc_name = basename(cmd[0]) log.debug( \"Replacing", "args: if isinstance(arg, (str, bytes)): yield arg elif isinstance(arg, Path): yield render_path(arg, rel_to)", "opt with a list (or tuple) value: >>> list(render_opts({'blah': [1, 2, 3]})) ['--blah=1',", "Generator[Union[str, bytes], None, None]: \"\"\"\\ Render `args` to sequence of `str` (and/or `bytes`,", "value is a `list` or `tuple` # # We handle these by emitting", "a list (or tuple) value: >>> list(render_opts({'x': [1, 2, 3]})) ['-x', '1', '-x',", "format: Optional[str] = None, **opts, ) -> Any: log.debug( \"Getting system command output...\",", "is not None: os.chdir(cwd) if env is None: if isabs(cmd[0]): os.execv(cmd[0], cmd) else:", "Sort key/value pairs if needed items = sorted(opts.items()) if sort else list(opts.items()) for", "rel_paths: bool = CONFIG.rel_paths, **opts, ): # Normalize str cwd path to Path", "isabs(cmd[0]): os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name, cmd, env) @LOG.inject def file_absent(path: Path, name:", "assumed to be the current directory. Relative path conversion is controlled by the", "console.file.flush() proc_name = basename(cmd[0]) log.debug( \"Replacing current process with system command...\", cmd=fmt_cmd(cmd), env=env,", "Contextualizes the relative path capabilities of `render_args` and `render_opts` to the working directory,", "rel_to = Path.cwd() if cwd is None else cwd else: rel_to = None", "through `render_opts`. 4. `typing.Iterable` -- recurred into. 5. Other -- converted to a", "2, [[3], 4], 5]})) ['--blah=1', '--blah=2', '--blah=3', '--blah=4', '--blah=5'] Neat, huh?! 
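
# Minimal sketch of wrapping a custom runner with `prepare_wrap` (assumed
# example, not part of the module): the wrapped function receives fully
# rendered string tokens plus the normalized `cwd` and `encoding`.
#
#   @prepare_wrap
#   def echo_cmd(*cmd, cwd=None, encoding=None):
#       print(cmd)
#
#   echo_cmd("kubectl", {"namespace": "blah"}, "get", "pods")
#   # prints: ('kubectl', '--namespace=blah', 'get', 'pods')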

# pylint: disable=redefined-builtin


@LOG.inject
@prepare_wrap
def get(
    *cmd,
    log=LOG,
    format: Optional[str] = None,
    **opts,
) -> Any:
    log.debug(
        "Getting system command output...",
        cmd=fmt_cmd(cmd),
        format=format,
        **opts,
    )
    # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output
    output = subprocess.check_output(cmd, **opts)

    if format is None:
        return output
    elif format == "strip":
        return output.strip()
    elif format == "json":
        return json.loads(output)
    else:
        log.warn(
            "Unknown `format`",
            format=format,
            expected=[None, "strip", "json"],
        )
        return output
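
# Illustrative sketch (comments only, not executed): `format` selects how
# the captured output is post-processed; the commands are just examples.
#
#   get("git", "rev-parse", "HEAD", format="strip")  # whitespace-stripped output
#   get("kubectl", "get", "pods", {"output": "json"}, format="json")  # parsed JSON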

@LOG.inject
@prepare_wrap
def run(
    *cmd,
    log=LOG,
    check: bool = True,
    input: Union[None, str, bytes, Path] = None,
    **opts,
) -> CompletedProcess:
    log.info(
        "Running system command...",
        cmd=fmt_cmd(cmd),
        **opts,
    )
    # https://docs.python.org/3.8/library/subprocess.html#subprocess.run
    if isinstance(input, Path):
        # `subprocess.run` doesn't accept a `Path` as `input`, so read the
        # file's contents in for it
        with input.open("r", encoding="utf-8") as file:
            return subprocess.run(
                cmd,
                check=check,
                input=file.read(),
                **opts,
            )
    else:
        return subprocess.run(cmd, check=check, input=input, **opts)


@LOG.inject
def test(*args, **kwds) -> bool:
    """\
    Run a command and return whether or not it succeeds (has
    `subprocess.CompletedProcess.returncode` equal to `0`).

    >>> test("true", shell=True)
    True

    >>> test("false", shell=True)
    False
    """
    return run(*args, check=False, **kwds).returncode == 0
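
# Illustrative sketch (comments only, not executed): a `Path` given as
# `input` has its text fed to the child's stdin (hypothetical manifest file).
#
#   run("kubectl", "apply", {"filename": "-"}, input=Path("manifest.yaml"))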

@LOG.inject
@prepare_wrap
def replace(
    *cmd,
    log=LOG,
    # Used, but defaulted in `prepare_wrap`, so it needs to be accepted here
    encoding: Optional[str] = None,
    env: Optional[Mapping] = None,
    cwd: Optional[Union[str, Path]] = None,
) -> NoReturn:
    # https://docs.python.org/3.9/library/os.html#os.execl
    for console in (OUT, ERR):
        console.file.flush()
    proc_name = basename(cmd[0])
    log.debug(
        "Replacing current process with system command...",
        cmd=fmt_cmd(cmd),
        env=env,
        cwd=cwd,
    )
    if cwd is not None:
        os.chdir(cwd)
    if env is None:
        if isabs(cmd[0]):
            os.execv(cmd[0], cmd)
        else:
            os.execvp(proc_name, cmd)
    else:
        if isabs(cmd[0]):
            os.execve(cmd[0], cmd, env)
        else:
            os.execvpe(proc_name, cmd, env)
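
# How the `os.exec*` variant is chosen above:
#
#   env is None, cmd[0] absolute -> os.execv
#   env is None, cmd[0] relative -> os.execvp  (PATH lookup)
#   env given,   cmd[0] absolute -> os.execve
#   env given,   cmd[0] relative -> os.execvpe (PATH lookup)
#
# None of these return: the current process image is replaced, which is why
# the consoles are flushed first.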
# yield f\"{flag}{style}{value}\" def render_opts( opts: TOpts,", "`False` # # We omit these entirely. # pass elif value is True:", "as a legit value, making life easier on callers assembling # commands if", "Path]] = None, ) -> NoReturn: # https://docs.python.org/3.9/library/os.html#os.execl for console in (OUT, ERR):", "# # _Short_ (single-character) flags and values are _always_ space- # sparated. #", "bytes], None, None]: \"\"\"\\ Render `args` to sequence of `str` (and/or `bytes`, if", "helper to run `prepare` and do a bit more common normalization for `get`,", "logging from .cfg import CFG from .io import OUT, ERR, fmt, fmt_cmd TOpts", "`None` as a legit value, making life easier on callers assembling # commands", "else: if isabs(cmd[0]): os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name, cmd, env) @LOG.inject def file_absent(path:", "long options (\"X toolkit\" style): >>> list(render_opts({'blah': 1, 'meh': 2}, long_prefix='-')) ['-blah=1', '-meh=2']", "is None: return output elif format == \"strip\": return output.strip() elif format ==", "None, rel_paths: bool = CONFIG.rel_paths, **opts, ) -> List[str]: \"\"\"\\ Prepare `args` to", "CONFIG = CFG.clavier.sh LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT = True", "2. `pathlib.Path` -- passed (along with `rel_to`) through `render_path`. 3. `typing.Mapping` -- understood", "is None: return # Sort key/value pairs if needed items = sorted(opts.items()) if", "['-x', '1', '-x', '2', '-x', '3'] 2. Long opt with a list (or", "opts_style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] = None, ) -> Generator[Union[str, bytes], None,", "We omit these entirely. # pass elif value is True: # Special case", "well: >>> list(render_opts({'blah': [1, 2, [[3], 4], 5]})) ['--blah=1', '--blah=2', '--blah=3', '--blah=4', '--blah=5']", "`-` prefix on long options (\"X toolkit\" style): >>> list(render_opts({'blah': 1, 'meh': 2},", "## Parameters ## Same as `prepare`. \"\"\" return shlex.join(prepare(*args, **opts)) def prepare_wrap(fn: Callable)", "`0`). >>> test(\"true\", shell=True) True >>> test(\"false\", shell=True) False \"\"\" return run(*args, check=False,", "isabs, basename import subprocess from pathlib import Path import json from shutil import", "`render_opts`. 4. `typing.Iterable` -- recurred into. 5. Other -- converted to a string", "elif format == \"json\": return json.loads(output) else: log.warn(\"Unknown `format`\", format=format, expected=[None, \"json\"]) return", "@LOG.inject def file_absent(path: Path, name: Optional[str] = None, log=LOG): if name is None:", "the option multiples times, once for each # inner value. # for item", "cwd is None else cwd else: rel_to = None return list(render_args(args, rel_to=rel_to, **opts))", "2}, long_prefix='-')) ['-blah=1', '-meh=2'] 4. 
Use that weird \"no-separator\" style you sometimes see:", "format=format, expected=[None, \"json\"]) return output @LOG.inject @prepare_wrap def run( *cmd, log=LOG, check: bool", "is None: desc = fmt(path) if path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc} directory already", "\"\"\" # Normalize str cwd path to Path if isinstance(cwd, str): cwd =", "Optional[Path]) -> str: if rel_to is None: return str(path) return str(path.relative_to(rel_to)) def _iter_opt(", "flag elif isinstance(value, (list, tuple)): # Special case #3 — value is a", "in args: if isinstance(arg, (str, bytes)): yield arg elif isinstance(arg, Path): yield render_path(arg,", "'meh': 2}, style=\" \")) ['--blah', '1', '--meh', '2'] 3. Use a single `-`", "`prepare`. \"\"\" return shlex.join(prepare(*args, **opts)) def prepare_wrap(fn: Callable) -> Callable: \"\"\"\\ Decorator helper", "input=file.read(), **opts, ) else: return subprocess.run(cmd, check=check, input=input, **opts) @LOG.inject def test(*args, **kwds)", "`str` and `bytes` -- passed through. 2. `pathlib.Path` -- passed (along with `rel_to`)", "cmd, env) else: os.execvpe(proc_name, cmd, env) @LOG.inject def file_absent(path: Path, name: Optional[str] =", "\"strip\": return output.strip() elif format == \"json\": return json.loads(output) else: log.warn(\"Unknown `format`\", format=format,", "any values passed in are `bytes`). `args` entries are handled by type: 1.", "subprocess.run(cmd, check=check, input=input, **opts) @LOG.inject def test(*args, **kwds) -> bool: \"\"\"\\ Run a", "OUT, ERR, fmt, fmt_cmd TOpts = Mapping[Any, Any] TOptsStyle = Literal[\"=\", \" \",", "True CompletedProcess = subprocess.CompletedProcess def render_path(path: Path, rel_to: Optional[Path]) -> str: if rel_to", "items = sorted(opts.items()) if sort else list(opts.items()) for name, value in items: name_s", "these by emitting the option multiples times, once for each # inner value.", "rel_to: Optional[Path] = None, ) -> Generator[Union[str, bytes], None, None]: \"\"\"\\ Render `args`", "{ ... 'input': Path(\"/tmp/blah.json\"), ... 'output': Path(\"/dev/null\"), ... }, ... rel_to=Path(\"/tmp\") ... )", "Path import json from shutil import rmtree import shlex from functools import wraps", "\"\"\" for arg in args: if isinstance(arg, (str, bytes)): yield arg elif isinstance(arg,", "logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT = True CompletedProcess = subprocess.CompletedProcess def render_path(path:", "option multiples times, once for each # inner value. # for item in", "their values, while \"short options\" (single-character options) are always separate tokens from their", "else: os.remove(path) else: log.info(f\"[yeah]{name} already absent.[/yeah]\", path=path) @LOG.inject def dir_present(path: Path, desc: Optional[str]", "arg, opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else: yield str(arg) def prepare( *args, cwd:", "the relative path capabilities of `render_args` and `render_opts` to the working directory, which", "(is_short and style != \"\"): # General case #1 — space-separated # #", "CFG from .io import OUT, ERR, fmt, fmt_cmd TOpts = Mapping[Any, Any] TOptsStyle", "`pathlib.Path` -- passed (along with `rel_to`) through `render_path`. 3. `typing.Mapping` -- understood as", "Any, style: TOptsStyle, is_short: bool, rel_to: Optional[Path] = None, ) -> Generator[str, None,", "### 1. 
Short opt with a list (or tuple) value: >>> list(render_opts({'x': [1,", "list(render_opts({'blah': 1, 'meh': 2}, long_prefix='-')) ['-blah=1', '-meh=2'] 4. Use that weird \"no-separator\" style", "Long opt with a list (or tuple) value: >>> list(render_opts({'blah': [1, 2, 3]}))", "`bytes` -- passed through. 2. `pathlib.Path` -- passed (along with `rel_to`) through `render_path`.", "import shlex from functools import wraps import splatlog as logging from .cfg import", "format # # When no other branch has matched, we're left with `=`-separated", "file: return subprocess.run( cmd, check=check, input=file.read(), **opts, ) else: return subprocess.run(cmd, check=check, input=input,", "\", \"\"] TOptsLongPrefix = Literal[\"--\", \"-\"] _TPath = Union[Path, str] CONFIG = CFG.clavier.sh", "left with `=`-separated flag # and value. # yield f\"{flag}{style}{value}\" def render_opts( opts:", "-> List[str]: \"\"\"\\ Prepare `args` to be passed `subprocess.run` or similar functions. Contextualizes", "has matched, we're left with `=`-separated flag # and value. # yield f\"{flag}{style}{value}\"", "like `-x` or `--blah`. # yield flag elif isinstance(value, (list, tuple)): # Special", "None, **opts, ) -> CompletedProcess: log.info( \"Running system command...\", cmd=fmt_cmd(cmd), **opts, ) #", "not None: os.chdir(cwd) if env is None: if isabs(cmd[0]): os.execv(cmd[0], cmd) else: os.execvp(proc_name,", "import * import os from os.path import isabs, basename import subprocess from pathlib", "rel_to) if value is None or value is False: # Special case #1", "'2', '-x', '3'] 2. Long opt with a list (or tuple) value: >>>", "`subprocess.run` or similar functions. Contextualizes the relative path capabilities of `render_args` and `render_opts`", "basename import subprocess from pathlib import Path import json from shutil import rmtree", "to a (yielded) sequence of strings. Examples: ### Style Examples ### 1. By", "# # _All_ flags and values are space-separated when the `style` is `\"", "splatlog as logging from .cfg import CFG from .io import OUT, ERR, fmt,", "== 0 @LOG.inject @prepare_wrap def replace( *cmd, log=LOG, # Used, but defaulted in", "None]: \"\"\" Render a mapping of option names to values to a (yielded)", "or similar functions. Contextualizes the relative path capabilities of `render_args` and `render_opts` to", "the working directory, which can either be provided as `cwd` or assumed to", "cwd: Optional[_TPath] = None, rel_paths: bool = CONFIG.rel_paths, **opts, ) -> List[str]: \"\"\"\\", "values to a (yielded) sequence of strings. Examples: ### Style Examples ### 1.", "bytes)): yield arg elif isinstance(arg, Path): yield render_path(arg, rel_to) elif isinstance(arg, Mapping): yield", ") -> Any: log.debug( \"Getting system command output...\", cmd=fmt_cmd(cmd), format=format, **opts, ) #", "of `render_args` and `render_opts` to the working directory, which can either be provided", "None: desc = fmt(path) if path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc} directory already exists.[/yeah]\",", "Value Examples ### 1. Short opt with a list (or tuple) value: >>>", "get( *cmd, log=LOG, format: Optional[str] = None, **opts, ) -> Any: log.debug( \"Getting", "*args, cwd: Optional[_TPath] = None, encoding: Optional[str] = CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix,", "`get`, `run` etc. 
\"\"\" @wraps(fn) def _prepare_wrapper( *args, cwd: Optional[_TPath] = None, encoding:", "# yield flag elif isinstance(value, (list, tuple)): # Special case #3 — value", "accepted here encoding: Optional[str] = None, env: Optional[Mapping] = None, cwd: Optional[Union[str, Path]]", "as `prepare`. \"\"\" return shlex.join(prepare(*args, **opts)) def prepare_wrap(fn: Callable) -> Callable: \"\"\"\\ Decorator", "the current directory. Relative path conversion is controlled by the `rel_paths` flag. ##", "directory. Relative path conversion is controlled by the `rel_paths` flag. ## Examples ##", "str, bytes, Path] = None, **opts, ) -> CompletedProcess: log.info( \"Running system command...\",", "from typing import * import os from os.path import isabs, basename import subprocess", "more common normalization for `get`, `run` etc. \"\"\" @wraps(fn) def _prepare_wrapper( *args, cwd:", "RuntimeError(f\"{path} exists and is NOT a directory\") else: log.info(f\"[holup]Creating {desc} directory...[/holup]\", path=path) os.makedirs(path)", "format=format, **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output = subprocess.check_output(cmd, **opts) if format is None:", "opts_long_prefix=opts_long_prefix, opts_sort=opts_sort, opts_style=opts_style, rel_paths=rel_paths, ) return fn(*cmd, cwd=cwd, encoding=encoding, **opts) return _prepare_wrapper #", "option values can be rendered relative to a `rel_to` directory. Only paths that", "### List Value Examples ### 1. Short opt with a list (or tuple)", "cmd) else: if isabs(cmd[0]): os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name, cmd, env) @LOG.inject def", "raise RuntimeError(f\"{path} exists and is NOT a directory\") else: log.info(f\"[holup]Creating {desc} directory...[/holup]\", path=path)", "env) @LOG.inject def file_absent(path: Path, name: Optional[str] = None, log=LOG): if name is", "is used to separate \"long options\" and their values, while \"short options\" (single-character", "no other branch has matched, we're left with `=`-separated flag # and value.", "tuple) value: >>> list(render_opts({'blah': [1, 2, 3]})) ['--blah=1', '--blah=2', '--blah=3'] 3. Due to", "space-separated option names and values: >>> list(render_opts({'blah': 1, 'meh': 2}, style=\" \")) ['--blah',", "check=check, input=input, **opts) @LOG.inject def test(*args, **kwds) -> bool: \"\"\"\\ Run a command", "opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_paths:", "yield flag elif isinstance(value, (list, tuple)): # Special case #3 — value is", "Path, rel_to: Optional[Path]) -> str: if rel_to is None: return str(path) return str(path.relative_to(rel_to))", "= Path(cwd) if rel_paths is True: rel_to = Path.cwd() if cwd is None", "wraps import splatlog as logging from .cfg import CFG from .io import OUT,", "entirely. # pass elif value is True: # Special case #2 — value", "Callable) -> Callable: \"\"\"\\ Decorator helper to run `prepare` and do a bit", "Style Examples ### 1. By default, `=` is used to separate \"long options\"", "sort else list(opts.items()) for name, value in items: name_s = str(name) is_short =", "list(render_opts({'blah': [1, 2, 3]})) ['--blah=1', '--blah=2', '--blah=3'] 3. Due to the recursive, yield-centric", "*cmd, log=LOG, format: Optional[str] = None, **opts, ) -> Any: log.debug( \"Getting system", "[1, 2, 3]})) ['--blah=1', '--blah=2', '--blah=3'] 3. 
Due to the recursive, yield-centric nature,", "on callers assembling # commands if opts is None: return # Sort key/value", "os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name, cmd, env) @LOG.inject def file_absent(path: Path, name: Optional[str]", "from their values: >>> list(render_opts({\"a\": 1, \"bee\": 2})) ['-a', '1', '--bee=2'] 2. Use", "path to Path if isinstance(cwd, str): cwd = Path(cwd) if rel_paths is True:", "long_prefix='-')) ['-blah=1', '-meh=2'] 4. Use that weird \"no-separator\" style you sometimes see: >>>", "0 @LOG.inject @prepare_wrap def replace( *cmd, log=LOG, # Used, but defaulted in `prepare_cmd`,", "that are descendants of `rel_to` will be relativized (no `../` transformations). >>> list(", "['--blah', '1', '--meh', '2'] 3. Use a single `-` prefix on long options", "== \"strip\": return output.strip() elif format == \"json\": return json.loads(output) else: log.warn(\"Unknown `format`\",", "fmt(path) if path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc} directory already exists.[/yeah]\", path=path ) else:", "while \"short options\" (single-character options) are always separate tokens from their values: >>>", "that weird \"no-separator\" style you sometimes see: >>> list(render_opts({'x': 123, 'y': 456}, style=\"\"))", "subprocess.check_output(cmd, **opts) if format is None: return output elif format == \"strip\": return", "log.warn(\"Unknown `format`\", format=format, expected=[None, \"json\"]) return output @LOG.inject @prepare_wrap def run( *cmd, log=LOG,", "yield arg elif isinstance(arg, Path): yield render_path(arg, rel_to) elif isinstance(arg, Mapping): yield from", "\"Getting system command output...\", cmd=fmt_cmd(cmd), format=format, **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.check_output output = subprocess.check_output(cmd,", "or `--blah`. # yield flag elif isinstance(value, (list, tuple)): # Special case #3", "values, while \"short options\" (single-character options) are always separate tokens from their values:", "cwd=cwd, encoding=encoding, **opts) return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd,", ">>> list(render_opts({\"a\": 1, \"bee\": 2})) ['-a', '1', '--bee=2'] 2. Use space-separated option names", "*, long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, sort: bool = CONFIG.opts.sort, style: TOptsStyle = CONFIG.opts.style,", "cmd, check=check, input=file.read(), **opts, ) else: return subprocess.run(cmd, check=check, input=input, **opts) @LOG.inject def", "toolkit\" style): >>> list(render_opts({'blah': 1, 'meh': 2}, long_prefix='-')) ['-blah=1', '-meh=2'] 4. Use that", "'--blah=2', '--blah=3', '--blah=4', '--blah=5'] Neat, huh?! ### Relative Path Examples ### 1. As", "sparated. # # _All_ flags and values are space-separated when the `style` is", "to a string with `str()`. 
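# A minimal illustrative sketch of `render_args` in use (added for clarity;
# the call and its output follow from the code above, assuming the default
# `CONFIG.opts` settings): positionals, nested iterables, `Path` values and
# option mappings can be interleaved freely.
#
#     >>> list(render_args(["echo", [Path("/tmp/a"), {"n": True}]]))
#     ['echo', '/tmp/a', '-n']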
\"\"\" for arg in args: if isinstance(arg, (str,", "arg elif isinstance(arg, Path): yield render_path(arg, rel_to) elif isinstance(arg, Mapping): yield from render_opts(", "Path(cwd) if rel_paths is True: rel_to = Path.cwd() if cwd is None else", "and style != \"\"): # General case #1 — space-separated # # _Short_", "None else cwd else: rel_to = None return list(render_args(args, rel_to=rel_to, **opts)) def join(*args,", "yield from render_args( arg, opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else: yield str(arg) def", "None, cwd: Optional[Union[str, Path]] = None, ) -> NoReturn: # https://docs.python.org/3.9/library/os.html#os.execl for console", "is_short = len(name_s) == 1 flag = f\"-{name_s}\" if is_short else f\"{long_prefix}{name_s}\" yield", "-> `shlex.join`. Returned string _should_ be suitable for pasting in a shell. ##", "with system command...\", cmd=fmt_cmd(cmd), env=env, cwd=cwd, ) if cwd is not None: os.chdir(cwd)", "if sort else list(opts.items()) for name, value in items: name_s = str(name) is_short", "basename(cmd[0]) log.debug( \"Replacing current process with system command...\", cmd=fmt_cmd(cmd), env=env, cwd=cwd, ) if", "and `render_opts` to the working directory, which can either be provided as `cwd`", "to run `prepare` and do a bit more common normalization for `get`, `run`", "None, ) -> NoReturn: # https://docs.python.org/3.9/library/os.html#os.execl for console in (OUT, ERR): console.file.flush() proc_name", "value is False: # Special case #1 — value is `None` or `False`", "mapping of option names to values to a (yielded) sequence of strings. Examples:", "but defaulted in `prepare_cmd`, so needs to be accepted here encoding: Optional[str] =", "key/value pairs if needed items = sorted(opts.items()) if sort else list(opts.items()) for name,", "yield from render_opts( arg, long_prefix=opts_long_prefix, style=opts_style, sort=opts_sort, rel_to=rel_to, ) elif isinstance(arg, Iterable): yield", "a single string with `prepare` -> `shlex.join`. Returned string _should_ be suitable for", "Optional[str] = None, env: Optional[Mapping] = None, cwd: Optional[Union[str, Path]] = None, )", "(no `../` transformations). >>> list( ... render_opts( ... { ... 'input': Path(\"/tmp/blah.json\"), ...", ") -> Generator[Union[str, bytes], None, None]: \"\"\"\\ Render `args` to sequence of `str`", "None: return # Sort key/value pairs if needed items = sorted(opts.items()) if sort", "'-y456'] ### List Value Examples ### 1. Short opt with a list (or", ") ... ) ['--input=blah.json', '--output=/dev/null'] \"\"\" # Handle `None` as a legit value,", "`args` to a single string with `prepare` -> `shlex.join`. Returned string _should_ be", "list(render_opts({\"a\": 1, \"bee\": 2})) ['-a', '1', '--bee=2'] 2. Use space-separated option names and", "or assumed to be the current directory. Relative path conversion is controlled by", "else: os.execvp(proc_name, cmd) else: if isabs(cmd[0]): os.execve(cmd[0], cmd, env) else: os.execvpe(proc_name, cmd, env)", "['--blah=1', '--blah=2', '--blah=3'] 3. Due to the recursive, yield-centric nature, nested lists work", "you sometimes see: >>> list(render_opts({'x': 123, 'y': 456}, style=\"\")) ['-x123', '-y456'] ### List", "env) else: os.execvpe(proc_name, cmd, env) @LOG.inject def file_absent(path: Path, name: Optional[str] = None,", "= CONFIG.rel_paths, **opts, ): # Normalize str cwd path to Path if isinstance(cwd,", "sequence of `str` (and/or `bytes`, if any values passed in are `bytes`). 
`args`", "Any] TOptsStyle = Literal[\"=\", \" \", \"\"] TOptsLongPrefix = Literal[\"--\", \"-\"] _TPath =", "run `prepare` and do a bit more common normalization for `get`, `run` etc.", "Relative Path Examples ### 1. As with positional arguments, `pathlib.Path` option values can", "= CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle =", "is True: # Special case #2 — value is `True` # # We", "@LOG.inject def test(*args, **kwds) -> bool: \"\"\"\\ Run a command and return whether", "os.path import isabs, basename import subprocess from pathlib import Path import json from", ") -> Generator[str, None, None]: \"\"\"Private helper for `iter_opts`.\"\"\" if isinstance(value, Path): value", "... ) ['kubectl', '--namespace=blah', 'logs', '--follow', 'some-pod'] \"\"\" # Normalize str cwd path", "Optional[str] = None, **opts, ) -> Any: log.debug( \"Getting system command output...\", cmd=fmt_cmd(cmd),", "and do a bit more common normalization for `get`, `run` etc. \"\"\" @wraps(fn)", "TOptsStyle, is_short: bool, rel_to: Optional[Path] = None, ) -> Generator[str, None, None]: \"\"\"Private", "*, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style,", "By default, `=` is used to separate \"long options\" and their values, while", "bool = True, input: Union[None, str, bytes, Path] = None, **opts, ) ->", "TOptsStyle = CONFIG.opts.style, rel_paths: bool = CONFIG.rel_paths, **opts, ): # Normalize str cwd", "typing import * import os from os.path import isabs, basename import subprocess from", "passed in are `bytes`). `args` entries are handled by type: 1. `str` and", "rel_to = None return list(render_args(args, rel_to=rel_to, **opts)) def join(*args, **opts) -> str: \"\"\"\\", "commands if opts is None: return # Sort key/value pairs if needed items", "# pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd, log=LOG, format: Optional[str] = None,", ">>> list(render_opts({'x': 123, 'y': 456}, style=\"\")) ['-x123', '-y456'] ### List Value Examples ###", "`args` entries are handled by type: 1. `str` and `bytes` -- passed through.", "'some-pod'] \"\"\" # Normalize str cwd path to Path if isinstance(cwd, str): cwd", "Due to the recursive, yield-centric nature, nested lists work as well: >>> list(render_opts({'blah':", "#1 — value is `None` or `False` # # We omit these entirely.", "arguments, `pathlib.Path` option values can be rendered relative to a `rel_to` directory. Only", "easier on callers assembling # commands if opts is None: return # Sort", "Handle `None` as a legit value, making life easier on callers assembling #", "**kwds).returncode == 0 @LOG.inject @prepare_wrap def replace( *cmd, log=LOG, # Used, but defaulted", "in (OUT, ERR): console.file.flush() proc_name = basename(cmd[0]) log.debug( \"Replacing current process with system", "(list, tuple)): # Special case #3 — value is a `list` or `tuple`", "rel_to) elif isinstance(arg, Mapping): yield from render_opts( arg, long_prefix=opts_long_prefix, style=opts_style, sort=opts_sort, rel_to=rel_to, )", "Optional[_TPath] = None, encoding: Optional[str] = CONFIG.encoding, opts_long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool", "def prepare( *args, cwd: Optional[_TPath] = None, rel_paths: bool = CONFIG.rel_paths, **opts, )", "(\"X toolkit\" style): >>> list(render_opts({'blah': 1, 'meh': 2}, long_prefix='-')) ['-blah=1', '-meh=2'] 4. 
Use", "tuple) value: >>> list(render_opts({'x': [1, 2, 3]})) ['-x', '1', '-x', '2', '-x', '3']", "None: os.chdir(cwd) if env is None: if isabs(cmd[0]): os.execv(cmd[0], cmd) else: os.execvp(proc_name, cmd)", ".cfg import CFG from .io import OUT, ERR, fmt, fmt_cmd TOpts = Mapping[Any,", "style): >>> list(render_opts({'blah': 1, 'meh': 2}, long_prefix='-')) ['-blah=1', '-meh=2'] 4. Use that weird", "needs to be accepted here encoding: Optional[str] = None, env: Optional[Mapping] = None,", "[[3], 4], 5]})) ['--blah=1', '--blah=2', '--blah=3', '--blah=4', '--blah=5'] Neat, huh?! ### Relative Path", "callers assembling # commands if opts is None: return # Sort key/value pairs", "isinstance(arg, Path): yield render_path(arg, rel_to) elif isinstance(arg, Mapping): yield from render_opts( arg, long_prefix=opts_long_prefix,", "Generator[str, None, None]: \"\"\" Render a mapping of option names to values to", "path conversion is controlled by the `rel_paths` flag. ## Examples ## >>> prepare(", "# inner value. # for item in value: yield from _iter_opt(flag, item, style,", "value. # yield f\"{flag}{style}{value}\" def render_opts( opts: TOpts, *, long_prefix: TOptsLongPrefix = CONFIG.opts.long_prefix,", "Iterable): yield from render_args( arg, opts_long_prefix=opts_long_prefix, opts_style=opts_style, opts_sort=opts_sort, rel_to=rel_to, ) else: yield str(arg)", "None, ) -> Generator[str, None, None]: \"\"\" Render a mapping of option names", "the recursive, yield-centric nature, nested lists work as well: >>> list(render_opts({'blah': [1, 2,", "log.info( \"Running system command...\", cmd=fmt_cmd(cmd), **opts, ) # https://docs.python.org/3.8/library/subprocess.html#subprocess.run if isinstance(input, Path): with", "TOptsLongPrefix = CONFIG.opts.long_prefix, opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_paths: bool", "elif style == \" \" or (is_short and style != \"\"): # General", "`bytes`, if any values passed in are `bytes`). `args` entries are handled by", "'--meh', '2'] 3. Use a single `-` prefix on long options (\"X toolkit\"", "their values: >>> list(render_opts({\"a\": 1, \"bee\": 2})) ['-a', '1', '--bee=2'] 2. Use space-separated", "# Special case #2 — value is `True` # # We emit the", "sequence of strings. Examples: ### Style Examples ### 1. By default, `=` is", "isinstance(arg, (str, bytes)): yield arg elif isinstance(arg, Path): yield render_path(arg, rel_to) elif isinstance(arg,", "`shlex.join`. Returned string _should_ be suitable for pasting in a shell. ## Parameters", "1. Short opt with a list (or tuple) value: >>> list(render_opts({'x': [1, 2,", "['kubectl', '--namespace=blah', 'logs', '--follow', 'some-pod'] \"\"\" # Normalize str cwd path to Path", "fn(*cmd, cwd=cwd, encoding=encoding, **opts) return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get(", "from pathlib import Path import json from shutil import rmtree import shlex from", "be accepted here encoding: Optional[str] = None, env: Optional[Mapping] = None, cwd: Optional[Union[str,", "a legit value, making life easier on callers assembling # commands if opts", "= fmt(path) if path.exists(): if path.is_dir(): log.debug( f\"[yeah]{desc} directory already exists.[/yeah]\", path=path )", "isinstance(value, (list, tuple)): # Special case #3 — value is a `list` or", "other branch has matched, we're left with `=`-separated flag # and value. 
#", "log=LOG, format: Optional[str] = None, **opts, ) -> Any: log.debug( \"Getting system command", "can be rendered relative to a `rel_to` directory. Only paths that are descendants", "str(arg) def prepare( *args, cwd: Optional[_TPath] = None, rel_paths: bool = CONFIG.rel_paths, **opts,", "bool: \"\"\"\\ Run a command and return whether or not it succeeds (has", "the `rel_paths` flag. ## Examples ## >>> prepare( ... \"kubectl\", ... {\"namespace\": \"blah\"},", "\"kubectl\", ... {\"namespace\": \"blah\"}, ... \"logs\", ... {\"follow\": True}, ... \"some-pod\", ... )", "it succeeds (has `subprocess.CompletedProcess.returncode` equal to `0`). >>> test(\"true\", shell=True) True >>> test(\"false\",", "Callable: \"\"\"\\ Decorator helper to run `prepare` and do a bit more common", "opts_sort: bool = CONFIG.opts.sort, opts_style: TOptsStyle = CONFIG.opts.style, rel_to: Optional[Path] = None, )", "else: os.execvpe(proc_name, cmd, env) @LOG.inject def file_absent(path: Path, name: Optional[str] = None, log=LOG):", "provided as `cwd` or assumed to be the current directory. Relative path conversion", "As with positional arguments, `pathlib.Path` option values can be rendered relative to a", "path=path ) else: raise RuntimeError(f\"{path} exists and is NOT a directory\") else: log.info(f\"[holup]Creating", "Optional[Path] = None, ) -> Generator[str, None, None]: \"\"\"Private helper for `iter_opts`.\"\"\" if", "## Examples ## >>> prepare( ... \"kubectl\", ... {\"namespace\": \"blah\"}, ... \"logs\", ...", "def render_path(path: Path, rel_to: Optional[Path]) -> str: if rel_to is None: return str(path)", "CFG.clavier.sh LOG = logging.getLogger(__name__) DEFAULT_OPTS_STYLE: TOptsStyle = \"=\" DEFAULT_OPTS_SORT = True CompletedProcess =", "str cwd path to Path if isinstance(cwd, str): cwd = Path(cwd) if rel_paths", "os.chdir(cwd) if env is None: if isabs(cmd[0]): os.execv(cmd[0], cmd) else: os.execvp(proc_name, cmd) else:", "in are `bytes`). `args` entries are handled by type: 1. `str` and `bytes`", "check=check, input=file.read(), **opts, ) else: return subprocess.run(cmd, check=check, input=input, **opts) @LOG.inject def test(*args,", "\"=\" DEFAULT_OPTS_SORT = True CompletedProcess = subprocess.CompletedProcess def render_path(path: Path, rel_to: Optional[Path]) ->", "multiples times, once for each # inner value. # for item in value:", "making life easier on callers assembling # commands if opts is None: return", "= None, rel_paths: bool = CONFIG.rel_paths, **opts, ) -> List[str]: \"\"\"\\ Prepare `args`", "* import os from os.path import isabs, basename import subprocess from pathlib import", "(yielded) sequence of strings. Examples: ### Style Examples ### 1. By default, `=`", "= None, **opts, ) -> CompletedProcess: log.info( \"Running system command...\", cmd=fmt_cmd(cmd), **opts, )", "value: Any, style: TOptsStyle, is_short: bool, rel_to: Optional[Path] = None, ) -> Generator[str,", "Normalize str cwd path to Path if isinstance(cwd, str): cwd = Path(cwd) if", "_should_ be suitable for pasting in a shell. ## Parameters ## Same as", "= \"=\" DEFAULT_OPTS_SORT = True CompletedProcess = subprocess.CompletedProcess def render_path(path: Path, rel_to: Optional[Path])", "3. `typing.Mapping` -- understood as options, passed through `render_opts`. 4. `typing.Iterable` -- recurred", "**opts) return _prepare_wrapper # pylint: disable=redefined-builtin @LOG.inject @prepare_wrap def get( *cmd, log=LOG, format:", "True: rel_to = Path.cwd() if cwd is None else cwd else: rel_to =" ]
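# A minimal usage sketch for the wrappers above (illustrative only; the
# module import name `sh` is an assumption, not something this file defines):
#
#     import sh
#
#     # `run` renders its args like `prepare`, then calls `subprocess.run`
#     # with `check=True` by default:
#     sh.run("kubectl", {"namespace": "blah"}, "get", "pods")
#
#     # `get` captures output via `subprocess.check_output`; `format="json"`
#     # parses it with `json.loads`:
#     pods = sh.get("kubectl", "get", "pods", {"output": "json"}, format="json")
#
#     # `test` folds the exit status into a bool:
#     if sh.test("true", shell=True):
#         ...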
from libs.db_context import DBContext
from models.uncom_vpc_peering import UncomVpcPeering
from models.vpc_peering import VpcPeering, model_to_dict
from models.owner_list import OwnerList
from models.owner_list import model_to_dict as owner_model_to_list


# Get the account IDs of non-compliant VPC peerings
def get_uncom_id():
    with DBContext('r') as session:
        # Collect every account ID that appears in a VPC peering, on either
        # the requester or the accepter side
        vpc_peering_info = session.query(VpcPeering).all()
        account_id = set()
        for data in vpc_peering_info:
            data_dict = model_to_dict(data)
            account_id.add(data_dict["requester_owner_id"])
            account_id.add(data_dict["accepter_owner_id"])

        # Collect the known owner account IDs
        owner_info = session.query(OwnerList).all()
        owner_id = []
        for data in owner_info:
            data_dict = owner_model_to_list(data)
            owner_id.append(data_dict["owner_id"])

        # Any peering account ID that is not a known owner is non-compliant
        uncom_id = []
        for a_id in account_id:
            if a_id not in owner_id:
                uncom_id.append(a_id)
    return uncom_id


def get_uncom_vpc_peering():
    """Get the list of non-compliant VPC peering records."""
    uncom_id_list = get_uncom_id()
    with DBContext('r') as session:
        uncom_vpc_peering_info_list = set()
        for uncom_id in uncom_id_list:
            # A non-compliant account may appear on the requester side...
            uncom_vpc_peering_info_request = session.query(VpcPeering).filter(
                VpcPeering.requester_owner_id == uncom_id).all()
            for uncom_vpc_peering_request in uncom_vpc_peering_info_request:
                uncom_vpc_peering_info_list.add(uncom_vpc_peering_request)
            # ...or on the accepter side; a set deduplicates the matches
            uncom_vpc_peering_info_accepter = session.query(VpcPeering).filter(
                VpcPeering.accepter_owner_id == uncom_id).all()
            for uncom_vpc_peering_accepter in uncom_vpc_peering_info_accepter:
                uncom_vpc_peering_info_list.add(uncom_vpc_peering_accepter)
        uncom_vpc_peering = []
        for data in uncom_vpc_peering_info_list:
            data_dict = model_to_dict(data)
            uncom_vpc_peering.append(data_dict)
    return uncom_vpc_peering
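# Each record returned by `get_uncom_vpc_peering` is a plain dict produced by
# `model_to_dict`; the keys consumed by the sync below look like this (all
# values are illustrative placeholders, not real data):
#
#     {
#         "vpc_peering_connection_id": "pcx-0123456789abcdef0",
#         "requester_cidr_block": "10.0.0.0/16",
#         "requester_owner_id": "111111111111",
#         "requester_vpc_id": "vpc-aaa111",
#         "requester_region": "us-east-1",
#         "accepter_cidr_block": "10.1.0.0/16",
#         "accepter_owner_id": "222222222222",
#         "accepter_vpc_id": "vpc-bbb222",
#         "accepter_region": "us-west-2",
#     }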
def uncom_vpc_peering_sync_cmdb():
    """Sync the non-compliant VPC peering data to the database."""
    uncom_vpc_peering_list = get_uncom_vpc_peering()
    with DBContext('w') as session:
        # Clear all existing records before re-inserting the fresh set
        session.query(UncomVpcPeering).delete(synchronize_session=False)
        for uncom_vpc_peering in uncom_vpc_peering_list:
            vpc_peering_connection_id = uncom_vpc_peering["vpc_peering_connection_id"]
            requester_cidr_block = uncom_vpc_peering["requester_cidr_block"]
            requester_owner_id = uncom_vpc_peering["requester_owner_id"]
            requester_vpc_id = uncom_vpc_peering["requester_vpc_id"]
            requester_region = uncom_vpc_peering["requester_region"]
            accepter_cidr_block = uncom_vpc_peering["accepter_cidr_block"]
            accepter_owner_id = uncom_vpc_peering["accepter_owner_id"]
            accepter_vpc_id = uncom_vpc_peering["accepter_vpc_id"]
            accepter_region = uncom_vpc_peering["accepter_region"]
            new_uncom_vpc_peering = UncomVpcPeering(
                vpc_peering_connection_id=vpc_peering_connection_id,
                requester_cidr_block=requester_cidr_block,
                requester_owner_id=requester_owner_id,
                requester_vpc_id=requester_vpc_id,
                requester_region=requester_region,
                accepter_cidr_block=accepter_cidr_block,
                accepter_owner_id=accepter_owner_id,
                accepter_vpc_id=accepter_vpc_id,
                accepter_region=accepter_region)
            session.add(new_uncom_vpc_peering)
        session.commit()


if __name__ == '__main__':
    pass
in owner_info: data_dict =", "DBContext('r') as session: uncom_vpc_peering_info_list = set() for uncom_id in uncom_id_list: uncom_vpc_peering_info_request = session.query(VpcPeering).filter(VpcPeering.requester_owner_id", "data_dict = model_to_dict(data) account_id.add(data_dict[\"requester_owner_id\"]) account_id.add(data_dict[\"accepter_owner_id\"]) # 获取owner账户的id owner_info = session.query(OwnerList).all() owner_id = []", "= uncom_vpc_peering[\"requester_region\"] accepter_cidr_block = uncom_vpc_peering[\"accepter_cidr_block\"] accepter_owner_id = uncom_vpc_peering[\"accepter_owner_id\"] accepter_vpc_id = uncom_vpc_peering[\"accepter_vpc_id\"] accepter_region =", "with DBContext('w') as session: session.query(UncomVpcPeering).delete(synchronize_session=False) # 清空数据库的所有记录 for uncom_vpc_peering in uncom_vpc_peering_list: vpc_peering_connection_id =", "as session: session.query(UncomVpcPeering).delete(synchronize_session=False) # 清空数据库的所有记录 for uncom_vpc_peering in uncom_vpc_peering_list: vpc_peering_connection_id = uncom_vpc_peering[\"vpc_peering_connection_id\"] requester_cidr_block", "<reponame>zjj1002/aws-cloud-cmdb-system<gh_stars>0 from libs.db_context import DBContext from models.uncom_vpc_peering import UncomVpcPeering from models.vpc_peering import VpcPeering,", "[] for data in owner_info: data_dict = owner_model_to_list(data) owner_id.append(data_dict[\"owner_id\"]) # 找出不合规的vpc peering账户id uncom_id", "set() for data in vpc_peering_info: data_dict = model_to_dict(data) account_id.add(data_dict[\"requester_owner_id\"]) account_id.add(data_dict[\"accepter_owner_id\"]) # 获取owner账户的id owner_info", "with DBContext('r') as session: uncom_vpc_peering_info_list = set() for uncom_id in uncom_id_list: uncom_vpc_peering_info_request =", "owner_info: data_dict = owner_model_to_list(data) owner_id.append(data_dict[\"owner_id\"]) # 找出不合规的vpc peering账户id uncom_id = [] for a_id", "account_id: if a_id not in owner_id: uncom_id.append(a_id) return uncom_id def get_uncom_vpc_peering(): \"\"\"获取到不合规的vpc peering数据列表\"\"\"", "models.owner_list import model_to_dict as owner_model_to_list # 获取不合规的vpc peering账户id def get_uncom_id(): with DBContext('r') as", "if a_id not in owner_id: uncom_id.append(a_id) return uncom_id def get_uncom_vpc_peering(): \"\"\"获取到不合规的vpc peering数据列表\"\"\" uncom_id_list", "peering的账户id vpc_peering_info = session.query(VpcPeering).all() account_id = set() for data in vpc_peering_info: data_dict =", "in owner_id: uncom_id.append(a_id) return uncom_id def get_uncom_vpc_peering(): \"\"\"获取到不合规的vpc peering数据列表\"\"\" uncom_id_list = get_uncom_id() with", "import DBContext from models.uncom_vpc_peering import UncomVpcPeering from models.vpc_peering import VpcPeering, model_to_dict from models.owner_list" ]
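# The source breaks off at a trailing `if` after session.commit(); a minimal
# entry point, assuming the module is meant to be run directly (this guard is
# a reconstruction, not confirmed by the source):
if __name__ == '__main__':
    uncom_vpc_peering_sync_cmdb()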
[ "num = line.split() num = int(num) if ins == 'forward': pos += num", "+= num part_2_depth += (aim * num) elif ins == 'down': part_1_depth +=", "ins == 'down': part_1_depth += num aim += num elif ins == \"up\":", "0 part_2_depth = 0 for line in fileinput.input(): ins, num = line.split() num", "part_1_depth -= num aim -= num print \"Part 1:\", pos * part_1_depth print", "ins == 'forward': pos += num part_2_depth += (aim * num) elif ins", "\"up\": part_1_depth -= num aim -= num print \"Part 1:\", pos * part_1_depth", "= int(num) if ins == 'forward': pos += num part_2_depth += (aim *", "= 0 aim = 0 part_1_depth = 0 part_2_depth = 0 for line", "num part_2_depth += (aim * num) elif ins == 'down': part_1_depth += num", "part_2_depth += (aim * num) elif ins == 'down': part_1_depth += num aim", "+= (aim * num) elif ins == 'down': part_1_depth += num aim +=", "part_1_depth += num aim += num elif ins == \"up\": part_1_depth -= num", "+= num elif ins == \"up\": part_1_depth -= num aim -= num print", "pos = 0 aim = 0 part_1_depth = 0 part_2_depth = 0 for", "= 0 part_1_depth = 0 part_2_depth = 0 for line in fileinput.input(): ins,", "num print \"Part 1:\", pos * part_1_depth print \"Part 2:\", pos * part_2_depth", "int(num) if ins == 'forward': pos += num part_2_depth += (aim * num)", "ins == \"up\": part_1_depth -= num aim -= num print \"Part 1:\", pos", "== 'down': part_1_depth += num aim += num elif ins == \"up\": part_1_depth", "'forward': pos += num part_2_depth += (aim * num) elif ins == 'down':", "num aim -= num print \"Part 1:\", pos * part_1_depth print \"Part 2:\",", "pos += num part_2_depth += (aim * num) elif ins == 'down': part_1_depth", "= 0 for line in fileinput.input(): ins, num = line.split() num = int(num)", "num = int(num) if ins == 'forward': pos += num part_2_depth += (aim", "(aim * num) elif ins == 'down': part_1_depth += num aim += num", "== \"up\": part_1_depth -= num aim -= num print \"Part 1:\", pos *", "num elif ins == \"up\": part_1_depth -= num aim -= num print \"Part", "'down': part_1_depth += num aim += num elif ins == \"up\": part_1_depth -=", "aim += num elif ins == \"up\": part_1_depth -= num aim -= num", "-= num print \"Part 1:\", pos * part_1_depth print \"Part 2:\", pos *", "= 0 part_2_depth = 0 for line in fileinput.input(): ins, num = line.split()", "0 part_1_depth = 0 part_2_depth = 0 for line in fileinput.input(): ins, num", "ins, num = line.split() num = int(num) if ins == 'forward': pos +=", "num aim += num elif ins == \"up\": part_1_depth -= num aim -=", "line in fileinput.input(): ins, num = line.split() num = int(num) if ins ==", "== 'forward': pos += num part_2_depth += (aim * num) elif ins ==", "line.split() num = int(num) if ins == 'forward': pos += num part_2_depth +=", "if ins == 'forward': pos += num part_2_depth += (aim * num) elif", "0 for line in fileinput.input(): ins, num = line.split() num = int(num) if", "* num) elif ins == 'down': part_1_depth += num aim += num elif", "elif ins == \"up\": part_1_depth -= num aim -= num print \"Part 1:\",", "num) elif ins == 'down': part_1_depth += num aim += num elif ins", "aim = 0 part_1_depth = 0 part_2_depth = 0 for line in fileinput.input():", "aim -= num print \"Part 1:\", pos * part_1_depth print \"Part 2:\", pos", "for line in fileinput.input(): ins, num = line.split() num = int(num) if ins", "part_2_depth = 0 for line in fileinput.input(): ins, num = line.split() num =", "<reponame>iKevinY/advent import fileinput pos = 0 aim = 0 part_1_depth = 0 part_2_depth", "import fileinput pos = 0 aim = 0 
part_1_depth = 0 part_2_depth =", "+= num aim += num elif ins == \"up\": part_1_depth -= num aim", "-= num aim -= num print \"Part 1:\", pos * part_1_depth print \"Part", "fileinput.input(): ins, num = line.split() num = int(num) if ins == 'forward': pos", "in fileinput.input(): ins, num = line.split() num = int(num) if ins == 'forward':", "part_1_depth = 0 part_2_depth = 0 for line in fileinput.input(): ins, num =", "= line.split() num = int(num) if ins == 'forward': pos += num part_2_depth", "0 aim = 0 part_1_depth = 0 part_2_depth = 0 for line in", "elif ins == 'down': part_1_depth += num aim += num elif ins ==", "fileinput pos = 0 aim = 0 part_1_depth = 0 part_2_depth = 0" ]
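# A quick sanity check of the aim/depth logic against the well-known sample
# course from Advent of Code 2021 day 2 (assuming that is the puzzle this
# solves); the expected answers are 150 for part 1 and 900 for part 2:
sample = ["forward 5", "down 5", "forward 8", "up 3", "down 8", "forward 2"]
p = a = d1 = d2 = 0
for entry in sample:
    ins, num = entry.split()
    num = int(num)
    if ins == 'forward':
        p += num
        d2 += a * num
    elif ins == 'down':
        d1 += num
        a += num
    elif ins == 'up':
        d1 -= num
        a -= num
assert p * d1 == 150 and p * d2 == 900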
[ "response = connection.getresponse() code = response.getcode() headers = response.getheaders() result = loads(response.read()) print(result)", "Server response:\") response = connection.getresponse() code = response.getcode() headers = response.getheaders() result =", "for python 3 from http.client import HTTPConnection # connect with REST server connection", "HTTPConnection # connect with REST server connection = HTTPConnection('127.0.0.1', 80) connection.connect() data =", "httplib import HTTPConnection # for python 3 from http.client import HTTPConnection # connect", "= connection.getresponse() code = response.getcode() headers = response.getheaders() result = loads(response.read()) print(result) print(\"code:", "response:\") response = connection.getresponse() code = response.getcode() headers = response.getheaders() result = loads(response.read())", "# for python 3 from http.client import HTTPConnection # connect with REST server", "print(\"Waiting for Server response:\") response = connection.getresponse() code = response.getcode() headers = response.getheaders()", "os from json import dumps, loads # for python 2 # from httplib", "for python 2 # from httplib import HTTPConnection # for python 3 from", "'/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for Server response:\") response = connection.getresponse() code", "print(result) print(\"code: \", code) print(\"headers: \", headers) print(dir(result)) # close the connection connection.close()", ") print(\"Waiting for Server response:\") response = connection.getresponse() code = response.getcode() headers =", "headers = response.getheaders() result = loads(response.read()) print(result) print(\"code: \", code) print(\"headers: \", headers)", "method response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for Server response:\")", "the method response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for Server", "-*- import os from json import dumps, loads # for python 2 #", "code = response.getcode() headers = response.getheaders() result = loads(response.read()) print(result) print(\"code: \", code)", "import dumps, loads # for python 2 # from httplib import HTTPConnection #", "Get the method response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for", "response.getcode() headers = response.getheaders() result = loads(response.read()) print(result) print(\"code: \", code) print(\"headers: \",", "with REST server connection = HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\":", "from http.client import HTTPConnection # connect with REST server connection = HTTPConnection('127.0.0.1', 80)", "-*- coding: utf-8 -*- import os from json import dumps, loads # for", "{'Content-Type': 'application/json'}, ) print(\"Waiting for Server response:\") response = connection.getresponse() code = response.getcode()", "#!/usr/bin/env python # -*- coding: utf-8 -*- import os from json import dumps,", "result = loads(response.read()) print(result) print(\"code: \", code) print(\"headers: \", headers) print(dir(result)) # close", "import os from json import dumps, loads # for python 2 # from", "json import dumps, loads # for python 2 # from httplib import HTTPConnection", "'application/json'}, ) print(\"Waiting for Server response:\") 
response = connection.getresponse() code = response.getcode() headers", "coding: utf-8 -*- import os from json import dumps, loads # for python", "response.getheaders() result = loads(response.read()) print(result) print(\"code: \", code) print(\"headers: \", headers) print(dir(result)) #", "data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" } # Get the method response", "http.client import HTTPConnection # connect with REST server connection = HTTPConnection('127.0.0.1', 80) connection.connect()", "3 from http.client import HTTPConnection # connect with REST server connection = HTTPConnection('127.0.0.1',", "= {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" } # Get the method response connection.request(", "dumps, loads # for python 2 # from httplib import HTTPConnection # for", "= response.getheaders() result = loads(response.read()) print(result) print(\"code: \", code) print(\"headers: \", headers) print(dir(result))", "\"list_of_commands_to_send\": \"show version\" } # Get the method response connection.request( 'POST', '/add_command_raw', dumps(data),", "\"show version\" } # Get the method response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type':", "import HTTPConnection # for python 3 from http.client import HTTPConnection # connect with", "# connect with REST server connection = HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\":", "connection = HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" }", "connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" } # Get the method", "utf-8 -*- import os from json import dumps, loads # for python 2", "HTTPConnection # for python 3 from http.client import HTTPConnection # connect with REST", "loads # for python 2 # from httplib import HTTPConnection # for python", "# from httplib import HTTPConnection # for python 3 from http.client import HTTPConnection", "version\" } # Get the method response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'},", "{\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" } # Get the method response connection.request( 'POST',", "} # Get the method response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, )", "connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for Server response:\") response =", "dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for Server response:\") response = connection.getresponse() code =", "python 2 # from httplib import HTTPConnection # for python 3 from http.client", "# -*- coding: utf-8 -*- import os from json import dumps, loads #", "# for python 2 # from httplib import HTTPConnection # for python 3", "connect with REST server connection = HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\": \"192.168.2.254\",", "'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for Server response:\") response = connection.getresponse()", "= HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" } #", "HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show 
version\" } # Get", "connection.getresponse() code = response.getcode() headers = response.getheaders() result = loads(response.read()) print(result) print(\"code: \",", "python # -*- coding: utf-8 -*- import os from json import dumps, loads", "for Server response:\") response = connection.getresponse() code = response.getcode() headers = response.getheaders() result", "from httplib import HTTPConnection # for python 3 from http.client import HTTPConnection #", "= response.getcode() headers = response.getheaders() result = loads(response.read()) print(result) print(\"code: \", code) print(\"headers:", "80) connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" } # Get the", "server connection = HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show version\"", "2 # from httplib import HTTPConnection # for python 3 from http.client import", "from json import dumps, loads # for python 2 # from httplib import", "loads(response.read()) print(result) print(\"code: \", code) print(\"headers: \", headers) print(dir(result)) # close the connection", "response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting for Server response:\") response", "python 3 from http.client import HTTPConnection # connect with REST server connection =", "\"192.168.2.254\", \"list_of_commands_to_send\": \"show version\" } # Get the method response connection.request( 'POST', '/add_command_raw',", "# Get the method response connection.request( 'POST', '/add_command_raw', dumps(data), {'Content-Type': 'application/json'}, ) print(\"Waiting", "= loads(response.read()) print(result) print(\"code: \", code) print(\"headers: \", headers) print(dir(result)) # close the", "REST server connection = HTTPConnection('127.0.0.1', 80) connection.connect() data = {\"ip\": \"192.168.2.254\", \"list_of_commands_to_send\": \"show", "import HTTPConnection # connect with REST server connection = HTTPConnection('127.0.0.1', 80) connection.connect() data" ]
[ "getopt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes import base64 import binascii", "generate password mac for dovecot import sys import os import getopt from cryptography.hazmat.backends", "option_keys: usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value password=password.encode(\"utf8\")", "import getopt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes import base64 import", "for o,v in opts] if \"-h\" in option_keys: usage() for opt,value in opts:", "password=\"\" salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options) if len(args) is 1: password=args[0]", "options) if len(args) is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in", "cryptography.hazmat.primitives import hashes import base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt]", "for dovecot import sys import os import getopt from cryptography.hazmat.backends import default_backend from", "import os import getopt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes import", "getopt.getopt(sys.argv[1:], options) if len(args) is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v", "def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts,", "usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value password=password.encode(\"utf8\") if", "else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts] if \"-h\" in option_keys: usage()", "usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts] if \"-h\" in option_keys: usage() for", "if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) digest=hashes.Hash(hashes.SHA512(),backend=default_backend()) digest.update(password) digest.update(salt) hash_raw=digest.finalize() hash_and_salt=hash_raw+salt hash_base64=binascii.b2a_base64(hash_and_salt) dovecot=\"{SSHA512}\"+hash_base64.decode(\"utf-8\") print(dovecot)", "import base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h) sys.exit(2)", "password mac for dovecot import sys import os import getopt from cryptography.hazmat.backends import", "from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes import base64 import binascii def", "h+=\" [-s salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:],", "opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16)", "is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts] if \"-h\"", "for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value password=password.encode(\"utf8\") if salt:", "opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) digest=hashes.Hash(hashes.SHA512(),backend=default_backend())", "salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) 
digest=hashes.Hash(hashes.SHA512(),backend=default_backend()) digest.update(password) digest.update(salt) hash_raw=digest.finalize() hash_and_salt=hash_raw+salt hash_base64=binascii.b2a_base64(hash_and_salt)", "#!/usr/bin/env python3 # generate password mac for dovecot import sys import os import", "default_backend from cryptography.hazmat.primitives import hashes import base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\"", "salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options) if len(args) is 1: password=args[0] else:", "len(args) is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts] if", "usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args", "args = getopt.getopt(sys.argv[1:], options) if len(args) is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o", "o,v in opts] if \"-h\" in option_keys: usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip()", "opts, args = getopt.getopt(sys.argv[1:], options) if len(args) is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\")", "in opts] if \"-h\" in option_keys: usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if", "in opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) digest=hashes.Hash(hashes.SHA512(),backend=default_backend()) digest.update(password) digest.update(salt) hash_raw=digest.finalize()", "import sys import os import getopt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import", "if \"-h\" in option_keys: usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in", "1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts] if \"-h\" in", "opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) digest=hashes.Hash(hashes.SHA512(),backend=default_backend()) digest.update(password) digest.update(salt) hash_raw=digest.finalize() hash_and_salt=hash_raw+salt", "if len(args) is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts]", "\"-h\" in option_keys: usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt:", "\"s\" in opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) digest=hashes.Hash(hashes.SHA512(),backend=default_backend()) digest.update(password) digest.update(salt)", "password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts] if \"-h\" in option_keys:", "os import getopt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes import base64", "options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options) if len(args) is 1: password=args[0] else: usage()", "raw_options=options.replace(\":\",\"\") option_keys=[o for o,v in opts] if \"-h\" in option_keys: usage() for opt,value", "opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt)", "[-s salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options)", "import binascii def usage(): 
h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\"", "password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options) if len(args)", "h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args =", "option_keys=[o for o,v in opts] if \"-h\" in option_keys: usage() for opt,value in", "if \"s\" in opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) digest=hashes.Hash(hashes.SHA512(),backend=default_backend()) digest.update(password)", "# generate password mac for dovecot import sys import os import getopt from", "salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options) if", "sys import os import getopt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes", "python3 # generate password mac for dovecot import sys import os import getopt", "print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options) if len(args) is", "in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else:", "mac for dovecot import sys import os import getopt from cryptography.hazmat.backends import default_backend", "sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\" opts, args = getopt.getopt(sys.argv[1:], options) if len(args) is 1:", "<reponame>dgengtek/scripts #!/usr/bin/env python3 # generate password mac for dovecot import sys import os", "binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h) sys.exit(2) password=\"\" salt=\"\" options=\"s:p:h\"", "opts] if \"-h\" in option_keys: usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\"", "import default_backend from cryptography.hazmat.primitives import hashes import base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0]", "base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h) sys.exit(2) password=\"\"", "import hashes import base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\"", "cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes import base64 import binascii def usage():", "dovecot import sys import os import getopt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives", "hashes import base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s salt] password\" print(h)", "= getopt.getopt(sys.argv[1:], options) if len(args) is 1: password=args[0] else: usage() raw_options=options.replace(\":\",\"\") option_keys=[o for", "from cryptography.hazmat.primitives import hashes import base64 import binascii def usage(): h=\"usage:\\t\"+sys.argv[0] h+=\" [-s", "in option_keys: usage() for opt,value in opts: opt=opt.replace(\"-\",\"\").strip() if \"s\" in opt: salt=value", "password=password.encode(\"utf8\") if salt: salt=binascii.a2b_hex(salt) else: salt=os.urandom(16) digest=hashes.Hash(hashes.SHA512(),backend=default_backend()) digest.update(password) digest.update(salt) hash_raw=digest.finalize() hash_and_salt=hash_raw+salt hash_base64=binascii.b2a_base64(hash_and_salt) dovecot=\"{SSHA512}\"+hash_base64.decode(\"utf-8\")" ]
[ "<gh_stars>1-10 import requests data = { \"bot_commands\": [ { \"command\": \"command1\", \"description\": \"description1\"", "requests data = { \"bot_commands\": [ { \"command\": \"command1\", \"description\": \"description1\" }, {", "= { \"bot_commands\": [ { \"command\": \"command1\", \"description\": \"description1\" }, { \"command\": \"command2\",", "[ { \"command\": \"command1\", \"description\": \"description1\" }, { \"command\": \"command2\", \"description\": \"description2\" },", "\"command1\", \"description\": \"description1\" }, { \"command\": \"command2\", \"description\": \"description2\" }, ] } url", "data = { \"bot_commands\": [ { \"command\": \"command1\", \"description\": \"description1\" }, { \"command\":", "}, { \"command\": \"command2\", \"description\": \"description2\" }, ] } url = f'https://messengerg2b1.iranlms.ir/v3/{token}/setCommands' response", "\"bot_commands\": [ { \"command\": \"command1\", \"description\": \"description1\" }, { \"command\": \"command2\", \"description\": \"description2\"", "{ \"command\": \"command2\", \"description\": \"description2\" }, ] } url = f'https://messengerg2b1.iranlms.ir/v3/{token}/setCommands' response =", "\"description\": \"description2\" }, ] } url = f'https://messengerg2b1.iranlms.ir/v3/{token}/setCommands' response = requests.post(url, data=data) print(response.text)", "{ \"bot_commands\": [ { \"command\": \"command1\", \"description\": \"description1\" }, { \"command\": \"command2\", \"description\":", "\"command\": \"command1\", \"description\": \"description1\" }, { \"command\": \"command2\", \"description\": \"description2\" }, ] }", "{ \"command\": \"command1\", \"description\": \"description1\" }, { \"command\": \"command2\", \"description\": \"description2\" }, ]", "\"description1\" }, { \"command\": \"command2\", \"description\": \"description2\" }, ] } url = f'https://messengerg2b1.iranlms.ir/v3/{token}/setCommands'", "\"command2\", \"description\": \"description2\" }, ] } url = f'https://messengerg2b1.iranlms.ir/v3/{token}/setCommands' response = requests.post(url, data=data)", "\"description\": \"description1\" }, { \"command\": \"command2\", \"description\": \"description2\" }, ] } url =", "\"command\": \"command2\", \"description\": \"description2\" }, ] } url = f'https://messengerg2b1.iranlms.ir/v3/{token}/setCommands' response = requests.post(url,", "import requests data = { \"bot_commands\": [ { \"command\": \"command1\", \"description\": \"description1\" }," ]
[ "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self,", "**kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs):", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile):", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs):", "update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.update_nfs_dataset(dataset_name, body,", "ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id", "return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.update_nfs_dataset(dataset_name,", "= self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs)", "deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def", "config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id()", "create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def", "delete_nfs_dataset(self, dataset_name, 
**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def", "self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self,", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs):", "details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self,", "= self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import", "def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs)", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config =", "**kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name,", "Oracle and/or its affiliates. All rights reserved. 
\"\"\" NOTE: This class should always", "appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile),", "def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs)", "from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "to the API definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import", "= appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client", "services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def", "appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj)", "= self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value", "self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs)", "NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager", "self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs):", "self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name,", "from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from 
services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER,", "self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] =", "config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient(", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value']", "= config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name", "= NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] =", "Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. \"\"\" NOTE:", "affiliates. All rights reserved. \"\"\" NOTE: This class should always comply to the", "self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "reserved. 
\"\"\" NOTE: This class should always comply to the API definition of", "in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants", "self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value']", "import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value", "self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name,", "services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config", "always comply to the API definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] =", "self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def", "= ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config", "service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "initiate_seal_on_nfs_dataset(self, dataset_name, 
**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def", "NOTE: This class should always comply to the API definition of NfsDatasetClient present", "This class should always comply to the API definition of NfsDatasetClient present in", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] =", "of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name,", "\"\"\" NOTE: This class should always comply to the API definition of NfsDatasetClient", "and/or its affiliates. All rights reserved. \"\"\" NOTE: This class should always comply", "self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] =", "= \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs):", "self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "= self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "def __init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER,", "self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name,", "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self,", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return 
self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name,", "self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url()", "= self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] =", "NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value =", "= self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body,", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name, **kwargs):", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value']", "self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "= appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def", "\"\"\" from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR,", "self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name,", "self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "= self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "self.auth_value kwargs['serial_id'] = 
self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value']", "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self,", "get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def", "config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value", "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self,", "utf-8 # Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.", "its affiliates. All rights reserved. \"\"\" NOTE: This class should always comply to", "# coding: utf-8 # Copyright (c) 2016, 2019, Oracle and/or its affiliates. All", "return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "= self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "= \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert", "(c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. 
\"\"\" NOTE: This", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs):", "cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from", "return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import", "return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def", "**kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name,", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\", "def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self,", "the API definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name,", "def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs)", "**kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name,", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] =", "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def 
list_nfs_datasets(self,", "self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME)", "= self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self,", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details,", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self,", "config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name =", "host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert)", "NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "def get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs)", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def initiate_seal_on_nfs_dataset(self, dataset_name,", "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self,", "**kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name,", "2016, 2019, Oracle and/or its affiliates. All rights reserved. 
\"\"\" NOTE: This class", "definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager", "from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy:", "return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self,", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "self.nfs_dataset_client.activate_nfs_dataset(dataset_name, **kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs):", "comply to the API definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli", "\\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value']", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs):", "def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs)", "should always comply to the API definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\"", "present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util from services.dts.src.oci_cli_dts.appliance_config_manager import ApplianceConfigManager from", "rights reserved. \"\"\" NOTE: This class should always comply to the API definition", "**kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. 
\"\"\"", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs):", "All rights reserved. \"\"\" NOTE: This class should always comply to the API", "**kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name,", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value']", "NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile)", "import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config =", "cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name,", "ApplianceConfigManager from services.dts.src.oci_cli_dts.appliance_constants import APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class", "appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id =", "return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "def initiate_seal_on_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs)", "API definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py \"\"\" from oci_cli import cli_util from", "def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs)", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value']", "APPLIANCE_CONFIGS_BASE_DIR, APPLIANCE_AUTH_USER, \\ APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx,", "self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def 
get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return", "self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.deactivate_nfs_dataset(dataset_name, **kwargs) def delete_nfs_dataset(self, dataset_name,", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value']", "APPLIANCE_CERT_FILE_NAME from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager =", "**kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name,", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] =", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details, **kwargs) def deactivate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value", "2019, Oracle and/or its affiliates. All rights reserved. \"\"\" NOTE: This class should", "reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def", "self.serial_id return self.nfs_dataset_client.reopen_nfs_dataset(dataset_name, **kwargs) def update_nfs_dataset(self, dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id']", "__init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR) appliance_config = config_manager.get_config(appliance_profile) self.auth_value = \"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token())", "self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs) def get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] =", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] =", "from services.dts.src.oci_cli_dts.physical_appliance_control_plane.client.nfs_dataset_client import NfsDatasetClient class NfsDatasetClientProxy: def __init__(self, ctx, appliance_profile): config_manager = ApplianceConfigManager(APPLIANCE_CONFIGS_BASE_DIR)", "def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_manifest(dataset_name, **kwargs)", "get_nfs_dataset_seal_status(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset_seal_status(dataset_name, **kwargs) def", "**kwargs) def 
get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name,", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value']", "coding: utf-8 # Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights", "class should always comply to the API definition of NfsDatasetClient present in services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/nfs_dataset_client.py", "\"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config, service_endpoint=host_name, self_signed_cert=self_signed_cert) def activate_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value']", "dataset_name, body, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.update_nfs_dataset(dataset_name, body, **kwargs)", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs): kwargs['auth_value']", "= self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] =", "= cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client = NfsDatasetClient( config=config,", "kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.get_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset_seal_manifest(self, dataset_name, **kwargs):", "= self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "**kwargs) def create_nfs_dataset(self, details, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.create_nfs_dataset(details,", "self.serial_id return self.nfs_dataset_client.delete_nfs_dataset(dataset_name, **kwargs) def get_nfs_dataset(self, dataset_name, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] =", "appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert = \"{}/{}\".format(config_manager.get_config_dir(appliance_profile), APPLIANCE_CERT_FILE_NAME) self.nfs_dataset_client =", "\"{}:{}\".format(APPLIANCE_AUTH_USER, appliance_config.get_access_token()) self.serial_id = appliance_config.get_appliance_serial_id() config = cli_util.build_config(ctx.obj) host_name = appliance_config.get_appliance_url() self_signed_cert =", "list_nfs_datasets(self, **kwargs): kwargs['auth_value'] = self.auth_value kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.list_nfs_datasets(**kwargs) def reopen_nfs_dataset(self, dataset_name,", "kwargs['serial_id'] = self.serial_id return self.nfs_dataset_client.initiate_seal_on_nfs_dataset(dataset_name, **kwargs) def list_nfs_datasets(self, **kwargs): 
kwargs['auth_value'] = self.auth_value kwargs['serial_id']" ]
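Every method of the proxy does the same thing: inject the appliance's auth_value and serial_id into the keyword arguments, then delegate to the underlying NfsDatasetClient. A minimal usage sketch, assuming `ctx` is the Click context the OCI CLI passes to command callbacks; the profile and dataset names are illustrative placeholders, not values from the source:

def seal_and_check(ctx, appliance_profile="DEFAULT", dataset_name="nfs-ds-1"):
    # The order of calls here is illustrative, not a documented workflow.
    proxy = NfsDatasetClientProxy(ctx, appliance_profile)
    proxy.deactivate_nfs_dataset(dataset_name)
    proxy.initiate_seal_on_nfs_dataset(dataset_name)
    return proxy.get_nfs_dataset_seal_status(dataset_name)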
[ "self.color = color self.visible = True self.priority = 2 def draw(self, root_console, tick_count)", "2 def draw(self, root_console, tick_count) -> None: if not self.visible: return root_console.print(self.x, self.y,", "self.y = parent.y + y self.message = message self.color = color self.visible =", "def __init__(self, parent, x, y, message, color): self.x = parent.x + x self.y", "parent, x, y, message, color): self.x = parent.x + x self.y = parent.y", "y self.message = message self.color = color self.visible = True self.priority = 2", "self.x = parent.x + x self.y = parent.y + y self.message = message", "color): self.x = parent.x + x self.y = parent.y + y self.message =", "self.visible = True self.priority = 2 def draw(self, root_console, tick_count) -> None: if", "message self.color = color self.visible = True self.priority = 2 def draw(self, root_console,", "= parent.x + x self.y = parent.y + y self.message = message self.color", "def draw(self, root_console, tick_count) -> None: if not self.visible: return root_console.print(self.x, self.y, self.message,", "message, color): self.x = parent.x + x self.y = parent.y + y self.message", "= color self.visible = True self.priority = 2 def draw(self, root_console, tick_count) ->", "+ y self.message = message self.color = color self.visible = True self.priority =", "= True self.priority = 2 def draw(self, root_console, tick_count) -> None: if not", "__init__(self, parent, x, y, message, color): self.x = parent.x + x self.y =", "parent.x + x self.y = parent.y + y self.message = message self.color =", "= message self.color = color self.visible = True self.priority = 2 def draw(self,", "y, message, color): self.x = parent.x + x self.y = parent.y + y", "x, y, message, color): self.x = parent.x + x self.y = parent.y +", "UIMessage: def __init__(self, parent, x, y, message, color): self.x = parent.x + x", "= 2 def draw(self, root_console, tick_count) -> None: if not self.visible: return root_console.print(self.x,", "parent.y + y self.message = message self.color = color self.visible = True self.priority", "self.message = message self.color = color self.visible = True self.priority = 2 def", "<gh_stars>0 class UIMessage: def __init__(self, parent, x, y, message, color): self.x = parent.x", "class UIMessage: def __init__(self, parent, x, y, message, color): self.x = parent.x +", "= parent.y + y self.message = message self.color = color self.visible = True", "self.priority = 2 def draw(self, root_console, tick_count) -> None: if not self.visible: return", "color self.visible = True self.priority = 2 def draw(self, root_console, tick_count) -> None:", "+ x self.y = parent.y + y self.message = message self.color = color", "True self.priority = 2 def draw(self, root_console, tick_count) -> None: if not self.visible:", "x self.y = parent.y + y self.message = message self.color = color self.visible", "draw(self, root_console, tick_count) -> None: if not self.visible: return root_console.print(self.x, self.y, self.message, fg=self.color)" ]
[ "\"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" }", "get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet class \"\"\" params_sheet_id = '...'", "KEY----- ........ -----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\",", "from train_attention import train from options import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model", "-----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\",", "\"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name): writable_metrics_and_types = {", "server_name=server_name) if __name__ == '__main__': server_name = os.environ.get('SERVERNAME', None) params = YourParamsSheet(get_parser(), server_name)", "writable_metrics_and_types = { 'your model precision': float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash',", "KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\"", "parser, server_name): writable_metrics_and_types = { 'your model precision': float } super(YourParamsSheet, self).__init__( parser,", "parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__': server_name = os.environ.get('SERVERNAME', None) params", "} super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__': server_name =", "Your model Params Sheet class \"\"\" params_sheet_id = '...' client_credentials = { \"type\":", "train_attention import train from options import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model Params", "\"\"\" params_sheet_id = '...' client_credentials = { \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\",", "\"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ 
-----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\",", "\"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def", "\"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser,", "Sheet class \"\"\" params_sheet_id = '...' client_credentials = { \"type\": \"service_account\", \"project_id\": \"....\",", "\"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name): writable_metrics_and_types", "'your model precision': float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__", "super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__': server_name = os.environ.get('SERVERNAME',", "server_name): writable_metrics_and_types = { 'your model precision': float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types,", "experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__': server_name = os.environ.get('SERVERNAME', None) params = YourParamsSheet(get_parser(),", "\"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self,", "def __init__(self, parser, server_name): writable_metrics_and_types = { 'your model precision': float } super(YourParamsSheet,", "{ \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ -----END", "........ 
-----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\":", "{ 'your model precision': float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if", "precision': float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__':", "PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\":", "spreaduler import ParamsSheet from train_attention import train from options import get_parser class YourParamsSheet(ParamsSheet):", "import train from options import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet", "= { 'your model precision': float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name)", "self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__': server_name = os.environ.get('SERVERNAME', None)", "writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__': server_name = os.environ.get('SERVERNAME', None) params =", "if __name__ == '__main__': server_name = os.environ.get('SERVERNAME', None) params = YourParamsSheet(get_parser(), server_name) params.exec_loop(train)", "options import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet class \"\"\" params_sheet_id", "\"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ -----END PRIVATE KEY-----\"\"\",", "\"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name): writable_metrics_and_types =", "\"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name): writable_metrics_and_types = { 'your model", "= '...' 
client_credentials = { \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN", "\"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name):", "\"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name): writable_metrics_and_types = { 'your model precision': float", "client_credentials = { \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY-----", "ParamsSheet from train_attention import train from options import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your", "\"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ -----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\":", "\"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ -----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\":", "\"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\": \"https://accounts.google.com/o/oauth2/token\", \"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\"", "\"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name): writable_metrics_and_types = { 'your model precision':", "float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ == '__main__': server_name", "YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet class \"\"\" params_sheet_id = '...' client_credentials =", "class \"\"\" params_sheet_id = '...' client_credentials = { \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\":", "'...' client_credentials = { \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE", "= { \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........", "\"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ -----END PRIVATE KEY-----\"\"\", \"client_email\":", "class YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet class \"\"\" params_sheet_id = '...' client_credentials", "\"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ -----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\",", "__init__(self, parser, server_name): writable_metrics_and_types = { 'your model precision': float } super(YourParamsSheet, self).__init__(", "import ParamsSheet from train_attention import train from options import get_parser class YourParamsSheet(ParamsSheet): \"\"\"", "Params Sheet class \"\"\" params_sheet_id = '...' 
client_credentials = { \"type\": \"service_account\", \"project_id\":", "model precision': float } super(YourParamsSheet, self).__init__( parser, writable_column_types=writable_metrics_and_types, experiment_id_column='exp_hash', server_name=server_name) if __name__ ==", "params_sheet_id = '...' client_credentials = { \"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\":", "PRIVATE KEY----- ........ -----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\", \"token_uri\":", "from spreaduler import ParamsSheet from train_attention import train from options import get_parser class", "\"type\": \"service_account\", \"project_id\": \"....\", \"private_key_id\": \"....\", \"private_key\": \"\"\"-----BEGIN PRIVATE KEY----- ........ -----END PRIVATE", "\"\"\" Your model Params Sheet class \"\"\" params_sheet_id = '...' client_credentials = {", "import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet class \"\"\" params_sheet_id =", "train from options import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet class", "\"\"\"-----BEGIN PRIVATE KEY----- ........ -----END PRIVATE KEY-----\"\"\", \"client_email\": \"<EMAIL>\", \"client_id\": \"....\", \"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\",", "from options import get_parser class YourParamsSheet(ParamsSheet): \"\"\" Your model Params Sheet class \"\"\"", "\"https://www.googleapis.com/oauth2/v1/certs\", \"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/\" \"yourworker%40yourproject.iam.gserviceaccount.com\" } def __init__(self, parser, server_name): writable_metrics_and_types = { 'your", "model Params Sheet class \"\"\" params_sheet_id = '...' client_credentials = { \"type\": \"service_account\",", "} def __init__(self, parser, server_name): writable_metrics_and_types = { 'your model precision': float }" ]
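The client_credentials dict mirrors the service-account JSON that the Google Cloud console exports for a service account, so inlining it is optional. A sketch of loading it from the exported file instead, which keeps the private key out of the source; the file name is a placeholder:

import json

with open("service_account.json") as fp:  # placeholder path to the exported key file
    client_credentials = json.load(fp)    # same keys as the inline dict above

Either way, the worker identified by SERVERNAME claims rows from the sheet and writes metrics such as 'your model precision' back into the writable columns.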
[ "as pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt", "import listdiff import numpy as np import pandas as pd reclist = pd.DataFrame({", "[4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1]", "cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert", "reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2],", "np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1],", "pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values,", "reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({", "gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({", "'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values,", "res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] ==", "== np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2],", "[1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ]))", "<filename>test.py import listdiff import numpy as np import pandas as pd reclist =", "= pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid':", "as np import pandas as pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12],", "'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos':", "pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2],", "assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4] ])) assert np.all(res[res[:,0]==4] == np.array([", "import pandas as pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1]", "'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id':", "gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, 
complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3],", "[1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1]", "= pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values,", "}) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res =", "np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4] ])) assert np.all(res[res[:,0]==4] == np.array([ [4,1,1],", "[2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4] ])) assert np.all(res[res[:,0]==4]", "pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1],", "'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res)", "= pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid':", "== np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4] ])) assert np.all(res[res[:,0]==4] == np.array([ [4,1,1], ]))", "[1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert", "'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id':", "[1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1],", "[1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]),", "assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] == np.array([", "[1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res", "[1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert", "])) assert np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] ==", "numpy as np import pandas as pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id':", "])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4] ])) assert np.all(res[res[:,0]==4] ==", "[1,11,4] ])) assert 
np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3]", "[1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement", "complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4]", "'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4", "np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] == np.array([ [2,6,1],", "np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3],", "pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt =", "'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos':", "= listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] == np.array([", "print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] ==", "'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] })", "pandas as pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] })", "'pos': [1,2,3,4,3,2,1] }) complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] })", "np import pandas as pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos':", "}) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1]", "assert np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([", "import numpy as np import pandas as pd reclist = pd.DataFrame({ 'pid': [1,1,1,2,2,2,3,3,3],", "np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3],", "== np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2],", "}) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9], 'pos': [1,2,3,4,3,2,1] }) complement =", "[2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4] ])) assert", "extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ]))", "listdiff import numpy as 
np import pandas as pd reclist = pd.DataFrame({ 'pid':", "[10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 )", ") print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2]", "[1,4,1], [1,6,2], [1,10,3], [1,11,4] ])) assert np.all(res[res[:,0]==2] == np.array([ [2,6,1], [2,10,2], [2,11,3], [2,13,4]", "[2,6,1], [2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4]", "[2,10,2], [2,11,3], [2,13,4] ])) assert np.all(res[res[:,0]==3] == np.array([ [3,12,1], [3,5,2], [3,4,3], [3,11,4] ]))", "complement = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2,3,3,3,-1], 'song_id': [10,11,12,10,11,12,13,10,11,12,1], 'pos': [1,2,3,1,2,3,4,3,2,1,1] }) res = listdiff.listdiff(", "listdiff.listdiff( reclist[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, gt[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, complement=complement[[\"pid\",\"song_id\",\"pos\"]].sample(frac=1).values, extra_pids=np.array([3,4]), cut_to_size=4 ) print(res) assert np.all(res[res[:,0]==1] == np.array([ [1,4,1],", "[1,1,1,2,2,2,3,3,3], 'song_id': [4,5,6,4,5,6,4,5,12], 'pos': [1,2,3,1,2,3,3,2,1] }) gt = pd.DataFrame({ 'pid': [1,1,1,2,2,2,2], 'song_id': [1,5,9,4,5,12,9]," ]
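Reading the fixtures back: for each pid, the expected output is that pid's recommendation list minus anything in the ground truth, padded from the pid's complement list, de-duplicated and cut to cut_to_size, with positions renumbered from 1; extra_pids with no complement rows fall back to the list filed under pid -1. A pure-NumPy sketch that reproduces the asserts above; the real listdiff is presumably an optimized implementation, and the pid -1 fallback convention is inferred from the data, not from its docs:

import numpy as np

def songs_for(arr, pid):
    # song_ids of the [pid, song_id, pos] rows for one pid, ordered by pos.
    sel = arr[arr[:, 0] == pid]
    return list(sel[np.argsort(sel[:, 2]), 1])

def listdiff_reference(rec, gt, complement, extra_pids, cut_to_size):
    out = []
    for pid in sorted(set(rec[:, 0]) | set(extra_pids)):
        banned = set(gt[gt[:, 0] == pid][:, 1])
        comp = songs_for(complement, pid) or songs_for(complement, -1)
        picked = []
        for song in songs_for(rec, pid) + comp:
            if song not in banned and song not in picked:
                picked.append(song)
            if len(picked) == cut_to_size:
                break
        out += [[pid, song, pos] for pos, song in enumerate(picked, 1)]
    return np.array(out)

Shuffling every input with sample(frac=1) before the call also shows that listdiff is expected to be order-insensitive, keying ordering entirely off the pos column.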
[ "indent=4 if app.debug else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try:", "import DropPoint from c3bottles.model.report import Report from c3bottles.model.visit import Visit bp = Blueprint(\"api\",", "dp_json() return Response( json.dumps( \"Invalid or missing API action.\", indent=4 if app.debug else", "app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []),", "json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response(", "try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4", "0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False),", "from flask_login import current_user from c3bottles import app, db from c3bottles.model.drop_point import DropPoint", "or insufficient privileges.\"}], indent=4 if app.debug else None ), mimetype=\"application/json\", status=401 ) number", "@bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE',", "DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json(): ts = request.values.get(\"ts\") if ts: try: dps =", "elif request.values.get(\"action\") == \"visit\": return visit() elif request.values.get(\"action\") == \"dp_json\": return dp_json() return", "== \"report\": return report() elif request.values.get(\"action\") == \"visit\": return visit() elif request.values.get(\"action\") ==", "None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json():", "else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\")", "app.debug else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number),", "ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400", "if request.values.get(\"action\") == \"report\": return report() elif request.values.get(\"action\") == \"visit\": return visit() elif", "request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as e: return Response( json.dumps(e.args,", "missing API action.\", indent=4 if app.debug else None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\",", "def process(): if request.values.get(\"action\") == \"report\": return report() elif request.values.get(\"action\") == \"visit\": return", "map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\",", ") else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def 
dp_json(): ts = request.values.get(\"ts\")", "Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json(): ts = request.values.get(\"ts\") if ts: try: dps", "time=datetime.fromtimestamp(float(ts)) ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug else", "e: return Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else:", "process(): if request.values.get(\"action\") == \"report\": return report() elif request.values.get(\"action\") == \"visit\": return visit()", "datetime import datetime from flask import request, Response, Blueprint, jsonify from flask_login import", "c3bottles.model.drop_point import DropPoint from c3bottles.model.report import Report from c3bottles.model.visit import Visit bp =", "API action.\", indent=4 if app.debug else None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\",", "return dp_json() return Response( json.dumps( \"Invalid or missing API action.\", indent=4 if app.debug", "''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None),", "Response( json.dumps( \"Invalid or missing API action.\", indent=4 if app.debug else None ),", "None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False),", "not current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4", "\"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\":", "[]), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0),", "status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source", "current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4 if", "mimetype=\"application/json\" ) def visit(): if not current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not logged", "flask_login import current_user from c3bottles import app, db from c3bottles.model.drop_point import DropPoint from", "if app.debug else None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\"", "\"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) })", "c3bottles import app, db from c3bottles.model.drop_point import DropPoint from c3bottles.model.report import Report from", "number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as e: return", "0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": 
map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False)", "return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4 if app.debug", "jsonify from flask_login import current_user from c3bottles import app, db from c3bottles.model.drop_point import", "map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\",", "status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if not", "Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if", "import Report from c3bottles.model.visit import Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\"))", "from datetime import datetime from flask import request, Response, Blueprint, jsonify from flask_login", "json from datetime import datetime from flask import request, Response, Blueprint, jsonify from", "dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug", "map_source(): map_source = app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''),", "\"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def report(): if", "return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution',", "None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") )", "\"report\": return report() elif request.values.get(\"action\") == \"visit\": return visit() elif request.values.get(\"action\") == \"dp_json\":", "False) }) def report(): if not current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not logged", "dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug", "request.values.get(\"action\") == \"visit\": return visit() elif request.values.get(\"action\") == \"dp_json\": return dp_json() return Response(", ") def dp_json(): ts = request.values.get(\"ts\") if ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts))", "import current_user from c3bottles import app, db from c3bottles.model.drop_point import DropPoint from c3bottles.model.report", "Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\") ==", "import app, db from c3bottles.model.drop_point import DropPoint from c3bottles.model.report import Report from c3bottles.model.visit", ") else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if not current_user.can_visit:", "def report(): if not current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not logged in or", "mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def 
all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source():", "status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json(): ts =", "from c3bottles import app, db from c3bottles.model.drop_point import DropPoint from c3bottles.model.report import Report", "\"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\":", "report() elif request.values.get(\"action\") == \"visit\": return visit() elif request.values.get(\"action\") == \"dp_json\": return dp_json()", "if app.debug else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Visit(", "if ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as e: return", "), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except", "from c3bottles.model.drop_point import DropPoint from c3bottles.model.report import Report from c3bottles.model.visit import Visit bp", "else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\")", "None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit():", "import datetime from flask import request, Response, Blueprint, jsonify from flask_login import current_user", "DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug", "action=request.values.get(\"maintenance\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug else", "Blueprint, jsonify from flask_login import current_user from c3bottles import app, db from c3bottles.model.drop_point", "), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def", "= DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if", "None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False),", "else None), mimetype=\"application/json\", status=400 ) else: dps = DropPoint.get_dps_json() return Response( dps, mimetype=\"application/json\"", "Response, Blueprint, jsonify from flask_login import current_user from c3bottles import app, db from", "<gh_stars>0 import json from datetime import datetime from flask import request, Response, Blueprint,", "False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def report(): if not current_user.can_report:", "json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: dps = DropPoint.get_dps_json()", "try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as e: return Response( json.dumps(e.args,", "''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), 
\"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None),", "import json from datetime import datetime from flask import request, Response, Blueprint, jsonify", "\"dp_json\": return dp_json() return Response( json.dumps( \"Invalid or missing API action.\", indent=4 if", "\"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\":", "if app.debug else None), mimetype=\"application/json\", status=400 ) else: dps = DropPoint.get_dps_json() return Response(", "Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if not current_user.can_visit: return Response( json.dumps( [{\"msg\":", "c3bottles.model.visit import Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if", "\"GET\")) def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE', {}) return", "Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: dps =", "return visit() elif request.values.get(\"action\") == \"dp_json\": return dp_json() return Response( json.dumps( \"Invalid or", "map_source.get(\"no_wrap\", False) }) def report(): if not current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not", ") except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug else None),", "== \"dp_json\": return dp_json() return Response( json.dumps( \"Invalid or missing API action.\", indent=4", "\"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def report(): if not current_user.can_report: return", "or missing API action.\", indent=4 if app.debug else None ), mimetype=\"application/json\", status=400 )", "request.values.get(\"action\") == \"report\": return report() elif request.values.get(\"action\") == \"visit\": return visit() elif request.values.get(\"action\")", "return Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: dps", "indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number),", "\"GET\")) def process(): if request.values.get(\"action\") == \"report\": return report() elif request.values.get(\"action\") == \"visit\":", "visit(): if not current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient", "dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution', ''),", "dp_json(): ts = request.values.get(\"ts\") if ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except", "else None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp(): return dp_json()", "return report() elif request.values.get(\"action\") == \"visit\": return visit() elif request.values.get(\"action\") == \"dp_json\": return", "not current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4", "from flask import request, Response, Blueprint, jsonify from flask_login 
import current_user from c3bottles", "mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json(): ts", "db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json(): ts = request.values.get(\"ts\") if ts:", "return Response( json.dumps( \"Invalid or missing API action.\", indent=4 if app.debug else None", "return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if not current_user.can_visit: return Response( json.dumps(", "ts = request.values.get(\"ts\") if ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError", "action.\", indent=4 if app.debug else None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\"))", "c3bottles.model.report import Report from c3bottles.model.visit import Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\",", "__name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\") == \"report\": return report() elif", "\"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\":", "\"Not logged in or insufficient privileges.\"}], indent=4 if app.debug else None ), mimetype=\"application/json\",", "try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4", "return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json(): ts = request.values.get(\"ts\") if ts: try:", "map_source = app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\":", "app.debug else None), mimetype=\"application/json\", status=400 ) else: dps = DropPoint.get_dps_json() return Response( dps,", "\"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\":", "methods=(\"POST\", \"GET\")) def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE', {})", "== \"visit\": return visit() elif request.values.get(\"action\") == \"dp_json\": return dp_json() return Response( json.dumps(", "import Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\")", "mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError", "None), mimetype=\"application/json\", status=400 ) else: dps = DropPoint.get_dps_json() return Response( dps, mimetype=\"application/json\" )", "DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if not current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not", "def map_source(): map_source = app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": 
map_source.get('tileserver',", "{}) return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\":", "flask import request, Response, Blueprint, jsonify from flask_login import current_user from c3bottles import", "= request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as e: return Response(", ") number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as e:", "request.values.get(\"action\") == \"dp_json\": return dp_json() return Response( json.dumps( \"Invalid or missing API action.\",", "Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return", "None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") )", "map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def report(): if not current_user.can_report: return Response(", ") def visit(): if not current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not logged in", "= app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\",", "@bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\") == \"report\": return report() elif request.values.get(\"action\")", "app, db from c3bottles.model.drop_point import DropPoint from c3bottles.model.report import Report from c3bottles.model.visit import", "def dp_json(): ts = request.values.get(\"ts\") if ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) )", "\"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\":", "else None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def", "map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\",", "current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4 if", "= Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\") == \"report\": return", "current_user from c3bottles import app, db from c3bottles.model.drop_point import DropPoint from c3bottles.model.report import", "if app.debug else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Report(", "app.debug else None), mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" )", "map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), 
\"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\",", "@bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\":", "DropPoint from c3bottles.model.report import Report from c3bottles.model.visit import Visit bp = Blueprint(\"api\", __name__)", "\"no_wrap\": map_source.get(\"no_wrap\", False) }) def report(): if not current_user.can_report: return Response( json.dumps( [{\"msg\":", "mimetype=\"application/json\", status=400 ) else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if", "\"visit\": return visit() elif request.values.get(\"action\") == \"dp_json\": return dp_json() return Response( json.dumps( \"Invalid", "Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\") == \"report\": return report()", "None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\")", "[{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4 if app.debug else None ),", "None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0),", "methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\") == \"report\": return report() elif request.values.get(\"action\") ==", "map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\",", "dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as e: return Response( json.dumps(e.args, indent=4", "\"Invalid or missing API action.\", indent=4 if app.debug else None ), mimetype=\"application/json\", status=400", "except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\",", "False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def report(): if not current_user.can_report: return Response( json.dumps(", "if app.debug else None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp():", "bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process(): if request.values.get(\"action\") == \"report\":", "request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as e: return Response( json.dumps(e.args,", "False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def report():", "from c3bottles.model.report import Report from c3bottles.model.visit import Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\",", "indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: dps = DropPoint.get_dps_json() return", "status=401 ) number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as", "if not 
current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}],", "ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as e: return Response(", ") number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as e:", "map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\",", "map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def", "def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE', {}) return jsonify({", "from c3bottles.model.visit import Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def process():", "insufficient privileges.\"}], indent=4 if app.debug else None ), mimetype=\"application/json\", status=401 ) number =", "app.debug else None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp(): return", "Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4 if app.debug else", "Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if", "jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None),", "), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except", "number = request.values.get(\"number\") try: Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as e: return", "db from c3bottles.model.drop_point import DropPoint from c3bottles.model.report import Report from c3bottles.model.visit import Visit", "state=request.values.get(\"state\") ) except ValueError as e: return Response( json.dumps(e.args, indent=4 if app.debug else", ") @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source =", "map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\", False), \"no_wrap\": map_source.get(\"no_wrap\", False) }) def report(): if not", "as e: return Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 )", "all_dp(): return dp_json() @bp.route(\"/api/map_source.json\") def map_source(): map_source = app.config.get('MAP_SOURCE', {}) return jsonify({ \"attribution\":", "if not current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}],", "request.values.get(\"ts\") if ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as e:", "\"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\", None), \"min_zoom\": 
map_source.get(\"min_zoom\", 0), \"max_zoom\":", "logged in or insufficient privileges.\"}], indent=4 if app.debug else None ), mimetype=\"application/json\", status=401", "return jsonify({ \"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\",", "else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def dp_json(): ts = request.values.get(\"ts\") if", "elif request.values.get(\"action\") == \"dp_json\": return dp_json() return Response( json.dumps( \"Invalid or missing API", "in or insufficient privileges.\"}], indent=4 if app.debug else None ), mimetype=\"application/json\", status=401 )", "datetime from flask import request, Response, Blueprint, jsonify from flask_login import current_user from", "def visit(): if not current_user.can_visit: return Response( json.dumps( [{\"msg\": \"Not logged in or", "map_source.get(\"min_zoom\", 0), \"max_zoom\": map_source.get(\"max_zoom\", 0), \"simple_crs\": map_source.get(\"simple_crs\", False), \"hack_257px\": map_source.get(\"hack_257px\", False), \"tms\": map_source.get(\"tms\",", "\"attribution\": map_source.get('attribution', ''), \"tileserver\": map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\":", "json.dumps( \"Invalid or missing API action.\", indent=4 if app.debug else None ), mimetype=\"application/json\",", "app.debug else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number),", "Report from c3bottles.model.visit import Visit bp = Blueprint(\"api\", __name__) @bp.route(\"/api\", methods=(\"POST\", \"GET\")) def", "report(): if not current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not logged in or insufficient", "}) def report(): if not current_user.can_report: return Response( json.dumps( [{\"msg\": \"Not logged in", "privileges.\"}], indent=4 if app.debug else None ), mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\")", "else: db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if not current_user.can_visit: return", "mimetype=\"application/json\", status=401 ) number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError", "json.dumps( [{\"msg\": \"Not logged in or insufficient privileges.\"}], indent=4 if app.debug else None", "status=401 ) number = request.values.get(\"number\") try: Visit( dp=DropPoint.query.get(number), action=request.values.get(\"maintenance\") ) except ValueError as", "import request, Response, Blueprint, jsonify from flask_login import current_user from c3bottles import app,", "request, Response, Blueprint, jsonify from flask_login import current_user from c3bottles import app, db", "visit() elif request.values.get(\"action\") == \"dp_json\": return dp_json() return Response( json.dumps( \"Invalid or missing", "map_source.get('tileserver', ''), \"tileserver_subdomains\": map_source.get(\"tileserver_subdomains\", []), \"bounds\": map_source.get(\"bounds\", None), \"initial_view\": map_source.get(\"initial_view\", None), \"level_config\": map_source.get(\"level_config\",", "= request.values.get(\"number\") try: 
Report( dp=DropPoint.query.get(number), state=request.values.get(\"state\") ) except ValueError as e: return Response(", "mimetype=\"application/json\" ) def dp_json(): ts = request.values.get(\"ts\") if ts: try: dps = DropPoint.get_dps_json(", "= request.values.get(\"ts\") if ts: try: dps = DropPoint.get_dps_json( time=datetime.fromtimestamp(float(ts)) ) except ValueError as", "indent=4 if app.debug else None ), mimetype=\"application/json\", status=400 ) @bp.route(\"/api/all_dp.json\", methods=(\"POST\", \"GET\")) def", "db.session.commit() return Response( DropPoint.get_dp_json(number), mimetype=\"application/json\" ) def visit(): if not current_user.can_visit: return Response(", "return Response( json.dumps(e.args, indent=4 if app.debug else None), mimetype=\"application/json\", status=400 ) else: db.session.commit()" ]
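
# How a client might exercise the endpoints above. This is a minimal
# sketch: the base URL, the drop point number and the "state" value are
# illustrative assumptions, not part of the module; only the paths and
# the "action"/"number"/"state" parameters come from the blueprint itself.
import requests

BASE = "http://localhost:5000"  # assumed development server address

# Public endpoints: all drop points and the map tile configuration.
dps = requests.get(BASE + "/api/all_dp.json").json()
cfg = requests.get(BASE + "/api/map_source.json").json()
print(cfg["tileserver"], cfg["min_zoom"], cfg["max_zoom"])

# The dispatcher endpoint; report() requires a logged-in user with
# reporting privileges, so a plain request should come back with HTTP 401.
resp = requests.post(BASE + "/api", data={
    "action": "report",  # routed to report() by process()
    "number": 1,         # hypothetical drop point number
    "state": "FULL",     # assumed to be a state value accepted by Report()
})
print(resp.status_code)  # 401 without privileges, 400 on bad input
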
[ "or 5 positions forward. Determine, what is the minimum number of steps he", "integer x (1≤x≤1000000) — The coordinate of the friend's house. Output Print the", "of steps he need to make in order to get to his friend's", "steps that elephant needs to make to get from point 0 to point", "5 positions forward. Determine, what is the minimum number of steps he need", "count = 0 for move in possible_moves: if x < move: continue count", "possible_moves: if x < move: continue count += (x//move) x = x %", "friend's house. Input The first line of the input contains an integer x", "from point 0 to point x. ''' x = int(input()) possible_moves = [5,", "''' x = int(input()) possible_moves = [5, 4, 3, 2, 1] count =", "at point 0 and his friend's house is located at point x(x >", "to visit his friend. It turned out that the elephant's house is located", "= int(input()) possible_moves = [5, 4, 3, 2, 1] count = 0 for", "minimum number of steps he need to make in order to get to", "needs to make to get from point 0 to point x. ''' x", "An elephant decided to visit his friend. It turned out that the elephant's", "move 1, 2, 3, 4 or 5 positions forward. Determine, what is the", "to make to get from point 0 to point x. ''' x =", "elephant decided to visit his friend. It turned out that the elephant's house", "2, 1] count = 0 for move in possible_moves: if x < move:", "out that the elephant's house is located at point 0 and his friend's", "continue count += (x//move) x = x % move if x == 0:", "to his friend's house. Input The first line of the input contains an", "of the friend's house. Output Print the minimum number of steps that elephant", "of the coordinate line. In one step the elephant can move 1, 2,", "friend's house. Output Print the minimum number of steps that elephant needs to", "the elephant can move 1, 2, 3, 4 or 5 positions forward. Determine,", "the friend's house. Output Print the minimum number of steps that elephant needs", "point x(x > 0) of the coordinate line. In one step the elephant", "the coordinate line. In one step the elephant can move 1, 2, 3,", "point x. ''' x = int(input()) possible_moves = [5, 4, 3, 2, 1]", "line of the input contains an integer x (1≤x≤1000000) — The coordinate of", "make in order to get to his friend's house. Input The first line", "steps he need to make in order to get to his friend's house.", "number of steps he need to make in order to get to his", "get from point 0 to point x. ''' x = int(input()) possible_moves =", "1] count = 0 for move in possible_moves: if x < move: continue", "elephant's house is located at point 0 and his friend's house is located", "turned out that the elephant's house is located at point 0 and his", "his friend. It turned out that the elephant's house is located at point", "input contains an integer x (1≤x≤1000000) — The coordinate of the friend's house.", "his friend's house. Input The first line of the input contains an integer", "an integer x (1≤x≤1000000) — The coordinate of the friend's house. Output Print", "x < move: continue count += (x//move) x = x % move if", "It turned out that the elephant's house is located at point 0 and", "Determine, what is the minimum number of steps he need to make in", "to make in order to get to his friend's house. Input The first", "one step the elephant can move 1, 2, 3, 4 or 5 positions", "point 0 and his friend's house is located at point x(x > 0)", "can move 1, 2, 3, 4 or 5 positions forward. 
Determine, what is", "number of steps that elephant needs to make to get from point 0", "need to make in order to get to his friend's house. Input The", "visit his friend. It turned out that the elephant's house is located at", "the minimum number of steps he need to make in order to get", "house. Output Print the minimum number of steps that elephant needs to make", "first line of the input contains an integer x (1≤x≤1000000) — The coordinate", "house is located at point 0 and his friend's house is located at", "possible_moves = [5, 4, 3, 2, 1] count = 0 for move in", "of steps that elephant needs to make to get from point 0 to", "order to get to his friend's house. Input The first line of the", "is located at point 0 and his friend's house is located at point", "to point x. ''' x = int(input()) possible_moves = [5, 4, 3, 2,", "In one step the elephant can move 1, 2, 3, 4 or 5", "what is the minimum number of steps he need to make in order", "''' greedy algorithm An elephant decided to visit his friend. It turned out", "elephant can move 1, 2, 3, 4 or 5 positions forward. Determine, what", "3, 2, 1] count = 0 for move in possible_moves: if x <", "of the input contains an integer x (1≤x≤1000000) — The coordinate of the", "friend's house is located at point x(x > 0) of the coordinate line.", "> 0) of the coordinate line. In one step the elephant can move", "x(x > 0) of the coordinate line. In one step the elephant can", "in order to get to his friend's house. Input The first line of", "The first line of the input contains an integer x (1≤x≤1000000) — The", "step the elephant can move 1, 2, 3, 4 or 5 positions forward.", "Output Print the minimum number of steps that elephant needs to make to", "The coordinate of the friend's house. Output Print the minimum number of steps", "forward. Determine, what is the minimum number of steps he need to make", "to get from point 0 to point x. ''' x = int(input()) possible_moves", "4 or 5 positions forward. Determine, what is the minimum number of steps", "he need to make in order to get to his friend's house. Input", "in possible_moves: if x < move: continue count += (x//move) x = x", "(1≤x≤1000000) — The coordinate of the friend's house. Output Print the minimum number", "Input The first line of the input contains an integer x (1≤x≤1000000) —", "that the elephant's house is located at point 0 and his friend's house", "positions forward. Determine, what is the minimum number of steps he need to", "that elephant needs to make to get from point 0 to point x.", "0) of the coordinate line. In one step the elephant can move 1,", "located at point 0 and his friend's house is located at point x(x", "elephant needs to make to get from point 0 to point x. '''", "= 0 for move in possible_moves: if x < move: continue count +=", "x. ''' x = int(input()) possible_moves = [5, 4, 3, 2, 1] count", "is the minimum number of steps he need to make in order to", "house is located at point x(x > 0) of the coordinate line. In", "— The coordinate of the friend's house. Output Print the minimum number of", "is located at point x(x > 0) of the coordinate line. 
In one", "the input contains an integer x (1≤x≤1000000) — The coordinate of the friend's", "= [5, 4, 3, 2, 1] count = 0 for move in possible_moves:", "minimum number of steps that elephant needs to make to get from point", "+= (x//move) x = x % move if x == 0: break print(count)", "< move: continue count += (x//move) x = x % move if x", "move: continue count += (x//move) x = x % move if x ==", "0 and his friend's house is located at point x(x > 0) of", "house. Input The first line of the input contains an integer x (1≤x≤1000000)", "for move in possible_moves: if x < move: continue count += (x//move) x", "0 to point x. ''' x = int(input()) possible_moves = [5, 4, 3,", "move in possible_moves: if x < move: continue count += (x//move) x =", "get to his friend's house. Input The first line of the input contains", "3, 4 or 5 positions forward. Determine, what is the minimum number of", "and his friend's house is located at point x(x > 0) of the", "the elephant's house is located at point 0 and his friend's house is", "at point x(x > 0) of the coordinate line. In one step the", "line. In one step the elephant can move 1, 2, 3, 4 or", "algorithm An elephant decided to visit his friend. It turned out that the", "decided to visit his friend. It turned out that the elephant's house is", "if x < move: continue count += (x//move) x = x % move", "coordinate line. In one step the elephant can move 1, 2, 3, 4", "x = int(input()) possible_moves = [5, 4, 3, 2, 1] count = 0", "greedy algorithm An elephant decided to visit his friend. It turned out that", "1, 2, 3, 4 or 5 positions forward. Determine, what is the minimum", "int(input()) possible_moves = [5, 4, 3, 2, 1] count = 0 for move", "0 for move in possible_moves: if x < move: continue count += (x//move)", "count += (x//move) x = x % move if x == 0: break", "his friend's house is located at point x(x > 0) of the coordinate", "x (1≤x≤1000000) — The coordinate of the friend's house. Output Print the minimum", "friend. It turned out that the elephant's house is located at point 0", "contains an integer x (1≤x≤1000000) — The coordinate of the friend's house. Output", "to get to his friend's house. Input The first line of the input", "point 0 to point x. ''' x = int(input()) possible_moves = [5, 4,", "Print the minimum number of steps that elephant needs to make to get", "located at point x(x > 0) of the coordinate line. In one step", "4, 3, 2, 1] count = 0 for move in possible_moves: if x", "2, 3, 4 or 5 positions forward. Determine, what is the minimum number", "the minimum number of steps that elephant needs to make to get from", "[5, 4, 3, 2, 1] count = 0 for move in possible_moves: if", "make to get from point 0 to point x. ''' x = int(input())", "coordinate of the friend's house. Output Print the minimum number of steps that" ]
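
# Why the greedy loop is optimal: with step sizes 1 through 5, k steps can
# cover any total distance from k to 5k, so the minimum step count is
# exactly ceil(x / 5). The check below is my own addition, not part of the
# original solution; it compares the loop against that closed form.
def greedy_steps(x):
    count = 0
    for move in (5, 4, 3, 2, 1):
        if x < move:
            continue
        count += x // move
        x %= move
        if x == 0:
            break
    return count

for n in range(1, 1001):
    assert greedy_steps(n) == -(-n // 5)  # -(-n // 5) is ceil(n / 5)
print("greedy matches ceil(x/5) for x = 1..1000")
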
[ "import numpy as np class InvalidFieldsException(Exception): pass class Dataset(object): \"\"\" Generic Dataset object", "Dataset({'Name': ['Alice', 'Bob', 'Carol', 'David', 'Ellen'], 'SSN': [1, 23, 45, 56, 7890]}) print(d)", "None else str(e) for e in row]) for row in zip(*tab)])) with open(fname,", "name in keep_fields])) @classmethod def pad(cls, sequences, padding, pad_len=None): \"\"\" Pads a list", "1:, 1:5) :param value: Sets the instances at index/indices `key` to the instances(s)", "corresponds to the attribute 'foo' for the ith instance in the dataset. The", "k != 'label'} for l in cache[1:]: for i, k in enumerate(fields): if", "\"\"\" Serializes the dataset in CONLL format to fname \"\"\" if 'label' not", "Applies transformations to the dataset. :param converters: A dictionary specifying the function to", "t2 Here, the fields are `description` and `tags`. The first instance has the", "write_conll(self, fname): \"\"\" Serializes the dataset in CONLL format to fname \"\"\" if", "fname: The CONLL formatted file from which to load the dataset :return: loaded", "- 1: f.write('\\n') def convert(self, converters, in_place=False): \"\"\" Applies transformations to the dataset.", "data[item]) for name, data in self.fields.items()]) def __setitem__(self, key, value): \"\"\" :param key:", "as f: f.write('# {}'.format('\\t'.join([k for k in self.fields if k != 'label']))) for", "def __getitem__(self, item): \"\"\" :param item: An integer index or a slice (eg.", "pad_len = pad_len or max_len assert pad_len >= max_len, 'pad_len {} must be", "dataset instance \"\"\" dataset = self if in_place else self.__class__(OrderedDict([(name, data[:]) for name,", "The dataset stores the instances in an ordered dictionary of fields. Each field", "f: line = line.strip() if line: cache.append(line) else: # met empty line, process", "in input data: {}'.format(name, value)) data[key] = value[name] def __iter__(self): \"\"\" :return: A", "format, among others. Example: .. code-block:: python d = Dataset({'Name': ['Alice', 'Bob', 'Carol',", "instance \"\"\" def process_cache(cache, fields): cache = [l.split() for l in cache if", "module for managing text datasets. \"\"\" __author__ = 'victor' from collections import OrderedDict", "cache = [l.split() for l in cache if l] if not cache: return", "tags Alice Hello t1 my t2 name t3 is t4 alice t5 Bob", "for e in row]) for row in zip(*tab)])) with open(fname, 'wb') as f:", ":param fields: An ordered dictionary in which a key is the name of", "None fields['label'].append(cache[0][0]) instance = {k: [] for k in fields if k !=", "The number of instances in the dataset. \"\"\" if len(self.fields) == 0: return", "keep_fields = self.fields.keys() or keep_fields return self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items()", "sequences of varying lengths. :param padding: the value of padded cells. 
:param pad_len:", "Alice Hello t1 my t2 name t3 is t4 alice t5 Bob I'm", "name, data in self.fields.items() if name in keep_fields])) @classmethod def pad(cls, sequences, padding,", "InvalidFieldsException('Converter specified for non-existent field {}'.format(name)) for i, d in enumerate(dataset.fields[name]): dataset.fields[name][i] =", "in keep_fields])) @classmethod def pad(cls, sequences, padding, pad_len=None): \"\"\" Pads a list of", "in CONLL format: missing label field\") def instance_to_conll(inst): tab = [v for k,", "l] if not cache: return None fields['label'].append(cache[0][0]) instance = {k: [] for k", "ith element of the list for field 'foo' corresponds to the attribute 'foo'", "i in xrange(len(self)): yield self[i] def copy(self, keep_fields=None): \"\"\" :param keep_fields: if specified,", "max_len) for i, s in enumerate(sequences): sequences[i] = [padding] * (pad_len - len(s))", "for s in sequences]) pad_len = pad_len or max_len assert pad_len >= max_len,", "missing from the dictionary, then it will not be transformed. :param in_place: Whether", "fields. Each field maps to a list, the ith element of the list", "dataset. The dataset object supports indexing, iterating, slicing (eg. for iterating over batches),", "... \"\"\" def __init__(self, fields): \"\"\" :param fields: An ordered dictionary in which", "in instance.items(): fields[k].append(v) cache = [] with open(fname) as f: header = f.next().strip().split('\\t')", ":param value: Sets the instances at index/indices `key` to the instances(s) `value` \"\"\"", "for name, convert in converters.items(): if name not in self.fields.keys(): raise InvalidFieldsException('Converter specified", "if l] if not cache: return None fields['label'].append(cache[0][0]) instance = {k: [] for", "in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d))) if i != len(self) - 1: f.write('\\n') def convert(self, converters,", "of the maximum padded sequence. \"\"\" max_len = max([len(s) for s in sequences])", "dictionary specifying the function to apply to each field. If a field is", "{}'.format(name)) for i, d in enumerate(dataset.fields[name]): dataset.fields[name][i] = convert(d) return dataset def shuffle(self):", "return self def __getitem__(self, item): \"\"\" :param item: An integer index or a", ":return: A iterator over the instances in the dataset \"\"\" for i in", "the instances at index/indices `key` to the instances(s) `value` \"\"\" for name, data", "in header]) fields['label'] = [] for line in f: line = line.strip() if", "of instances in the dataset. \"\"\" if len(self.fields) == 0: return 0 return", "shuffle(self): \"\"\" Re-indexes the dataset in random order :return: the shuffled dataset instance", "of the values of the instances in the dataset. :return: A Dataset object", "`tags`. The first instance has the label `Alice` and the description `['Hello', 'my',", "k != 'label']))) for i, d in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d))) if i != len(self)", "transformed dataset instance \"\"\" dataset = self if in_place else self.__class__(OrderedDict([(name, data[:]) for", "an ordered dictionary of fields. Each field maps to a list, the ith", "and a value is a list of the values of the instances in", "keep_fields])) @classmethod def pad(cls, sequences, padding, pad_len=None): \"\"\" Pads a list of sequences", "padded cells. :param pad_len: the length of the maximum padded sequence. 
\"\"\" max_len", "in fields if k != 'label'} for l in cache[1:]: for i, k", "self.fields: raise InvalidFieldsException(\"dataset is not in CONLL format: missing label field\") def instance_to_conll(inst):", "# OrderedDict([('SSN', [23, 45]), ('Name', ['Bob', 'Carol'])]) for e in d: print(e) #", "\"\"\" :return: A iterator over the instances in the dataset \"\"\" for i", "and the description `[\"I'm\", 'bob']` and the tags `['t1', 't2']`. :param fname: The", "'pad_len {} must be greater or equal to the longest sequence {}'.format(pad_len, max_len)", "the instances in an ordered dictionary of fields. Each field maps to a", "The second instance has the label `Bob` and the description `[\"I'm\", 'bob']` and", "CONLL format, among others. Example: .. code-block:: python d = Dataset({'Name': ['Alice', 'Bob',", "e is None else str(e) for e in row]) for row in zip(*tab)]))", "for k in self.fields if k != 'label']))) for i, d in enumerate(self):", "__getitem__(self, item): \"\"\" :param item: An integer index or a slice (eg. 2,", "of the list for field 'foo' corresponds to the attribute 'foo' for the", "fields): cache = [l.split() for l in cache if l] if not cache:", "OrderedDict import random import numpy as np class InvalidFieldsException(Exception): pass class Dataset(object): \"\"\"", "len(d))) def __len__(self): \"\"\" :return: The number of instances in the dataset. \"\"\"", "in zip(*tab)])) with open(fname, 'wb') as f: f.write('# {}'.format('\\t'.join([k for k in self.fields", "= None length_field = None for name, d in fields.items(): if length is", "second instance has the label `Bob` and the description `[\"I'm\", 'bob']` and the", "of the instances in the dataset. :return: A Dataset object \"\"\" self.fields =", "instance has the label `Bob` and the description `[\"I'm\", 'bob']` and the tags", "if not cache: return None fields['label'].append(cache[0][0]) instance = {k: [] for k in", "fields if k != 'label'} for l in cache[1:]: for i, k in", "fields[k].append(v) cache = [] with open(fname) as f: header = f.next().strip().split('\\t') header[0] =", "for iterating over batches), shuffling, conversion to/from CONLL format, among others. Example: ..", "element of the list for field 'foo' corresponds to the attribute 'foo' for", "`['t1', 't2']`. :param fname: The CONLL formatted file from which to load the", "[]) for head in header]) fields['label'] = [] for line in f: line", "fields) cache = [] if cache: process_cache(cache, fields) return cls(fields) def write_conll(self, fname):", "', '.join(self.fields.keys())) @classmethod def load_conll(cls, fname): \"\"\" The CONLL file must have a", "('Name', 'Carol')]) print(d[1:3]) # OrderedDict([('SSN', [23, 45]), ('Name', ['Bob', 'Carol'])]) for e in", "= [] for line in f: line = line.strip() if line: cache.append(line) else:", "if name in keep_fields])) @classmethod def pad(cls, sequences, padding, pad_len=None): \"\"\" Pads a", "text datasets. \"\"\" __author__ = 'victor' from collections import OrderedDict import random import", "'David', 'Ellen'], 'SSN': [1, 23, 45, 56, 7890]}) print(d) # Dataset(Name, SSN) print(d[2])", "'t3', 't4', 't5']`. The second instance has the label `Bob` and the description", "['Alice', 'Bob', 'Carol', 'David', 'Ellen'], 'SSN': [1, 23, 45, 56, 7890]}) print(d) #", "from which to load the dataset :return: loaded Dataset instance \"\"\" def process_cache(cache,", "indexing, iterating, slicing (eg. 
for iterating over batches), shuffling, conversion to/from CONLL format,", "fields.items(): if length is None: length = len(d) length_field = name else: if", "d in fields.items(): if length is None: length = len(d) length_field = name", "padding, pad_len=None): \"\"\" Pads a list of sequences such that they form a", "is t4 alice t5 Bob I'm t1 bob t2 Here, the fields are", "= None for name, d in fields.items(): if length is None: length =", "specified for non-existent field {}'.format(name)) for i, d in enumerate(dataset.fields[name]): dataset.fields[name][i] = convert(d)", "len(self.fields) == 0: return 0 return len(self.fields.values()[0]) def __repr__(self): return \"{}({})\".format(self.__class__.__name__, ', '.join(self.fields.keys()))", "and `tags`. The first instance has the label `Alice` and the description `['Hello',", "'t2', 't3', 't4', 't5']`. The second instance has the label `Bob` and the", "'label'} for l in cache[1:]: for i, k in enumerate(fields): if k !=", "dataset in random order :return: the shuffled dataset instance \"\"\" order = range(len(self))", "for e in d: print(e) # OrderedDict([('SSN', 1), ('Name', 'Alice')]) ... \"\"\" def", "[23, 45]), ('Name', ['Bob', 'Carol'])]) for e in d: print(e) # OrderedDict([('SSN', 1),", "i, d in enumerate(dataset.fields[name]): dataset.fields[name][i] = convert(d) return dataset def shuffle(self): \"\"\" Re-indexes", "45), ('Name', 'Carol')]) print(d[1:3]) # OrderedDict([('SSN', [23, 45]), ('Name', ['Bob', 'Carol'])]) for e", "if e is None else str(e) for e in row]) for row in", "value)) data[key] = value[name] def __iter__(self): \"\"\" :return: A iterator over the instances", "attribute and a value is a list of the values of the instances", "format to fname \"\"\" if 'label' not in self.fields: raise InvalidFieldsException(\"dataset is not", "def __setitem__(self, key, value): \"\"\" :param key: An integer index or a slice", "and the description `['Hello', 'my', 'name', 'is', 'alice']` and the tags `['t1', 't2',", "\"\"\" :param keep_fields: if specified, then only the given fields will be kept", "must have a tab delimited header, for example:: # description tags Alice Hello", "InvalidFieldsException(Exception): pass class Dataset(object): \"\"\" Generic Dataset object that encapsulates a list of", "\"\"\" The CONLL file must have a tab delimited header, for example:: #", "np class InvalidFieldsException(Exception): pass class Dataset(object): \"\"\" Generic Dataset object that encapsulates a", "list of instances. The dataset stores the instances in an ordered dictionary of", "= {k: [] for k in fields if k != 'label'} for l", "in f: line = line.strip() if line: cache.append(line) else: # met empty line,", "it will not be transformed. :param in_place: Whether to perform the transformation in", "or a slice (eg. 2, 1:, 1:5) :return: an ordered dictionary of the", "label `Alice` and the description `['Hello', 'my', 'name', 'is', 'alice']` and the tags", "is missing in input data: {}'.format(name, value)) data[key] = value[name] def __iter__(self): \"\"\"", "stores the instances in an ordered dictionary of fields. Each field maps to", "or equal to the longest sequence {}'.format(pad_len, max_len) for i, s in enumerate(sequences):", "i in enumerate(order): reindexed.append(data[i]) self.fields[name] = reindexed return self def __getitem__(self, item): \"\"\"", "is copied). 
\"\"\" keep_fields = self.fields.keys() or keep_fields return self.__class__(OrderedDict([(name, data[:]) for name,", "else: if len(d) != length: raise InvalidFieldsException('field {} has length {} but field", "attribute 'foo' for the ith instance in the dataset. The dataset object supports", "delimited header, for example:: # description tags Alice Hello t1 my t2 name", ">= max_len, 'pad_len {} must be greater or equal to the longest sequence", "dataset = self if in_place else self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items()]))", "my t2 name t3 is t4 alice t5 Bob I'm t1 bob t2", "'\\n'.join(['\\t'.join(['-' if e is None else str(e) for e in row]) for row", "\"\"\" for i in xrange(len(self)): yield self[i] def copy(self, keep_fields=None): \"\"\" :param keep_fields:", "pad_len or max_len assert pad_len >= max_len, 'pad_len {} must be greater or", "name, len(d))) def __len__(self): \"\"\" :return: The number of instances in the dataset.", "print(d) # Dataset(Name, SSN) print(d[2]) # OrderedDict([('SSN', 45), ('Name', 'Carol')]) print(d[1:3]) # OrderedDict([('SSN',", "\"\"\" Applies transformations to the dataset. :param converters: A dictionary specifying the function", "`key` to the instances(s) `value` \"\"\" for name, data in self.fields.items(): if name", "max_len, 'pad_len {} must be greater or equal to the longest sequence {}'.format(pad_len,", "# Dataset(Name, SSN) print(d[2]) # OrderedDict([('SSN', 45), ('Name', 'Carol')]) print(d[1:3]) # OrderedDict([('SSN', [23,", "of the dataset (each instance is copied). \"\"\" keep_fields = self.fields.keys() or keep_fields", "'.join(self.fields.keys())) @classmethod def load_conll(cls, fname): \"\"\" The CONLL file must have a tab", "[] for k in fields if k != 'label'} for l in cache[1:]:", "the transformation in place or create a new dataset instance :return: the transformed", "self.fields.keys(): raise InvalidFieldsException('Converter specified for non-existent field {}'.format(name)) for i, d in enumerate(dataset.fields[name]):", "iterator over the instances in the dataset \"\"\" for i in xrange(len(self)): yield", "cache: process_cache(cache, fields) return cls(fields) def write_conll(self, fname): \"\"\" Serializes the dataset in", "dataset (each instance is copied). \"\"\" keep_fields = self.fields.keys() or keep_fields return self.__class__(OrderedDict([(name,", "CONLL file must have a tab delimited header, for example:: # description tags", "'-' else l[i]) for k, v in instance.items(): fields[k].append(v) cache = [] with", "pad_len >= max_len, 'pad_len {} must be greater or equal to the longest", "the instances in the dataset \"\"\" for i in xrange(len(self)): yield self[i] def", "pad_len: the length of the maximum padded sequence. \"\"\" max_len = max([len(s) for", "tags `['t1', 't2', 't3', 't4', 't5']`. The second instance has the label `Bob`", "k, v in inst.items() if k != 'label'] return '{}\\n{}'.format(inst['label'], '\\n'.join(['\\t'.join(['-' if e", "str(e) for e in row]) for row in zip(*tab)])) with open(fname, 'wb') as", "that encapsulates a list of instances. The dataset stores the instances in an", "enumerate(fields): if k != 'label': instance[k].append(None if l[i] == '-' else l[i]) for", "max([len(s) for s in sequences]) pad_len = pad_len or max_len assert pad_len >=", "will not be transformed. 
:param in_place: Whether to perform the transformation in place", "have a tab delimited header, for example:: # description tags Alice Hello t1", "instance[k].append(None if l[i] == '-' else l[i]) for k, v in instance.items(): fields[k].append(v)", "managing text datasets. \"\"\" __author__ = 'victor' from collections import OrderedDict import random", "!= len(self) - 1: f.write('\\n') def convert(self, converters, in_place=False): \"\"\" Applies transformations to", "in enumerate(order): reindexed.append(data[i]) self.fields[name] = reindexed return self def __getitem__(self, item): \"\"\" :param", "'label']))) for i, d in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d))) if i != len(self) - 1:", "The CONLL file must have a tab delimited header, for example:: # description", "create a new dataset instance :return: the transformed dataset instance \"\"\" dataset =", "keep_fields=None): \"\"\" :param keep_fields: if specified, then only the given fields will be", "in enumerate(fields): if k != 'label': instance[k].append(None if l[i] == '-' else l[i])", "the given fields will be kept :return: A deep copy of the dataset", "tab = [v for k, v in inst.items() if k != 'label'] return", "varying lengths. :param padding: the value of padded cells. :param pad_len: the length", "description `['Hello', 'my', 'name', 'is', 'alice']` and the tags `['t1', 't2', 't3', 't4',", "for k, v in instance.items(): fields[k].append(v) cache = [] with open(fname) as f:", "in self.fields.items()])) for name, convert in converters.items(): if name not in self.fields.keys(): raise", "!= 'label': instance[k].append(None if l[i] == '-' else l[i]) for k, v in", "\"\"\" Dataset module for managing text datasets. \"\"\" __author__ = 'victor' from collections", "InvalidFieldsException(\"dataset is not in CONLL format: missing label field\") def instance_to_conll(inst): tab =", "be kept :return: A deep copy of the dataset (each instance is copied).", "load the dataset :return: loaded Dataset instance \"\"\" def process_cache(cache, fields): cache =", "The dataset object supports indexing, iterating, slicing (eg. for iterating over batches), shuffling,", "has the label `Bob` and the description `[\"I'm\", 'bob']` and the tags `['t1',", "range(len(self)) random.shuffle(order) for name, data in self.fields.items(): reindexed = [] for _, i", "A deep copy of the dataset (each instance is copied). \"\"\" keep_fields =", "'wb') as f: f.write('# {}'.format('\\t'.join([k for k in self.fields if k != 'label'])))", "'SSN': [1, 23, 45, 56, 7890]}) print(d) # Dataset(Name, SSN) print(d[2]) # OrderedDict([('SSN',", "length = len(d) length_field = name else: if len(d) != length: raise InvalidFieldsException('field", "'alice']` and the tags `['t1', 't2', 't3', 't4', 't5']`. The second instance has", "self.fields.items(): if name not in value: raise InvalidFieldsException('field {} is missing in input", "name else: if len(d) != length: raise InvalidFieldsException('field {} has length {} but", "description tags Alice Hello t1 my t2 name t3 is t4 alice t5", "to fname \"\"\" if 'label' not in self.fields: raise InvalidFieldsException(\"dataset is not in", "a list of instances. The dataset stores the instances in an ordered dictionary", "the dataset. 


class Dataset(object):
    """
    Generic Dataset object that encapsulates a list of instances.

    The dataset stores the instances in an ordered dictionary of fields. Each
    field maps to a list; the ith element of the list for field 'foo'
    corresponds to the attribute 'foo' of the ith instance in the dataset.
    The dataset object supports indexing, iterating, slicing (eg. for
    iterating over batches), shuffling, conversion to/from CONLL format,
    among others.

    Example:

    .. code-block:: python

        d = Dataset({'Name': ['Alice', 'Bob', 'Carol', 'David', 'Ellen'],
                     'SSN': [1, 23, 45, 56, 7890]})
        print(d)  # Dataset(Name, SSN)
        print(d[2])  # OrderedDict([('SSN', 45), ('Name', 'Carol')])
        print(d[1:3])  # OrderedDict([('SSN', [23, 45]), ('Name', ['Bob', 'Carol'])])
        for e in d:
            print(e)  # OrderedDict([('SSN', 1), ('Name', 'Alice')]) ...
    """

    def __init__(self, fields):
        """
        :param fields: An ordered dictionary in which a key is the name of an
            attribute and a value is a list of the values of the instances in
            the dataset.

        :return: A Dataset object
        """
        self.fields = OrderedDict(fields)
        length = None
        length_field = None
        for name, d in fields.items():
            if length is None:
                length = len(d)
                length_field = name
            else:
                if len(d) != length:
                    raise InvalidFieldsException(
                        'field {} has length {} but field {} has length {}'.format(
                            length_field, length, name, len(d)))
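
    # Illustrative sketch (not part of the original module): construction
    # validates that all fields share the same length, so ragged input fails
    # fast. The field names here are hypothetical.
    #
    #     >>> d = Dataset(OrderedDict([('Name', ['Alice', 'Bob']), ('Age', [30, 40])]))
    #     >>> len(d)
    #     2
    #     >>> Dataset(OrderedDict([('Name', ['Alice']), ('Age', [30, 40])]))
    #     Traceback (most recent call last):
    #         ...
    #     InvalidFieldsException: field Name has length 1 but field Age has length 2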
\"\"\" max_len = max([len(s)", "CONLL format: missing label field\") def instance_to_conll(inst): tab = [v for k, v", "the transformed dataset instance \"\"\" dataset = self if in_place else self.__class__(OrderedDict([(name, data[:])", "self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items()])) for name, convert in converters.items(): if", "from the dictionary, then it will not be transformed. :param in_place: Whether to", "header[0] = header[0].lstrip('# ') fields = OrderedDict([(head, []) for head in header]) fields['label']", "def load_conll(cls, fname): \"\"\" The CONLL file must have a tab delimited header,", "in_place=False): \"\"\" Applies transformations to the dataset. :param converters: A dictionary specifying the", "if cache: process_cache(cache, fields) return cls(fields) def write_conll(self, fname): \"\"\" Serializes the dataset", "for name, data in self.fields.items()])) for name, convert in converters.items(): if name not", "in self.fields: raise InvalidFieldsException(\"dataset is not in CONLL format: missing label field\") def", "value: Sets the instances at index/indices `key` to the instances(s) `value` \"\"\" for", "raise InvalidFieldsException(\"dataset is not in CONLL format: missing label field\") def instance_to_conll(inst): tab", "length: raise InvalidFieldsException('field {} has length {} but field {} has length {}'.format(length_field,", "= pad_len or max_len assert pad_len >= max_len, 'pad_len {} must be greater", "fields will be kept :return: A deep copy of the dataset (each instance", "t3 is t4 alice t5 Bob I'm t1 bob t2 Here, the fields", "from collections import OrderedDict import random import numpy as np class InvalidFieldsException(Exception): pass", "{k: [] for k in fields if k != 'label'} for l in", "v in instance.items(): fields[k].append(v) cache = [] with open(fname) as f: header =", "has the label `Alice` and the description `['Hello', 'my', 'name', 'is', 'alice']` and", "\"\"\" return OrderedDict([(name, data[item]) for name, data in self.fields.items()]) def __setitem__(self, key, value):", "\"\"\" order = range(len(self)) random.shuffle(order) for name, data in self.fields.items(): reindexed = []", ":return: the transformed dataset instance \"\"\" dataset = self if in_place else self.__class__(OrderedDict([(name,", "random order :return: the shuffled dataset instance \"\"\" order = range(len(self)) random.shuffle(order) for", "must be greater or equal to the longest sequence {}'.format(pad_len, max_len) for i,", "the dataset. :param converters: A dictionary specifying the function to apply to each", "load_conll(cls, fname): \"\"\" The CONLL file must have a tab delimited header, for", "__author__ = 'victor' from collections import OrderedDict import random import numpy as np", "to load the dataset :return: loaded Dataset instance \"\"\" def process_cache(cache, fields): cache", "field\") def instance_to_conll(inst): tab = [v for k, v in inst.items() if k", "1:5) :param value: Sets the instances at index/indices `key` to the instances(s) `value`", "l[i]) for k, v in instance.items(): fields[k].append(v) cache = [] with open(fname) as", "\"\"\" if len(self.fields) == 0: return 0 return len(self.fields.values()[0]) def __repr__(self): return \"{}({})\".format(self.__class__.__name__,", "cache.append(line) else: # met empty line, process cache process_cache(cache, fields) cache = []", "convert(self, converters, in_place=False): \"\"\" Applies transformations to the dataset. 

    @classmethod
    def load_conll(cls, fname):
        """
        The CONLL file must have a tab delimited header, for example::

            # description   tags
            Alice
            Hello   t1
            my      t2
            name    t3
            is      t4
            alice   t5

            Bob
            I'm     t1
            bob     t2

        Here, the fields are `description` and `tags`. The first instance has
        the label `Alice`, the description `['Hello', 'my', 'name', 'is', 'alice']`,
        and the tags `['t1', 't2', 't3', 't4', 't5']`. The second instance has
        the label `Bob`, the description `["I'm", 'bob']`, and the tags
        `['t1', 't2']`.

        :param fname: The CONLL formatted file from which to load the dataset

        :return: loaded Dataset instance
        """
        def process_cache(cache, fields):
            cache = [l.split() for l in cache if l]
            if not cache:
                return None
            fields['label'].append(cache[0][0])
            instance = {k: [] for k in fields if k != 'label'}
            for l in cache[1:]:
                for i, k in enumerate(fields):
                    if k != 'label':
                        instance[k].append(None if l[i] == '-' else l[i])
            for k, v in instance.items():
                fields[k].append(v)

        cache = []
        with open(fname) as f:
            header = f.next().strip().split('\t')
            header[0] = header[0].lstrip('# ')
            fields = OrderedDict([(head, []) for head in header])
            fields['label'] = []
            for line in f:
                line = line.strip()
                if line:
                    cache.append(line)
                else:
                    # met empty line, process cache
                    process_cache(cache, fields)
                    cache = []
            if cache:
                process_cache(cache, fields)
        return cls(fields)
\"\"\" return OrderedDict([(name, data[item]) for name, data in self.fields.items()]) def __setitem__(self, key,", "for k, v in inst.items() if k != 'label'] return '{}\\n{}'.format(inst['label'], '\\n'.join(['\\t'.join(['-' if", "'label'] return '{}\\n{}'.format(inst['label'], '\\n'.join(['\\t'.join(['-' if e is None else str(e) for e in", "to the instances(s) `value` \"\"\" for name, data in self.fields.items(): if name not", "!= 'label'} for l in cache[1:]: for i, k in enumerate(fields): if k", "in inst.items() if k != 'label'] return '{}\\n{}'.format(inst['label'], '\\n'.join(['\\t'.join(['-' if e is None", "== 0: return 0 return len(self.fields.values()[0]) def __repr__(self): return \"{}({})\".format(self.__class__.__name__, ', '.join(self.fields.keys())) @classmethod", "Generic Dataset object that encapsulates a list of instances. The dataset stores the", "None: length = len(d) length_field = name else: if len(d) != length: raise", "order = range(len(self)) random.shuffle(order) for name, data in self.fields.items(): reindexed = [] for", "pad(cls, sequences, padding, pad_len=None): \"\"\" Pads a list of sequences such that they", "Each field maps to a list, the ith element of the list for", "conversion to/from CONLL format, among others. Example: .. code-block:: python d = Dataset({'Name':", "convert in converters.items(): if name not in self.fields.keys(): raise InvalidFieldsException('Converter specified for non-existent", "data: {}'.format(name, value)) data[key] = value[name] def __iter__(self): \"\"\" :return: A iterator over", "copy of the dataset (each instance is copied). \"\"\" keep_fields = self.fields.keys() or", "bob t2 Here, the fields are `description` and `tags`. The first instance has", "\"\"\" :param item: An integer index or a slice (eg. 2, 1:, 1:5)", "instance_to_conll(inst): tab = [v for k, v in inst.items() if k != 'label']", ":param item: An integer index or a slice (eg. 2, 1:, 1:5) :return:", "A Dataset object \"\"\" self.fields = OrderedDict(fields) length = None length_field = None", "others. Example: .. code-block:: python d = Dataset({'Name': ['Alice', 'Bob', 'Carol', 'David', 'Ellen'],", "for i, k in enumerate(fields): if k != 'label': instance[k].append(None if l[i] ==", "be greater or equal to the longest sequence {}'.format(pad_len, max_len) for i, s", "process_cache(cache, fields) return cls(fields) def write_conll(self, fname): \"\"\" Serializes the dataset in CONLL", "for name, data in self.fields.items(): if name not in value: raise InvalidFieldsException('field {}", "Dataset module for managing text datasets. \"\"\" __author__ = 'victor' from collections import", "self.fields.items()])) for name, convert in converters.items(): if name not in self.fields.keys(): raise InvalidFieldsException('Converter", "transformations to the dataset. :param converters: A dictionary specifying the function to apply", "= line.strip() if line: cache.append(line) else: # met empty line, process cache process_cache(cache,", "function to apply to each field. If a field is missing from the", "return dataset def shuffle(self): \"\"\" Re-indexes the dataset in random order :return: the", "list of sequences of varying lengths. :param padding: the value of padded cells.", "the length of the maximum padded sequence. 
\"\"\" max_len = max([len(s) for s", "{}'.format('\\t'.join([k for k in self.fields if k != 'label']))) for i, d in", "for name, data in self.fields.items(): reindexed = [] for _, i in enumerate(order):", "= 'victor' from collections import OrderedDict import random import numpy as np class", "a list of the values of the instances in the dataset. :return: A", "if length is None: length = len(d) length_field = name else: if len(d)", "\"\"\" def process_cache(cache, fields): cache = [l.split() for l in cache if l]", "reindexed return self def __getitem__(self, item): \"\"\" :param item: An integer index or", ":param key: An integer index or a slice (eg. 2, 1:, 1:5) :param", "instance in the dataset. The dataset object supports indexing, iterating, slicing (eg. for", "def pad(cls, sequences, padding, pad_len=None): \"\"\" Pads a list of sequences such that", "or keep_fields return self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items() if name in", "len(d) != length: raise InvalidFieldsException('field {} has length {} but field {} has", "yield self[i] def copy(self, keep_fields=None): \"\"\" :param keep_fields: if specified, then only the", "instance \"\"\" dataset = self if in_place else self.__class__(OrderedDict([(name, data[:]) for name, data", "copy(self, keep_fields=None): \"\"\" :param keep_fields: if specified, then only the given fields will", "kept :return: A deep copy of the dataset (each instance is copied). \"\"\"", "in which a key is the name of an attribute and a value", "t1 my t2 name t3 is t4 alice t5 Bob I'm t1 bob", "instance :return: the transformed dataset instance \"\"\" dataset = self if in_place else", "a slice (eg. 2, 1:, 1:5) :param value: Sets the instances at index/indices", "CONLL format to fname \"\"\" if 'label' not in self.fields: raise InvalidFieldsException(\"dataset is", "(eg. 2, 1:, 1:5) :param value: Sets the instances at index/indices `key` to", "!= 'label']))) for i, d in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d))) if i != len(self) -", "= value[name] def __iter__(self): \"\"\" :return: A iterator over the instances in the", "raise InvalidFieldsException('field {} has length {} but field {} has length {}'.format(length_field, length,", "name, data in self.fields.items(): if name not in value: raise InvalidFieldsException('field {} is", "2, 1:, 1:5) :param value: Sets the instances at index/indices `key` to the", "number of instances in the dataset. \"\"\" if len(self.fields) == 0: return 0", "23, 45, 56, 7890]}) print(d) # Dataset(Name, SSN) print(d[2]) # OrderedDict([('SSN', 45), ('Name',", "# description tags Alice Hello t1 my t2 name t3 is t4 alice", "field {}'.format(name)) for i, d in enumerate(dataset.fields[name]): dataset.fields[name][i] = convert(d) return dataset def", "[] for _, i in enumerate(order): reindexed.append(data[i]) self.fields[name] = reindexed return self def", ":return: the shuffled dataset instance \"\"\" order = range(len(self)) random.shuffle(order) for name, data", "keep_fields return self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items() if name in keep_fields]))", "Here, the fields are `description` and `tags`. 

    def convert(self, converters, in_place=False):
        """
        Applies transformations to the dataset.

        :param converters: A dictionary specifying the function to apply to
            each field. If a field is missing from the dictionary, then it
            will not be transformed.

        :param in_place: Whether to perform the transformation in place or
            create a new dataset instance

        :return: the transformed dataset instance
        """
        dataset = self if in_place else self.__class__(
            OrderedDict([(name, data[:]) for name, data in self.fields.items()]))
        for name, convert in converters.items():
            if name not in self.fields.keys():
                raise InvalidFieldsException(
                    'Converter specified for non-existent field {}'.format(name))
            for i, d in enumerate(dataset.fields[name]):
                dataset.fields[name][i] = convert(d)
        return dataset
\"\"\" max_len = max([len(s) for s in sequences]) pad_len = pad_len", "head in header]) fields['label'] = [] for line in f: line = line.strip()", "OrderedDict([(name, data[item]) for name, data in self.fields.items()]) def __setitem__(self, key, value): \"\"\" :param", "if i != len(self) - 1: f.write('\\n') def convert(self, converters, in_place=False): \"\"\" Applies", "if name not in self.fields.keys(): raise InvalidFieldsException('Converter specified for non-existent field {}'.format(name)) for", "= OrderedDict([(head, []) for head in header]) fields['label'] = [] for line in", "data[key] = value[name] def __iter__(self): \"\"\" :return: A iterator over the instances in", "inst.items() if k != 'label'] return '{}\\n{}'.format(inst['label'], '\\n'.join(['\\t'.join(['-' if e is None else", "key: An integer index or a slice (eg. 2, 1:, 1:5) :param value:", "None for name, d in fields.items(): if length is None: length = len(d)", "i, k in enumerate(fields): if k != 'label': instance[k].append(None if l[i] == '-'", "fields): \"\"\" :param fields: An ordered dictionary in which a key is the", "i != len(self) - 1: f.write('\\n') def convert(self, converters, in_place=False): \"\"\" Applies transformations", "the ith element of the list for field 'foo' corresponds to the attribute", "list for field 'foo' corresponds to the attribute 'foo' for the ith instance", "[] if cache: process_cache(cache, fields) return cls(fields) def write_conll(self, fname): \"\"\" Serializes the", "len(self) - 1: f.write('\\n') def convert(self, converters, in_place=False): \"\"\" Applies transformations to the", "OrderedDict(fields) length = None length_field = None for name, d in fields.items(): if", "not in value: raise InvalidFieldsException('field {} is missing in input data: {}'.format(name, value))", "only the given fields will be kept :return: A deep copy of the", "instances. The dataset stores the instances in an ordered dictionary of fields. Each", "integer index or a slice (eg. 2, 1:, 1:5) :return: an ordered dictionary", "in enumerate(sequences): sequences[i] = [padding] * (pad_len - len(s)) + s return np.array(sequences)", "over batches), shuffling, conversion to/from CONLL format, among others. Example: .. code-block:: python", "list of the values of the instances in the dataset. :return: A Dataset", "in cache if l] if not cache: return None fields['label'].append(cache[0][0]) instance = {k:", "= header[0].lstrip('# ') fields = OrderedDict([(head, []) for head in header]) fields['label'] =", "If a field is missing from the dictionary, then it will not be", "sequence. \"\"\" max_len = max([len(s) for s in sequences]) pad_len = pad_len or", "converters, in_place=False): \"\"\" Applies transformations to the dataset. :param converters: A dictionary specifying", "ordered dictionary of fields. Each field maps to a list, the ith element", ":param in_place: Whether to perform the transformation in place or create a new", "among others. Example: .. code-block:: python d = Dataset({'Name': ['Alice', 'Bob', 'Carol', 'David',", "['Bob', 'Carol'])]) for e in d: print(e) # OrderedDict([('SSN', 1), ('Name', 'Alice')]) ...", "return 0 return len(self.fields.values()[0]) def __repr__(self): return \"{}({})\".format(self.__class__.__name__, ', '.join(self.fields.keys())) @classmethod def load_conll(cls,", "'t2']`. 

    def __getitem__(self, item):
        """
        :param item: An integer index or a slice (eg. 2, 1:, 1:5)

        :return: an ordered dictionary of the instance(s) at index/indices `item`.
        """
        return OrderedDict([(name, data[item]) for name, data in self.fields.items()])

    def __setitem__(self, key, value):
        """
        :param key: An integer index or a slice (eg. 2, 1:, 1:5)

        :param value: Sets the instances at index/indices `key` to the instance(s) `value`
        """
        for name, data in self.fields.items():
            if name not in value:
                raise InvalidFieldsException(
                    'field {} is missing in input data: {}'.format(name, value))
            data[key] = value[name]

    def __iter__(self):
        """
        :return: An iterator over the instances in the dataset
        """
        for i in xrange(len(self)):
            yield self[i]

    def copy(self, keep_fields=None):
        """
        :param keep_fields: if specified, then only the given fields will be kept

        :return: A deep copy of the dataset (each instance is copied).
        """
        keep_fields = keep_fields or self.fields.keys()
        return self.__class__(OrderedDict(
            [(name, data[:]) for name, data in self.fields.items()
             if name in keep_fields]))
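
    # Usage sketch (illustrative): keep_fields restricts the copy to a subset
    # of fields; the field lists are shallow-copied, so the copy can be
    # mutated without touching the original.
    #
    #     >>> d = Dataset(OrderedDict([('Name', ['Alice']), ('Age', [30])]))
    #     >>> d.copy(keep_fields=['Name'])
    #     Dataset(Name)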
:param converters:", "reindexed.append(data[i]) self.fields[name] = reindexed return self def __getitem__(self, item): \"\"\" :param item: An", "[] with open(fname) as f: header = f.next().strip().split('\\t') header[0] = header[0].lstrip('# ') fields", "file from which to load the dataset :return: loaded Dataset instance \"\"\" def", "for i in xrange(len(self)): yield self[i] def copy(self, keep_fields=None): \"\"\" :param keep_fields: if", "header[0].lstrip('# ') fields = OrderedDict([(head, []) for head in header]) fields['label'] = []", "i, s in enumerate(sequences): sequences[i] = [padding] * (pad_len - len(s)) + s", "the attribute 'foo' for the ith instance in the dataset. The dataset object", "instance has the label `Alice` and the description `['Hello', 'my', 'name', 'is', 'alice']`", "description `[\"I'm\", 'bob']` and the tags `['t1', 't2']`. :param fname: The CONLL formatted", "k != 'label': instance[k].append(None if l[i] == '-' else l[i]) for k, v", "data in self.fields.items() if name in keep_fields])) @classmethod def pad(cls, sequences, padding, pad_len=None):", "format: missing label field\") def instance_to_conll(inst): tab = [v for k, v in", "instance = {k: [] for k in fields if k != 'label'} for", "line = line.strip() if line: cache.append(line) else: # met empty line, process cache", "self.fields if k != 'label']))) for i, d in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d))) if i", "name of an attribute and a value is a list of the values", "for example:: # description tags Alice Hello t1 my t2 name t3 is", "index/indices `item`. \"\"\" return OrderedDict([(name, data[item]) for name, data in self.fields.items()]) def __setitem__(self,", "'Carol')]) print(d[1:3]) # OrderedDict([('SSN', [23, 45]), ('Name', ['Bob', 'Carol'])]) for e in d:", "copied). \"\"\" keep_fields = self.fields.keys() or keep_fields return self.__class__(OrderedDict([(name, data[:]) for name, data", "perform the transformation in place or create a new dataset instance :return: the", "converters.items(): if name not in self.fields.keys(): raise InvalidFieldsException('Converter specified for non-existent field {}'.format(name))", "code-block:: python d = Dataset({'Name': ['Alice', 'Bob', 'Carol', 'David', 'Ellen'], 'SSN': [1, 23,", "if line: cache.append(line) else: # met empty line, process cache process_cache(cache, fields) cache", "dataset in CONLL format to fname \"\"\" if 'label' not in self.fields: raise", "over the instances in the dataset \"\"\" for i in xrange(len(self)): yield self[i]", "l in cache[1:]: for i, k in enumerate(fields): if k != 'label': instance[k].append(None", "fname): \"\"\" Serializes the dataset in CONLL format to fname \"\"\" if 'label'", "return '{}\\n{}'.format(inst['label'], '\\n'.join(['\\t'.join(['-' if e is None else str(e) for e in row])", "or a slice (eg. 2, 1:, 1:5) :param value: Sets the instances at", "raise InvalidFieldsException('field {} is missing in input data: {}'.format(name, value)) data[key] = value[name]", "d in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d))) if i != len(self) - 1: f.write('\\n') def convert(self,", "keep_fields: if specified, then only the given fields will be kept :return: A", "instance is copied). \"\"\" keep_fields = self.fields.keys() or keep_fields return self.__class__(OrderedDict([(name, data[:]) for", "name, d in fields.items(): if length is None: length = len(d) length_field =", "the dataset. The dataset object supports indexing, iterating, slicing (eg. for iterating over", "of fields. 
Each field maps to a list, the ith element of the", "slicing (eg. for iterating over batches), shuffling, conversion to/from CONLL format, among others.", "shuffling, conversion to/from CONLL format, among others. Example: .. code-block:: python d =", "<gh_stars>1-10 \"\"\" Dataset module for managing text datasets. \"\"\" __author__ = 'victor' from", "!= length: raise InvalidFieldsException('field {} has length {} but field {} has length", "length, name, len(d))) def __len__(self): \"\"\" :return: The number of instances in the", "not be transformed. :param in_place: Whether to perform the transformation in place or", "are `description` and `tags`. The first instance has the label `Alice` and the", "is the name of an attribute and a value is a list of", "_, i in enumerate(order): reindexed.append(data[i]) self.fields[name] = reindexed return self def __getitem__(self, item):", ":param sequences: a list of sequences of varying lengths. :param padding: the value", "t2 name t3 is t4 alice t5 Bob I'm t1 bob t2 Here,", "(eg. for iterating over batches), shuffling, conversion to/from CONLL format, among others. Example:", "length = None length_field = None for name, d in fields.items(): if length", "cache = [] if cache: process_cache(cache, fields) return cls(fields) def write_conll(self, fname): \"\"\"", "deep copy of the dataset (each instance is copied). \"\"\" keep_fields = self.fields.keys()", "that they form a matrix. :param sequences: a list of sequences of varying", "Bob I'm t1 bob t2 Here, the fields are `description` and `tags`. The", "(eg. 2, 1:, 1:5) :return: an ordered dictionary of the instance(s) at index/indices", "'t5']`. The second instance has the label `Bob` and the description `[\"I'm\", 'bob']`", "k in self.fields if k != 'label']))) for i, d in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d)))", "`value` \"\"\" for name, data in self.fields.items(): if name not in value: raise", "line, process cache process_cache(cache, fields) cache = [] if cache: process_cache(cache, fields) return", "for head in header]) fields['label'] = [] for line in f: line =", "field. If a field is missing from the dictionary, then it will not", "zip(*tab)])) with open(fname, 'wb') as f: f.write('# {}'.format('\\t'.join([k for k in self.fields if", "list, the ith element of the list for field 'foo' corresponds to the", "encapsulates a list of instances. The dataset stores the instances in an ordered", ":param converters: A dictionary specifying the function to apply to each field. If", "the ith instance in the dataset. The dataset object supports indexing, iterating, slicing", "then it will not be transformed. :param in_place: Whether to perform the transformation", "transformed. :param in_place: Whether to perform the transformation in place or create a", "instances in the dataset. \"\"\" if len(self.fields) == 0: return 0 return len(self.fields.values()[0])", "for row in zip(*tab)])) with open(fname, 'wb') as f: f.write('# {}'.format('\\t'.join([k for k", "in enumerate(dataset.fields[name]): dataset.fields[name][i] = convert(d) return dataset def shuffle(self): \"\"\" Re-indexes the dataset", "dataset def shuffle(self): \"\"\" Re-indexes the dataset in random order :return: the shuffled", "reindexed = [] for _, i in enumerate(order): reindexed.append(data[i]) self.fields[name] = reindexed return", "'my', 'name', 'is', 'alice']` and the tags `['t1', 't2', 't3', 't4', 't5']`. The", "for the ith instance in the dataset. 
The dataset object supports indexing, iterating,", "length_field = name else: if len(d) != length: raise InvalidFieldsException('field {} has length", "tags `['t1', 't2']`. :param fname: The CONLL formatted file from which to load", "in self.fields if k != 'label']))) for i, d in enumerate(self): f.write('\\n{}'.format(instance_to_conll(d))) if", "equal to the longest sequence {}'.format(pad_len, max_len) for i, s in enumerate(sequences): sequences[i]", "a matrix. :param sequences: a list of sequences of varying lengths. :param padding:", "length_field = None for name, d in fields.items(): if length is None: length" ]
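
A short usage sketch for the class above. It is illustrative only: the file name 'pos.conll' and the 'description' field are made-up examples, not artifacts shipped with the module.

# round trip: load a CONLL file, lowercase one field, shuffle, and save
d = Dataset.read_conll('pos.conll')
d = d.convert({'description': lambda words: [w.lower() for w in words]})
d.shuffle()
d.write_conll('pos.shuffled.conll')

# pad variable-length sequences into a matrix for batching
matrix = Dataset.pad([[1, 2, 3], [4]], padding=0, pad_len=4)
# rows are left-padded: [0, 1, 2, 3] and [0, 0, 0, 4]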
# Twitter AUTH:
APP_KEY = 'APP_KEY_HERE'
APP_SECRET = 'APP_SECRET_HERE'
OAUTH_TOKEN = 'TOKEN_HERE'
OAUTH_TOKEN_SECRET = 'TOKEN_SECRET_HERE'

# Telegram options:
TELEGRAM_CHANNEL = 'CHANNEL_NAME_HERE'
TELEGRAM_TOKEN = 'TOKEN_HERE'

# Misc:
TWITTER_USER_NAME = 'USER_NAME_HERE'
MSG = '<b>{NAME}</b>:\n{TEXT}\n\n<a href="{URL}">Source</a>'

# Technical stuff:
TWEET_BASE_URL = 'https://twitter.com/i/web/status/'
STATE_FILE = 'state.p'
SLEEP = 3
TG_LINK = 'https://api.telegram.org/bot{TOKEN}/sendMessage?chat_id=@{CHANNEL}&text={MESSAGE}&parse_mode=html'
UNSUPPORTED_TAGS = ['<span class="twython-tweet-suffix">',
                    '<span class="twython-tweet-prefix">',
                    '</span>',
                    'class="twython-url"',
                    'class="twython-media"',
                    'class="twython-mention"',
                    'class="twython-hashtag"',
                    'class="twython-symbol"',
                    ]
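
The constants above are consumed by the bot's main loop, which is not part of this file. A minimal sketch of how they fit together, assuming the requests library is available; the helper name post_to_telegram, its arguments, and the bare GET call are illustrative assumptions, not the bot's actual code.

import time
import requests
from urllib.parse import quote

def post_to_telegram(name, text, tweet_id):
    # drop the twython markup fragments that Telegram's HTML parser rejects
    for tag in UNSUPPORTED_TAGS:
        text = text.replace(tag, '')
    message = MSG.format(NAME=name, TEXT=text, URL=TWEET_BASE_URL + str(tweet_id))
    url = TG_LINK.format(TOKEN=TELEGRAM_TOKEN, CHANNEL=TELEGRAM_CHANNEL,
                         MESSAGE=quote(message))
    requests.get(url)  # the Bot API answers with a JSON payload
    time.sleep(SLEEP)  # throttle between consecutive messages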
import os
import sys
import time
import torch
import utils
import logging
import argparse
import torch.nn as nn
import torch.utils

from adaptive_augmentor import AdaAug
from networks import get_model
from networks.projection import Projection
from dataset import get_num_class, get_dataloaders, get_label_name, get_dataset_dimension
from config import get_warmup_config
from warmup_scheduler import GradualWarmupScheduler

parser = argparse.ArgumentParser("ada_aug")
parser.add_argument('--dataroot', type=str, default='./', help='location of the data corpus')
parser.add_argument('--dataset', type=str, default='cifar10', help='name of dataset')
parser.add_argument('--train_portion', type=float, default=0.5, help='portion of training data')
parser.add_argument('--batch_size', type=int, default=96, help='batch size')
parser.add_argument('--num_workers', type=int, default=0, help="num_workers")
parser.add_argument('--learning_rate', type=float, default=0.025, help='init learning rate')
parser.add_argument('--learning_rate_min', type=float, default=0.0001, help='min learning rate')
parser.add_argument('--momentum', type=float, default=0.9, help='momentum')
parser.add_argument('--weight_decay', type=float, default=3e-4, help='weight decay')
parser.add_argument('--grad_clip', type=float, default=5, help='gradient clipping')
parser.add_argument('--use_cuda', type=bool, default=True, help="use cuda default True")
parser.add_argument('--gpu', type=int, default=0, help='gpu device id')
parser.add_argument('--use_parallel', action='store_true', default=False, help="use data parallel default False")
parser.add_argument('--model_name', type=str, default='wresnet40_2', help="model name")
parser.add_argument('--model_path', type=str, default='saved_models', help='path to save the model')
parser.add_argument('--cutout', action='store_true', default=False, help='use cutout')
parser.add_argument('--cutout_length', type=int, default=16, help='cutout length')
parser.add_argument('--drop_path_prob', type=float, default=0.2, help='drop path probability')
parser.add_argument('--epochs', type=int, default=600, help='number of training epochs')
parser.add_argument('--report_freq', type=float, default=50, help='report frequency')
parser.add_argument('--save', type=str, default='EXP', help='experiment name')
parser.add_argument('--seed', type=int, default=0, help='seed')
parser.add_argument('--search_dataset', type=str, default='./', help='search dataset name')
parser.add_argument('--gf_model_name', type=str, default='./', help='gf_model name')
parser.add_argument('--gf_model_path', type=str, default='./', help='gf_model path')
parser.add_argument('--h_model_path', type=str, default='./', help='h_model path')
parser.add_argument('--k_ops', type=int, default=1, help="number of augmentation applied during training")
parser.add_argument('--delta', type=float, default=0.3, help="degree of perturbation in magnitude")
parser.add_argument('--temperature', type=float, default=1.0, help="temperature")
parser.add_argument('--n_proj_layer', type=int, default=0, help="number of additional hidden layer in augmentation policy projection")
parser.add_argument('--n_proj_hidden', type=int, default=128, help="number of hidden units in augmentation policy projection layers")
parser.add_argument('--restore_path', type=str, default='./', help='restore model path')
parser.add_argument('--restore', action='store_true', default=False, help='restore model default False')
args = parser.parse_args()

debug = True if args.save == "debug" else False
args.save = '{}-{}'.format(time.strftime("%Y%m%d-%H%M%S"), args.save)
if debug:
    args.save = os.path.join('debug', args.save)
else:
    args.save = os.path.join('eval', args.dataset, args.save)
utils.create_exp_dir(args.save)
log_format = '%(asctime)s %(message)s'
logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                    format=log_format, datefmt='%m/%d %I:%M:%S %p')
fh = logging.FileHandler(os.path.join(args.save, 'log.txt'))
fh.setFormatter(logging.Formatter(log_format))
logging.getLogger().addHandler(fh)


def main():
    if not torch.cuda.is_available():
        logging.info('no gpu device available')
        sys.exit(1)

    torch.cuda.set_device(args.gpu)
    utils.reproducibility(args.seed)
    logging.info('gpu device = %d' % args.gpu)
    logging.info("args = %s", args)

    # dataset settings
    n_class = get_num_class(args.dataset)
    class2label = get_label_name(args.dataset, args.dataroot)
    train_queue, valid_queue, _, test_queue = get_dataloaders(
        args.dataset, args.batch_size, args.num_workers,
        args.dataroot, args.cutout, args.cutout_length,
        split=args.train_portion, split_idx=0, target_lb=-1, search=True)
    logging.info(f'Dataset: {args.dataset}')
    logging.info(f' |total: {len(train_queue.dataset)}')
    logging.info(f' |train: {len(train_queue)*args.batch_size}')
    logging.info(f' |valid: {len(valid_queue)*args.batch_size}')

    # task model settings
    task_model = get_model(model_name=args.model_name, num_class=n_class,
                           use_cuda=True, data_parallel=False)
    logging.info("param size = %fMB", utils.count_parameters_in_MB(task_model))

    # task optimization settings
    optimizer = torch.optim.SGD(
        task_model.parameters(),
        args.learning_rate,
        momentum=args.momentum,
        weight_decay=args.weight_decay,
        nesterov=True
    )
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
        optimizer, float(args.epochs), eta_min=args.learning_rate_min)
    m, e = get_warmup_config(args.dataset)
    scheduler = GradualWarmupScheduler(
        optimizer, multiplier=m, total_epoch=e, after_scheduler=scheduler)
    logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing, warmup: {m}/{e}')
    criterion = nn.CrossEntropyLoss()
    criterion = criterion.cuda()

    # restore setting
    if args.restore:
        trained_epoch = utils.restore_ckpt(task_model, optimizer, scheduler,
                                           args.restore_path, location=args.gpu) + 1
        n_epoch = args.epochs - trained_epoch
        logging.info(f'Restoring model from {args.restore_path}, starting from epoch {trained_epoch}')
    else:
        trained_epoch = 0
        n_epoch = args.epochs

    # load trained adaaug sub models
    search_n_class = get_num_class(args.search_dataset)
    gf_model = get_model(model_name=args.gf_model_name, num_class=search_n_class,
                         use_cuda=True, data_parallel=False)
    h_model = Projection(in_features=gf_model.fc.in_features,
                         n_layers=args.n_proj_layer,
                         n_hidden=args.n_proj_hidden).cuda()
    utils.load_model(gf_model, f'{args.gf_model_path}/gf_weights.pt', location=args.gpu)
    utils.load_model(h_model, f'{args.h_model_path}/h_weights.pt', location=args.gpu)
    for param in gf_model.parameters():
        param.requires_grad = False
    for param in h_model.parameters():
        param.requires_grad = False

    after_transforms = train_queue.dataset.after_transforms
    adaaug_config = {'sampling': 'prob',
                     'k_ops': args.k_ops,
                     'delta': args.delta,
                     'temp': args.temperature,
                     'search_d': get_dataset_dimension(args.search_dataset),
                     'target_d': get_dataset_dimension(args.dataset)}
    adaaug = AdaAug(after_transforms=after_transforms,
                    n_class=search_n_class,
                    gf_model=gf_model,
                    h_model=h_model,
                    save_dir=args.save,
                    config=adaaug_config)

    # start training
    for i_epoch in range(n_epoch):
        epoch = trained_epoch + i_epoch
        lr = scheduler.get_last_lr()[0]
        logging.info('epoch %d lr %e', epoch, lr)

        train_acc, train_obj = train(
            train_queue, task_model, criterion, optimizer, epoch, args.grad_clip, adaaug)
        logging.info('train_acc %f', train_acc)

        valid_acc, valid_obj, _, _ = infer(valid_queue, task_model, criterion)
        logging.info('valid_acc %f', valid_acc)
        scheduler.step()

        if epoch % args.report_freq == 0:
            test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion)
            logging.info('test_acc %f %f', test_acc, test_acc5)

        utils.save_ckpt(task_model, optimizer, scheduler, epoch,
                        os.path.join(args.save, 'weights.pt'))

    adaaug.save_history(class2label)
    figure = adaaug.plot_history()
    test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion)
    logging.info('test_acc %f %f', test_acc, test_acc5)
    logging.info(f'save to {args.save}')


def train(train_queue, model, criterion, optimizer, epoch, grad_clip, adaaug):
    objs = utils.AvgrageMeter()
    top1 = utils.AvgrageMeter()
    top5 = utils.AvgrageMeter()
    for step, (input, target) in enumerate(train_queue):
        target = target.cuda(non_blocking=True)

        # get augmented training data from adaaug
        aug_images = adaaug(input, mode='exploit')
        model.train()
        optimizer.zero_grad()
        logits = model(aug_images)
        loss = criterion(logits, target)
        loss.backward()
        nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
        optimizer.step()

        prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))
        n = input.size(0)
        objs.update(loss.detach().item(), n)
        top1.update(prec1.detach().item(), n)
        top5.update(prec5.detach().item(), n)

        global_step = step + epoch * len(train_queue)
        if global_step % args.report_freq == 0:
            logging.info('train %03d %e %f %f', global_step, objs.avg, top1.avg, top5.avg)

        # log the policy
        if step == 0:
            adaaug.add_history(input, target)

    return top1.avg, objs.avg


def infer(valid_queue, model, criterion):
    objs = utils.AvgrageMeter()
    top1 = utils.AvgrageMeter()
    top5 = utils.AvgrageMeter()
    model.eval()
    with torch.no_grad():
        for input, target in valid_queue:
            input = input.cuda()
            target = target.cuda(non_blocking=True)

            logits = model(input)
            loss = criterion(logits, target)

            prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))
            n = input.size(0)
            objs.update(loss.detach().item(), n)
            top1.update(prec1.detach().item(), n)
            top5.update(prec5.detach().item(), n)

    return top1.avg, objs.avg, top5.avg, objs.avg


if __name__ == '__main__':
    main()
default=0.9, help='momentum') parser.add_argument('--weight_decay', type=float, default=3e-4, help='weight decay')", "type=float, default=3e-4, help='weight decay') parser.add_argument('--grad_clip', type=float, default=5, help='gradient clipping') parser.add_argument('--use_cuda', type=bool, default=True, help=\"use", "= parser.parse_args() debug = True if args.save == \"debug\" else False args.save =", "clipping') parser.add_argument('--use_cuda', type=bool, default=True, help=\"use cuda default True\") parser.add_argument('--gpu', type=int, default=0, help='gpu device", "type=str, default='./', help='search dataset name') parser.add_argument('--gf_model_name', type=str, default='./', help='gf_model name') parser.add_argument('--gf_model_path', type=str, default='./',", "data corpus') parser.add_argument('--dataset', type=str, default='cifar10', help='name of dataset') parser.add_argument('--train_portion', type=float, default=0.5, help='portion of", "type=str, default='./', help='gf_model name') parser.add_argument('--gf_model_path', type=str, default='./', help='gf_model path') parser.add_argument('--h_model_path', type=str, default='./', help='h_model", "logging.info(f' |train: {len(train_queue)*args.batch_size}') logging.info(f' |valid: {len(valid_queue)*args.batch_size}') # task model settings task_model = get_model(model_name=args.model_name,", "total_epoch=e, after_scheduler=scheduler) logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing, warmup: {m}/{e}') criterion = nn.CrossEntropyLoss() criterion =", "main(): if not torch.cuda.is_available(): logging.info('no gpu device available') sys.exit(1) torch.cuda.set_device(args.gpu) utils.reproducibility(args.seed) logging.info('gpu device", "target in valid_queue: input = input.cuda() target = target.cuda(non_blocking=True) logits = model(input) loss", "in h_model.parameters(): param.requires_grad = False after_transforms = train_queue.dataset.after_transforms adaaug_config = {'sampling': 'prob', 'k_ops':", "h_model.parameters(): param.requires_grad = False after_transforms = train_queue.dataset.after_transforms adaaug_config = {'sampling': 'prob', 'k_ops': args.k_ops,", "argparse.ArgumentParser(\"ada_aug\") parser.add_argument('--dataroot', type=str, default='./', help='location of the data corpus') parser.add_argument('--dataset', type=str, default='cifar10', help='name", "_ = infer(test_queue, task_model, criterion) logging.info('test_acc %f %f', test_acc, test_acc5) utils.save_ckpt(task_model, optimizer, scheduler,", "import GradualWarmupScheduler parser = argparse.ArgumentParser(\"ada_aug\") parser.add_argument('--dataroot', type=str, default='./', help='location of the data corpus')", "parser.add_argument('--model_name', type=str, default='wresnet40_2', help=\"model name\") parser.add_argument('--model_path', type=str, default='saved_models', help='path to save the model')", "i_epoch lr = scheduler.get_last_lr()[0] logging.info('epoch %d lr %e', epoch, lr) train_acc, train_obj =", "type=int, default=1, help=\"number of augmentation applied during training\") parser.add_argument('--delta', type=float, default=0.3, help=\"degree of", "help='use cutout') parser.add_argument('--cutout_length', type=int, default=16, help='cutout length') parser.add_argument('--drop_path_prob', type=float, default=0.2, help='drop path probability')", "= torch.optim.lr_scheduler.CosineAnnealingLR( optimizer, float(args.epochs), eta_min=args.learning_rate_min) m, e = get_warmup_config(args.dataset) scheduler = GradualWarmupScheduler( optimizer,", 
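# Example invocation (a sketch only: the dataset and model names are the
# argparse defaults above, the script filename and the two search-model paths
# are hypothetical placeholders for wherever the AdaAug search stage wrote
# gf_weights.pt and h_weights.pt):
#
#   python train.py --dataset cifar10 --model_name wresnet40_2 \
#       --search_dataset cifar10 --gf_model_name wresnet40_2 \
#       --gf_model_path ./search-EXP --h_model_path ./search-EXP \
#       --k_ops 2 --save EXP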
"= False after_transforms = train_queue.dataset.after_transforms adaaug_config = {'sampling': 'prob', 'k_ops': args.k_ops, 'delta': args.delta,", "prec5 = utils.accuracy(logits, target, topk=(1, 5)) n = input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n)", "args) # dataset settings n_class = get_num_class(args.dataset) class2label = get_label_name(args.dataset, args.dataroot) train_queue, valid_queue,", "default False') args = parser.parse_args() debug = True if args.save == \"debug\" else", "def train(train_queue, model, criterion, optimizer, epoch, grad_clip, adaaug): objs = utils.AvgrageMeter() top1 =", "augmentation policy projection\") parser.add_argument('--n_proj_hidden', type=int, default=128, help=\"number of hidden units in augmentation policy", "utils.accuracy(logits, target, topk=(1, 5)) n = input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n)", "top5 = utils.AvgrageMeter() model.eval() with torch.no_grad(): for input, target in valid_queue: input =", "5)) n = input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n) return top1.avg, objs.avg,", "False') args = parser.parse_args() debug = True if args.save == \"debug\" else False", "default=0.9, help='momentum') parser.add_argument('--weight_decay', type=float, default=3e-4, help='weight decay') parser.add_argument('--grad_clip', type=float, default=5, help='gradient clipping') parser.add_argument('--use_cuda',", "default=96, help='batch size') parser.add_argument('--num_workers', type=int, default=0, help=\"num_workers\") parser.add_argument('--learning_rate', type=float, default=0.025, help='init learning rate')", "in range(n_epoch): epoch = trained_epoch + i_epoch lr = scheduler.get_last_lr()[0] logging.info('epoch %d lr", "utils.AvgrageMeter() for step, (input, target) in enumerate(train_queue): target = target.cuda(non_blocking=True) # get augmented", "torch.cuda.set_device(args.gpu) utils.reproducibility(args.seed) logging.info('gpu device = %d' % args.gpu) logging.info(\"args = %s\", args) #", "optimization settings optimizer = torch.optim.SGD( task_model.parameters(), args.learning_rate, momentum=args.momentum, weight_decay=args.weight_decay, nesterov=True ) scheduler =", "test_acc5) utils.save_ckpt(task_model, optimizer, scheduler, epoch, os.path.join(args.save, 'weights.pt')) adaaug.save_history(class2label) figure = adaaug.plot_history() test_acc, test_obj,", "objs = utils.AvgrageMeter() top1 = utils.AvgrageMeter() top5 = utils.AvgrageMeter() model.eval() with torch.no_grad(): for", "target) in enumerate(train_queue): target = target.cuda(non_blocking=True) # get augmented training data from adaaug", "search=True) logging.info(f'Dataset: {args.dataset}') logging.info(f' |total: {len(train_queue.dataset)}') logging.info(f' |train: {len(train_queue)*args.batch_size}') logging.info(f' |valid: {len(valid_queue)*args.batch_size}') #", "if args.save == \"debug\" else False args.save = '{}-{}'.format(time.strftime(\"%Y%m%d-%H%M%S\"), args.save) if debug: args.save", "(input, target) in enumerate(train_queue): target = target.cuda(non_blocking=True) # get augmented training data from", "top1.avg, top5.avg) # log the policy if step == 0: adaaug.add_history(input, target) return", "in augmentation policy projection\") parser.add_argument('--n_proj_hidden', type=int, default=128, help=\"number of hidden units in augmentation", "%f %f', test_acc, 
test_acc5) logging.info(f'save to {args.save}') def train(train_queue, model, criterion, optimizer, epoch,", "if not torch.cuda.is_available(): logging.info('no gpu device available') sys.exit(1) torch.cuda.set_device(args.gpu) utils.reproducibility(args.seed) logging.info('gpu device =", "test_acc5) logging.info(f'save to {args.save}') def train(train_queue, model, criterion, optimizer, epoch, grad_clip, adaaug): objs", "scheduler, epoch, os.path.join(args.save, 'weights.pt')) adaaug.save_history(class2label) figure = adaaug.plot_history() test_acc, test_obj, test_acc5, _ =", "'target_d': get_dataset_dimension(args.dataset)} adaaug = AdaAug(after_transforms=after_transforms, n_class=search_n_class, gf_model=gf_model, h_model=h_model, save_dir=args.save, config=adaaug_config) # start training", "torch.nn as nn import torch.utils from adaptive_augmentor import AdaAug from networks import get_model", "== 0: logging.info('train %03d %e %f %f', global_step, objs.avg, top1.avg, top5.avg) # log", "additional hidden layer in augmentation policy projection\") parser.add_argument('--n_proj_hidden', type=int, default=128, help=\"number of hidden", "* len(train_queue) if global_step % args.report_freq == 0: logging.info('train %03d %e %f %f',", "adaaug): objs = utils.AvgrageMeter() top1 = utils.AvgrageMeter() top5 = utils.AvgrageMeter() for step, (input,", "top5 = utils.AvgrageMeter() for step, (input, target) in enumerate(train_queue): target = target.cuda(non_blocking=True) #", "input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n) return top1.avg, objs.avg, top5.avg, objs.avg if", "optimizer, scheduler, epoch, os.path.join(args.save, 'weights.pt')) adaaug.save_history(class2label) figure = adaaug.plot_history() test_acc, test_obj, test_acc5, _", "% args.gpu) logging.info(\"args = %s\", args) # dataset settings n_class = get_num_class(args.dataset) class2label", "utils.count_parameters_in_MB(task_model)) # task optimization settings optimizer = torch.optim.SGD( task_model.parameters(), args.learning_rate, momentum=args.momentum, weight_decay=args.weight_decay, nesterov=True", "use_cuda=True, data_parallel=False) logging.info(\"param size = %fMB\", utils.count_parameters_in_MB(task_model)) # task optimization settings optimizer =", "gf_model = get_model(model_name=args.gf_model_name, num_class=search_n_class, use_cuda=True, data_parallel=False) h_model = Projection(in_features=gf_model.fc.in_features, n_layers=args.n_proj_layer, n_hidden=args.n_proj_hidden).cuda() utils.load_model(gf_model, f'{args.gf_model_path}/gf_weights.pt',", "= scheduler.get_last_lr()[0] logging.info('epoch %d lr %e', epoch, lr) train_acc, train_obj = train( train_queue,", "default=16, help='cutout length') parser.add_argument('--drop_path_prob', type=float, default=0.2, help='drop path probability') parser.add_argument('--epochs', type=int, default=600, help='number", "sys.exit(1) torch.cuda.set_device(args.gpu) utils.reproducibility(args.seed) logging.info('gpu device = %d' % args.gpu) logging.info(\"args = %s\", args)", "== 0: adaaug.add_history(input, target) return top1.avg, objs.avg def infer(valid_queue, model, criterion): objs =", "= 0 n_epoch = args.epochs # load trained adaaug sub models search_n_class =", "action='store_true', default=False, help='use cutout') parser.add_argument('--cutout_length', type=int, default=16, help='cutout length') parser.add_argument('--drop_path_prob', type=float, default=0.2, help='drop", 
"nn.utils.clip_grad_norm_(model.parameters(), grad_clip) optimizer.step() prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5)) n = input.size(0)", "lr = scheduler.get_last_lr()[0] logging.info('epoch %d lr %e', epoch, lr) train_acc, train_obj = train(", "the data corpus') parser.add_argument('--dataset', type=str, default='cifar10', help='name of dataset') parser.add_argument('--train_portion', type=float, default=0.5, help='portion", "args.report_freq == 0: test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion) logging.info('test_acc %f", "'temp': args.temperature, 'search_d': get_dataset_dimension(args.search_dataset), 'target_d': get_dataset_dimension(args.dataset)} adaaug = AdaAug(after_transforms=after_transforms, n_class=search_n_class, gf_model=gf_model, h_model=h_model, save_dir=args.save,", "model, criterion, optimizer, epoch, grad_clip, adaaug): objs = utils.AvgrageMeter() top1 = utils.AvgrageMeter() top5", "help=\"use cuda default True\") parser.add_argument('--gpu', type=int, default=0, help='gpu device id') parser.add_argument('--use_parallel', action='store_true', default=False,", "adaaug) logging.info('train_acc %f', train_acc) valid_acc, valid_obj, _, _ = infer(valid_queue, task_model, criterion) logging.info('valid_acc", "parser.add_argument('--learning_rate', type=float, default=0.025, help='init learning rate') parser.add_argument('--learning_rate_min', type=float, default=0.0001, help='min learning rate') parser.add_argument('--momentum',", "during training\") parser.add_argument('--delta', type=float, default=0.3, help=\"degree of perturbation in magnitude\") parser.add_argument('--temperature', type=float, default=1.0,", "topk=(1, 5)) n = input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n) return top1.avg,", "GradualWarmupScheduler( optimizer, multiplier=m, total_epoch=e, after_scheduler=scheduler) logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing, warmup: {m}/{e}') criterion =", "= argparse.ArgumentParser(\"ada_aug\") parser.add_argument('--dataroot', type=str, default='./', help='location of the data corpus') parser.add_argument('--dataset', type=str, default='cifar10',", "optimizer, multiplier=m, total_epoch=e, after_scheduler=scheduler) logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing, warmup: {m}/{e}') criterion = nn.CrossEntropyLoss()", "format=log_format, datefmt='%m/%d %I:%M:%S %p') fh = logging.FileHandler(os.path.join(args.save, 'log.txt')) fh.setFormatter(logging.Formatter(log_format)) logging.getLogger().addHandler(fh) def main(): if", "type=bool, default=True, help=\"use cuda default True\") parser.add_argument('--gpu', type=int, default=0, help='gpu device id') parser.add_argument('--use_parallel',", "criterion, optimizer, epoch, args.grad_clip, adaaug) logging.info('train_acc %f', train_acc) valid_acc, valid_obj, _, _ =", "parser.add_argument('--gpu', type=int, default=0, help='gpu device id') parser.add_argument('--use_parallel', action='store_true', default=False, help=\"use data parallel default", "utils.AvgrageMeter() top5 = utils.AvgrageMeter() model.eval() with torch.no_grad(): for input, target in valid_queue: input", "import time import torch import utils import logging import argparse import torch.nn as", "= criterion.cuda() # restore setting if args.restore: trained_epoch = utils.restore_ckpt(task_model, optimizer, scheduler, args.restore_path,", "name') parser.add_argument('--gf_model_path', type=str, default='./', 
help='gf_model path') parser.add_argument('--h_model_path', type=str, default='./', help='h_model path') parser.add_argument('--k_ops', type=int,", "default='./', help='gf_model name') parser.add_argument('--gf_model_path', type=str, default='./', help='gf_model path') parser.add_argument('--h_model_path', type=str, default='./', help='h_model path')", "training data from adaaug aug_images = adaaug(input, mode='exploit') model.train() optimizer.zero_grad() logits = model(aug_images)", "layer in augmentation policy projection\") parser.add_argument('--n_proj_hidden', type=int, default=128, help=\"number of hidden units in", "parser.add_argument('--report_freq', type=float, default=50, help='report frequency') parser.add_argument('--save', type=str, default='EXP', help='experiment name') parser.add_argument('--seed', type=int, default=0,", "dataset settings n_class = get_num_class(args.dataset) class2label = get_label_name(args.dataset, args.dataroot) train_queue, valid_queue, _, test_queue", "{args.dataset}') logging.info(f' |total: {len(train_queue.dataset)}') logging.info(f' |train: {len(train_queue)*args.batch_size}') logging.info(f' |valid: {len(valid_queue)*args.batch_size}') # task model", "args.report_freq == 0: logging.info('train %03d %e %f %f', global_step, objs.avg, top1.avg, top5.avg) #", "= utils.AvgrageMeter() top1 = utils.AvgrageMeter() top5 = utils.AvgrageMeter() model.eval() with torch.no_grad(): for input,", "help='gradient clipping') parser.add_argument('--use_cuda', type=bool, default=True, help=\"use cuda default True\") parser.add_argument('--gpu', type=int, default=0, help='gpu", "id') parser.add_argument('--use_parallel', action='store_true', default=False, help=\"use data parallel default False\") parser.add_argument('--model_name', type=str, default='wresnet40_2', help=\"model", "units in augmentation policy projection layers\") parser.add_argument('--restore_path', type=str, default='./', help='restore model path') parser.add_argument('--restore',", "= model(input) loss = criterion(logits, target) prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))", "e = get_warmup_config(args.dataset) scheduler = GradualWarmupScheduler( optimizer, multiplier=m, total_epoch=e, after_scheduler=scheduler) logging.info(f'Optimizer: SGD, scheduler:", "networks.projection import Projection from dataset import get_num_class, get_dataloaders, get_label_name, get_dataset_dimension from config import", "cutout') parser.add_argument('--cutout_length', type=int, default=16, help='cutout length') parser.add_argument('--drop_path_prob', type=float, default=0.2, help='drop path probability') parser.add_argument('--epochs',", "time import torch import utils import logging import argparse import torch.nn as nn", "path probability') parser.add_argument('--epochs', type=int, default=600, help='number of training epochs') parser.add_argument('--report_freq', type=float, default=50, help='report", "type=float, default=0.3, help=\"degree of perturbation in magnitude\") parser.add_argument('--temperature', type=float, default=1.0, help=\"temperature\") parser.add_argument('--n_proj_layer', type=int,", "= get_model(model_name=args.model_name, num_class=n_class, use_cuda=True, data_parallel=False) logging.info(\"param size = %fMB\", utils.count_parameters_in_MB(task_model)) # task optimization", "n = input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n) global_step = step +", "= '%(asctime)s %(message)s' 
logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=log_format, datefmt='%m/%d %I:%M:%S %p') fh = logging.FileHandler(os.path.join(args.save, 'log.txt'))", "0 n_epoch = args.epochs # load trained adaaug sub models search_n_class = get_num_class(args.search_dataset)", "target.cuda(non_blocking=True) # get augmented training data from adaaug aug_images = adaaug(input, mode='exploit') model.train()", "test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion) logging.info('test_acc %f %f', test_acc, test_acc5)", "%f %f', global_step, objs.avg, top1.avg, top5.avg) # log the policy if step ==", "sys import time import torch import utils import logging import argparse import torch.nn", "logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=log_format, datefmt='%m/%d %I:%M:%S %p') fh = logging.FileHandler(os.path.join(args.save, 'log.txt')) fh.setFormatter(logging.Formatter(log_format)) logging.getLogger().addHandler(fh) def", "for input, target in valid_queue: input = input.cuda() target = target.cuda(non_blocking=True) logits =", "%f', test_acc, test_acc5) logging.info(f'save to {args.save}') def train(train_queue, model, criterion, optimizer, epoch, grad_clip,", "False after_transforms = train_queue.dataset.after_transforms adaaug_config = {'sampling': 'prob', 'k_ops': args.k_ops, 'delta': args.delta, 'temp':", "if debug: args.save = os.path.join('debug', args.save) else: args.save = os.path.join('eval', args.dataset, args.save) utils.create_exp_dir(args.save)", "action='store_true', default=False, help='restore model default False') args = parser.parse_args() debug = True if", "step, (input, target) in enumerate(train_queue): target = target.cuda(non_blocking=True) # get augmented training data", "criterion) logging.info('valid_acc %f', valid_acc) scheduler.step() if epoch % args.report_freq == 0: test_acc, test_obj,", "help='report frequency') parser.add_argument('--save', type=str, default='EXP', help='experiment name') parser.add_argument('--seed', type=int, default=0, help='seed') parser.add_argument('--search_dataset', type=str,", "get_dataloaders, get_label_name, get_dataset_dimension from config import get_warmup_config from warmup_scheduler import GradualWarmupScheduler parser =", "args.dataset, args.save) utils.create_exp_dir(args.save) log_format = '%(asctime)s %(message)s' logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=log_format, datefmt='%m/%d %I:%M:%S %p')", "default=50, help='report frequency') parser.add_argument('--save', type=str, default='EXP', help='experiment name') parser.add_argument('--seed', type=int, default=0, help='seed') parser.add_argument('--search_dataset',", "AdaAug(after_transforms=after_transforms, n_class=search_n_class, gf_model=gf_model, h_model=h_model, save_dir=args.save, config=adaaug_config) # start training for i_epoch in range(n_epoch):", "training for i_epoch in range(n_epoch): epoch = trained_epoch + i_epoch lr = scheduler.get_last_lr()[0]", "|valid: {len(valid_queue)*args.batch_size}') # task model settings task_model = get_model(model_name=args.model_name, num_class=n_class, use_cuda=True, data_parallel=False) logging.info(\"param", "optimizer = torch.optim.SGD( task_model.parameters(), args.learning_rate, momentum=args.momentum, weight_decay=args.weight_decay, nesterov=True ) scheduler = torch.optim.lr_scheduler.CosineAnnealingLR( optimizer,", "adaaug sub models search_n_class = get_num_class(args.search_dataset) gf_model = get_model(model_name=args.gf_model_name, 
num_class=search_n_class, use_cuda=True, data_parallel=False) h_model", "not torch.cuda.is_available(): logging.info('no gpu device available') sys.exit(1) torch.cuda.set_device(args.gpu) utils.reproducibility(args.seed) logging.info('gpu device = %d'", "n) global_step = step + epoch * len(train_queue) if global_step % args.report_freq ==", "training\") parser.add_argument('--delta', type=float, default=0.3, help=\"degree of perturbation in magnitude\") parser.add_argument('--temperature', type=float, default=1.0, help=\"temperature\")", "help='weight decay') parser.add_argument('--grad_clip', type=float, default=5, help='gradient clipping') parser.add_argument('--use_cuda', type=bool, default=True, help=\"use cuda default", "projection layers\") parser.add_argument('--restore_path', type=str, default='./', help='restore model path') parser.add_argument('--restore', action='store_true', default=False, help='restore model", "type=str, default='EXP', help='experiment name') parser.add_argument('--seed', type=int, default=0, help='seed') parser.add_argument('--search_dataset', type=str, default='./', help='search dataset", "args.batch_size, args.num_workers, args.dataroot, args.cutout, args.cutout_length, split=args.train_portion, split_idx=0, target_lb=-1, search=True) logging.info(f'Dataset: {args.dataset}') logging.info(f' |total:", "epoch * len(train_queue) if global_step % args.report_freq == 0: logging.info('train %03d %e %f", "help='portion of training data') parser.add_argument('--batch_size', type=int, default=96, help='batch size') parser.add_argument('--num_workers', type=int, default=0, help=\"num_workers\")", "len(train_queue) if global_step % args.report_freq == 0: logging.info('train %03d %e %f %f', global_step,", "args.dataroot) train_queue, valid_queue, _, test_queue = get_dataloaders( args.dataset, args.batch_size, args.num_workers, args.dataroot, args.cutout, args.cutout_length,", "default='./', help='gf_model path') parser.add_argument('--h_model_path', type=str, default='./', help='h_model path') parser.add_argument('--k_ops', type=int, default=1, help=\"number of", "get_label_name(args.dataset, args.dataroot) train_queue, valid_queue, _, test_queue = get_dataloaders( args.dataset, args.batch_size, args.num_workers, args.dataroot, args.cutout,", "epoch {trained_epoch}') else: trained_epoch = 0 n_epoch = args.epochs # load trained adaaug", "= args.epochs # load trained adaaug sub models search_n_class = get_num_class(args.search_dataset) gf_model =", "after_transforms = train_queue.dataset.after_transforms adaaug_config = {'sampling': 'prob', 'k_ops': args.k_ops, 'delta': args.delta, 'temp': args.temperature,", "CosineAnnealing, warmup: {m}/{e}') criterion = nn.CrossEntropyLoss() criterion = criterion.cuda() # restore setting if", "multiplier=m, total_epoch=e, after_scheduler=scheduler) logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing, warmup: {m}/{e}') criterion = nn.CrossEntropyLoss() criterion", "n_epoch = args.epochs - trained_epoch logging.info(f'Restoring model from {args.restore_path}, starting from epoch {trained_epoch}')", "= get_num_class(args.search_dataset) gf_model = get_model(model_name=args.gf_model_name, num_class=search_n_class, use_cuda=True, data_parallel=False) h_model = Projection(in_features=gf_model.fc.in_features, n_layers=args.n_proj_layer, n_hidden=args.n_proj_hidden).cuda()", "'weights.pt')) adaaug.save_history(class2label) figure = adaaug.plot_history() test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion)", 
"type=str, default='cifar10', help='name of dataset') parser.add_argument('--train_portion', type=float, default=0.5, help='portion of training data') parser.add_argument('--batch_size',", "valid_queue: input = input.cuda() target = target.cuda(non_blocking=True) logits = model(input) loss = criterion(logits,", "parser.add_argument('--temperature', type=float, default=1.0, help=\"temperature\") parser.add_argument('--n_proj_layer', type=int, default=0, help=\"number of additional hidden layer in", "probability') parser.add_argument('--epochs', type=int, default=600, help='number of training epochs') parser.add_argument('--report_freq', type=float, default=50, help='report frequency')", "location=args.gpu) utils.load_model(h_model, f'{args.h_model_path}/h_weights.pt', location=args.gpu) for param in gf_model.parameters(): param.requires_grad = False for param", "level=logging.INFO, format=log_format, datefmt='%m/%d %I:%M:%S %p') fh = logging.FileHandler(os.path.join(args.save, 'log.txt')) fh.setFormatter(logging.Formatter(log_format)) logging.getLogger().addHandler(fh) def main():", "step + epoch * len(train_queue) if global_step % args.report_freq == 0: logging.info('train %03d", "default=3e-4, help='weight decay') parser.add_argument('--grad_clip', type=float, default=5, help='gradient clipping') parser.add_argument('--use_cuda', type=bool, default=True, help=\"use cuda", "if epoch % args.report_freq == 0: test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model,", "global_step, objs.avg, top1.avg, top5.avg) # log the policy if step == 0: adaaug.add_history(input,", "type=float, default=50, help='report frequency') parser.add_argument('--save', type=str, default='EXP', help='experiment name') parser.add_argument('--seed', type=int, default=0, help='seed')", "utils.load_model(gf_model, f'{args.gf_model_path}/gf_weights.pt', location=args.gpu) utils.load_model(h_model, f'{args.h_model_path}/h_weights.pt', location=args.gpu) for param in gf_model.parameters(): param.requires_grad = False", "= get_warmup_config(args.dataset) scheduler = GradualWarmupScheduler( optimizer, multiplier=m, total_epoch=e, after_scheduler=scheduler) logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing,", "args.k_ops, 'delta': args.delta, 'temp': args.temperature, 'search_d': get_dataset_dimension(args.search_dataset), 'target_d': get_dataset_dimension(args.dataset)} adaaug = AdaAug(after_transforms=after_transforms, n_class=search_n_class,", "n) top5.update(prec5.detach().item(), n) return top1.avg, objs.avg, top5.avg, objs.avg if __name__ == '__main__': main()", "== 0: test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion) logging.info('test_acc %f %f',", "default True\") parser.add_argument('--gpu', type=int, default=0, help='gpu device id') parser.add_argument('--use_parallel', action='store_true', default=False, help=\"use data", "utils.AvgrageMeter() top1 = utils.AvgrageMeter() top5 = utils.AvgrageMeter() for step, (input, target) in enumerate(train_queue):", "model default False') args = parser.parse_args() debug = True if args.save == \"debug\"", "warmup_scheduler import GradualWarmupScheduler parser = argparse.ArgumentParser(\"ada_aug\") parser.add_argument('--dataroot', type=str, default='./', help='location of the data", "type=float, default=5, help='gradient clipping') parser.add_argument('--use_cuda', type=bool, default=True, help=\"use cuda default True\") parser.add_argument('--gpu', type=int,", "param in gf_model.parameters(): param.requires_grad = False for param in 
def train(train_queue, model, criterion, optimizer, epoch, grad_clip, adaaug):
    objs = utils.AvgrageMeter()
    top1 = utils.AvgrageMeter()
    top5 = utils.AvgrageMeter()

    for step, (input, target) in enumerate(train_queue):
        target = target.cuda(non_blocking=True)

        # get augmented training data from adaaug
        aug_images = adaaug(input, mode='exploit')

        model.train()
        optimizer.zero_grad()
        logits = model(aug_images)
        loss = criterion(logits, target)
        loss.backward()
        nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
        optimizer.step()

        prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))
        n = input.size(0)
        objs.update(loss.detach().item(), n)
        top1.update(prec1.detach().item(), n)
        top5.update(prec5.detach().item(), n)

        global_step = step + epoch * len(train_queue)
        if global_step % args.report_freq == 0:
            logging.info('train %03d %e %f %f', global_step, objs.avg, top1.avg, top5.avg)

        # log the policy
        if step == 0:
            adaaug.add_history(input, target)

    return top1.avg, objs.avg
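# train() and infer() rely on utils.AvgrageMeter and utils.accuracy, which are
# not defined in this file. The sketches below match the call sites used above
# (update(val, n) / .avg, and accuracy(logits, target, topk=(1, 5)) returning
# percentages); they follow the common DARTS-style helpers and are assumptions
# about this repo's utils module, not copies of it.
class _AvgrageMeterSketch:
    """Running average over a stream of (value, count) updates."""

    def __init__(self):
        self.sum, self.cnt, self.avg = 0.0, 0, 0.0

    def update(self, val, n=1):
        self.sum += val * n
        self.cnt += n
        self.avg = self.sum / self.cnt


def _accuracy_sketch(output, target, topk=(1,)):
    """Top-k precision (in percent) for a batch of logits."""
    maxk = max(topk)
    batch_size = target.size(0)
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()  # (maxk, batch)
    correct = pred.eq(target.view(1, -1).expand_as(pred))
    res = []
    for k in topk:
        correct_k = correct[:k].reshape(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res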
def infer(valid_queue, model, criterion):
    objs = utils.AvgrageMeter()
    top1 = utils.AvgrageMeter()
    top5 = utils.AvgrageMeter()
    model.eval()

    with torch.no_grad():
        for input, target in valid_queue:
            input = input.cuda()
            target = target.cuda(non_blocking=True)

            logits = model(input)
            loss = criterion(logits, target)

            prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))
            n = input.size(0)
            objs.update(loss.detach().item(), n)
            top1.update(prec1.detach().item(), n)
            top5.update(prec5.detach().item(), n)

    return top1.avg, objs.avg, top5.avg, objs.avg
parser.add_argument('--n_proj_layer', type=int, default=0, help=\"number of", "0: test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion) logging.info('test_acc %f %f', test_acc,", "f'{args.gf_model_path}/gf_weights.pt', location=args.gpu) utils.load_model(h_model, f'{args.h_model_path}/h_weights.pt', location=args.gpu) for param in gf_model.parameters(): param.requires_grad = False for", "after_scheduler=scheduler) logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing, warmup: {m}/{e}') criterion = nn.CrossEntropyLoss() criterion = criterion.cuda()", "loss = criterion(logits, target) loss.backward() nn.utils.clip_grad_norm_(model.parameters(), grad_clip) optimizer.step() prec1, prec5 = utils.accuracy(logits, target,", "|total: {len(train_queue.dataset)}') logging.info(f' |train: {len(train_queue)*args.batch_size}') logging.info(f' |valid: {len(valid_queue)*args.batch_size}') # task model settings task_model", "= torch.optim.SGD( task_model.parameters(), args.learning_rate, momentum=args.momentum, weight_decay=args.weight_decay, nesterov=True ) scheduler = torch.optim.lr_scheduler.CosineAnnealingLR( optimizer, float(args.epochs),", "start training for i_epoch in range(n_epoch): epoch = trained_epoch + i_epoch lr =", "epoch % args.report_freq == 0: test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion)", "get_dataset_dimension(args.search_dataset), 'target_d': get_dataset_dimension(args.dataset)} adaaug = AdaAug(after_transforms=after_transforms, n_class=search_n_class, gf_model=gf_model, h_model=h_model, save_dir=args.save, config=adaaug_config) # start", "default=0, help='seed') parser.add_argument('--search_dataset', type=str, default='./', help='search dataset name') parser.add_argument('--gf_model_name', type=str, default='./', help='gf_model name')", "# start training for i_epoch in range(n_epoch): epoch = trained_epoch + i_epoch lr", "rate') parser.add_argument('--momentum', type=float, default=0.9, help='momentum') parser.add_argument('--weight_decay', type=float, default=3e-4, help='weight decay') parser.add_argument('--grad_clip', type=float, default=5,", "default=0.3, help=\"degree of perturbation in magnitude\") parser.add_argument('--temperature', type=float, default=1.0, help=\"temperature\") parser.add_argument('--n_proj_layer', type=int, default=0,", "default=1, help=\"number of augmentation applied during training\") parser.add_argument('--delta', type=float, default=0.3, help=\"degree of perturbation", "default='./', help='h_model path') parser.add_argument('--k_ops', type=int, default=1, help=\"number of augmentation applied during training\") parser.add_argument('--delta',", "epoch, lr) train_acc, train_obj = train( train_queue, task_model, criterion, optimizer, epoch, args.grad_clip, adaaug)", "GradualWarmupScheduler parser = argparse.ArgumentParser(\"ada_aug\") parser.add_argument('--dataroot', type=str, default='./', help='location of the data corpus') parser.add_argument('--dataset',", "hidden units in augmentation policy projection layers\") parser.add_argument('--restore_path', type=str, default='./', help='restore model path')", "augmentation policy projection layers\") parser.add_argument('--restore_path', type=str, default='./', help='restore model path') parser.add_argument('--restore', action='store_true', default=False,", "float(args.epochs), eta_min=args.learning_rate_min) m, e = get_warmup_config(args.dataset) scheduler = GradualWarmupScheduler( optimizer, multiplier=m, total_epoch=e, after_scheduler=scheduler)", 
"args.restore: trained_epoch = utils.restore_ckpt(task_model, optimizer, scheduler, args.restore_path, location=args.gpu) + 1 n_epoch = args.epochs", "%d lr %e', epoch, lr) train_acc, train_obj = train( train_queue, task_model, criterion, optimizer,", "top1.avg, objs.avg def infer(valid_queue, model, criterion): objs = utils.AvgrageMeter() top1 = utils.AvgrageMeter() top5", "path') parser.add_argument('--k_ops', type=int, default=1, help=\"number of augmentation applied during training\") parser.add_argument('--delta', type=float, default=0.3,", "logging.info('test_acc %f %f', test_acc, test_acc5) logging.info(f'save to {args.save}') def train(train_queue, model, criterion, optimizer,", "default=0, help=\"number of additional hidden layer in augmentation policy projection\") parser.add_argument('--n_proj_hidden', type=int, default=128,", "objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n) return top1.avg, objs.avg, top5.avg, objs.avg if __name__", "= input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n) return top1.avg, objs.avg, top5.avg, objs.avg", "optimizer, epoch, grad_clip, adaaug): objs = utils.AvgrageMeter() top1 = utils.AvgrageMeter() top5 = utils.AvgrageMeter()", "help='location of the data corpus') parser.add_argument('--dataset', type=str, default='cifar10', help='name of dataset') parser.add_argument('--train_portion', type=float,", "help='experiment name') parser.add_argument('--seed', type=int, default=0, help='seed') parser.add_argument('--search_dataset', type=str, default='./', help='search dataset name') parser.add_argument('--gf_model_name',", "get_model(model_name=args.gf_model_name, num_class=search_n_class, use_cuda=True, data_parallel=False) h_model = Projection(in_features=gf_model.fc.in_features, n_layers=args.n_proj_layer, n_hidden=args.n_proj_hidden).cuda() utils.load_model(gf_model, f'{args.gf_model_path}/gf_weights.pt', location=args.gpu) utils.load_model(h_model,", "help=\"temperature\") parser.add_argument('--n_proj_layer', type=int, default=0, help=\"number of additional hidden layer in augmentation policy projection\")", "{args.restore_path}, starting from epoch {trained_epoch}') else: trained_epoch = 0 n_epoch = args.epochs #", "= get_model(model_name=args.gf_model_name, num_class=search_n_class, use_cuda=True, data_parallel=False) h_model = Projection(in_features=gf_model.fc.in_features, n_layers=args.n_proj_layer, n_hidden=args.n_proj_hidden).cuda() utils.load_model(gf_model, f'{args.gf_model_path}/gf_weights.pt', location=args.gpu)", "model path') parser.add_argument('--restore', action='store_true', default=False, help='restore model default False') args = parser.parse_args() debug", "criterion.cuda() # restore setting if args.restore: trained_epoch = utils.restore_ckpt(task_model, optimizer, scheduler, args.restore_path, location=args.gpu)", "split=args.train_portion, split_idx=0, target_lb=-1, search=True) logging.info(f'Dataset: {args.dataset}') logging.info(f' |total: {len(train_queue.dataset)}') logging.info(f' |train: {len(train_queue)*args.batch_size}') logging.info(f'", "= utils.AvgrageMeter() top5 = utils.AvgrageMeter() model.eval() with torch.no_grad(): for input, target in valid_queue:", "get_label_name, get_dataset_dimension from config import get_warmup_config from warmup_scheduler import GradualWarmupScheduler parser = argparse.ArgumentParser(\"ada_aug\")", "_ = infer(valid_queue, task_model, criterion) 
logging.info('valid_acc %f', valid_acc) scheduler.step() if epoch % args.report_freq", "parser.add_argument('--n_proj_hidden', type=int, default=128, help=\"number of hidden units in augmentation policy projection layers\") parser.add_argument('--restore_path',", "decay') parser.add_argument('--grad_clip', type=float, default=5, help='gradient clipping') parser.add_argument('--use_cuda', type=bool, default=True, help=\"use cuda default True\")", "%f', global_step, objs.avg, top1.avg, top5.avg) # log the policy if step == 0:", "import get_model from networks.projection import Projection from dataset import get_num_class, get_dataloaders, get_label_name, get_dataset_dimension", "torch.utils from adaptive_augmentor import AdaAug from networks import get_model from networks.projection import Projection", "{len(train_queue)*args.batch_size}') logging.info(f' |valid: {len(valid_queue)*args.batch_size}') # task model settings task_model = get_model(model_name=args.model_name, num_class=n_class, use_cuda=True,", "input, target in valid_queue: input = input.cuda() target = target.cuda(non_blocking=True) logits = model(input)", "True if args.save == \"debug\" else False args.save = '{}-{}'.format(time.strftime(\"%Y%m%d-%H%M%S\"), args.save) if debug:", "train_queue, valid_queue, _, test_queue = get_dataloaders( args.dataset, args.batch_size, args.num_workers, args.dataroot, args.cutout, args.cutout_length, split=args.train_portion,", "parser.add_argument('--drop_path_prob', type=float, default=0.2, help='drop path probability') parser.add_argument('--epochs', type=int, default=600, help='number of training epochs')", "parser.add_argument('--dataroot', type=str, default='./', help='location of the data corpus') parser.add_argument('--dataset', type=str, default='cifar10', help='name of", "default=128, help=\"number of hidden units in augmentation policy projection layers\") parser.add_argument('--restore_path', type=str, default='./',", "os import sys import time import torch import utils import logging import argparse", "scheduler.step() if epoch % args.report_freq == 0: test_acc, test_obj, test_acc5, _ = infer(test_queue,", "parser.add_argument('--seed', type=int, default=0, help='seed') parser.add_argument('--search_dataset', type=str, default='./', help='search dataset name') parser.add_argument('--gf_model_name', type=str, default='./',", "starting from epoch {trained_epoch}') else: trained_epoch = 0 n_epoch = args.epochs # load", "data_parallel=False) h_model = Projection(in_features=gf_model.fc.in_features, n_layers=args.n_proj_layer, n_hidden=args.n_proj_hidden).cuda() utils.load_model(gf_model, f'{args.gf_model_path}/gf_weights.pt', location=args.gpu) utils.load_model(h_model, f'{args.h_model_path}/h_weights.pt', location=args.gpu) for", "- trained_epoch logging.info(f'Restoring model from {args.restore_path}, starting from epoch {trained_epoch}') else: trained_epoch =", "for step, (input, target) in enumerate(train_queue): target = target.cuda(non_blocking=True) # get augmented training", "eta_min=args.learning_rate_min) m, e = get_warmup_config(args.dataset) scheduler = GradualWarmupScheduler( optimizer, multiplier=m, total_epoch=e, after_scheduler=scheduler) logging.info(f'Optimizer:", "= utils.AvgrageMeter() model.eval() with torch.no_grad(): for input, target in valid_queue: input = input.cuda()", "learning rate') parser.add_argument('--learning_rate_min', type=float, default=0.0001, help='min learning rate') parser.add_argument('--momentum', type=float, default=0.9, help='momentum') 
parser.add_argument('--weight_decay',", "_, test_queue = get_dataloaders( args.dataset, args.batch_size, args.num_workers, args.dataroot, args.cutout, args.cutout_length, split=args.train_portion, split_idx=0, target_lb=-1,", "type=int, default=96, help='batch size') parser.add_argument('--num_workers', type=int, default=0, help=\"num_workers\") parser.add_argument('--learning_rate', type=float, default=0.025, help='init learning", "% args.report_freq == 0: test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion) logging.info('test_acc", "name\") parser.add_argument('--model_path', type=str, default='saved_models', help='path to save the model') parser.add_argument('--cutout', action='store_true', default=False, help='use", "target, topk=(1, 5)) n = input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item(), n) global_step", "train(train_queue, model, criterion, optimizer, epoch, grad_clip, adaaug): objs = utils.AvgrageMeter() top1 = utils.AvgrageMeter()", "parser.add_argument('--delta', type=float, default=0.3, help=\"degree of perturbation in magnitude\") parser.add_argument('--temperature', type=float, default=1.0, help=\"temperature\") parser.add_argument('--n_proj_layer',", "== \"debug\" else False args.save = '{}-{}'.format(time.strftime(\"%Y%m%d-%H%M%S\"), args.save) if debug: args.save = os.path.join('debug',", "logging.info('no gpu device available') sys.exit(1) torch.cuda.set_device(args.gpu) utils.reproducibility(args.seed) logging.info('gpu device = %d' % args.gpu)", "args.save == \"debug\" else False args.save = '{}-{}'.format(time.strftime(\"%Y%m%d-%H%M%S\"), args.save) if debug: args.save =", "get_num_class, get_dataloaders, get_label_name, get_dataset_dimension from config import get_warmup_config from warmup_scheduler import GradualWarmupScheduler parser", "the model') parser.add_argument('--cutout', action='store_true', default=False, help='use cutout') parser.add_argument('--cutout_length', type=int, default=16, help='cutout length') parser.add_argument('--drop_path_prob',", "parser.add_argument('--gf_model_name', type=str, default='./', help='gf_model name') parser.add_argument('--gf_model_path', type=str, default='./', help='gf_model path') parser.add_argument('--h_model_path', type=str, default='./',", "device available') sys.exit(1) torch.cuda.set_device(args.gpu) utils.reproducibility(args.seed) logging.info('gpu device = %d' % args.gpu) logging.info(\"args =", "lr) train_acc, train_obj = train( train_queue, task_model, criterion, optimizer, epoch, args.grad_clip, adaaug) logging.info('train_acc", "train_queue.dataset.after_transforms adaaug_config = {'sampling': 'prob', 'k_ops': args.k_ops, 'delta': args.delta, 'temp': args.temperature, 'search_d': get_dataset_dimension(args.search_dataset),", "argparse import torch.nn as nn import torch.utils from adaptive_augmentor import AdaAug from networks", "args.temperature, 'search_d': get_dataset_dimension(args.search_dataset), 'target_d': get_dataset_dimension(args.dataset)} adaaug = AdaAug(after_transforms=after_transforms, n_class=search_n_class, gf_model=gf_model, h_model=h_model, save_dir=args.save, config=adaaug_config)", "help=\"model name\") parser.add_argument('--model_path', type=str, default='saved_models', help='path to save the model') parser.add_argument('--cutout', action='store_true', default=False,", "models search_n_class = get_num_class(args.search_dataset) gf_model = get_model(model_name=args.gf_model_name, 
num_class=search_n_class, use_cuda=True, data_parallel=False) h_model = Projection(in_features=gf_model.fc.in_features,", "logits = model(aug_images) loss = criterion(logits, target) loss.backward() nn.utils.clip_grad_norm_(model.parameters(), grad_clip) optimizer.step() prec1, prec5", "%f', test_acc, test_acc5) utils.save_ckpt(task_model, optimizer, scheduler, epoch, os.path.join(args.save, 'weights.pt')) adaaug.save_history(class2label) figure = adaaug.plot_history()", "torch.optim.SGD( task_model.parameters(), args.learning_rate, momentum=args.momentum, weight_decay=args.weight_decay, nesterov=True ) scheduler = torch.optim.lr_scheduler.CosineAnnealingLR( optimizer, float(args.epochs), eta_min=args.learning_rate_min)", "help='batch size') parser.add_argument('--num_workers', type=int, default=0, help=\"num_workers\") parser.add_argument('--learning_rate', type=float, default=0.025, help='init learning rate') parser.add_argument('--learning_rate_min',", "help='init learning rate') parser.add_argument('--learning_rate_min', type=float, default=0.0001, help='min learning rate') parser.add_argument('--momentum', type=float, default=0.9, help='momentum')", "help='gf_model name') parser.add_argument('--gf_model_path', type=str, default='./', help='gf_model path') parser.add_argument('--h_model_path', type=str, default='./', help='h_model path') parser.add_argument('--k_ops',", "scheduler.get_last_lr()[0] logging.info('epoch %d lr %e', epoch, lr) train_acc, train_obj = train( train_queue, task_model,", "logging.info('epoch %d lr %e', epoch, lr) train_acc, train_obj = train( train_queue, task_model, criterion,", "n_class=search_n_class, gf_model=gf_model, h_model=h_model, save_dir=args.save, config=adaaug_config) # start training for i_epoch in range(n_epoch): epoch", "setting if args.restore: trained_epoch = utils.restore_ckpt(task_model, optimizer, scheduler, args.restore_path, location=args.gpu) + 1 n_epoch", "parser.add_argument('--epochs', type=int, default=600, help='number of training epochs') parser.add_argument('--report_freq', type=float, default=50, help='report frequency') parser.add_argument('--save',", "for param in gf_model.parameters(): param.requires_grad = False for param in h_model.parameters(): param.requires_grad =", "# restore setting if args.restore: trained_epoch = utils.restore_ckpt(task_model, optimizer, scheduler, args.restore_path, location=args.gpu) +", "default=600, help='number of training epochs') parser.add_argument('--report_freq', type=float, default=50, help='report frequency') parser.add_argument('--save', type=str, default='EXP',", "grad_clip) optimizer.step() prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5)) n = input.size(0) objs.update(loss.detach().item(),", "= get_num_class(args.dataset) class2label = get_label_name(args.dataset, args.dataroot) train_queue, valid_queue, _, test_queue = get_dataloaders( args.dataset,", "scheduler: CosineAnnealing, warmup: {m}/{e}') criterion = nn.CrossEntropyLoss() criterion = criterion.cuda() # restore setting", "= utils.accuracy(logits, target, topk=(1, 5)) n = input.size(0) objs.update(loss.detach().item(), n) top1.update(prec1.detach().item(), n) top5.update(prec5.detach().item()," ]
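# The training and validation loops above accumulate statistics through
# utils.AvgrageMeter. For readers without the repo's utils module, here is a
# minimal sketch of the contract those loops rely on (an illustrative
# assumption, not the repo's exact implementation; the class name is
# hypothetical): update(val, n) folds in a per-batch statistic weighted by the
# batch size, and .avg exposes the running per-sample mean. The script itself
# is driven entirely by the argparse flags above, e.g.
# `python train.py --dataset cifar10 --k_ops 1` (the file name train.py is an
# assumption).
class AvgrageMeterSketch:
    def __init__(self):
        self.avg, self.sum, self.cnt = 0.0, 0.0, 0

    def update(self, val, n=1):
        self.sum += val * n   # val is a per-batch mean, n the batch size
        self.cnt += n
        self.avg = self.sum / self.cnt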
[ "are some ideas to get you started: - 🔭 I’m currently working on", "- 🤔 I’m looking for help with ... - 💬 Ask me about", "you started: - 🔭 I’m currently working on ... - 🌱 I’m currently", "👋 <!-- **dungpoke/dungpoke** is a ✨ _special_ ✨ repository because its `README.md` (this", "- 🔭 I’m currently working on ... - 🌱 I’m currently learning ...", "I’m looking to collaborate on ... - 🤔 I’m looking for help with", "Hi there 👋 <!-- **dungpoke/dungpoke** is a ✨ _special_ ✨ repository because its", "me about ... - 📫 How to reach me: ... - 😄 Pronouns:", "a ✨ _special_ ✨ repository because its `README.md` (this file) appears on your", "<!-- **dungpoke/dungpoke** is a ✨ _special_ ✨ repository because its `README.md` (this file)", "- 👯 I’m looking to collaborate on ... - 🤔 I’m looking for", "to collaborate on ... - 🤔 I’m looking for help with ... -", "on ... - 🤔 I’m looking for help with ... - 💬 Ask", "- 💬 Ask me about ... - 📫 How to reach me: ...", "- 📫 How to reach me: ... - 😄 Pronouns: ... - ⚡", "`README.md` (this file) appears on your GitHub profile. Here are some ideas to", "file) appears on your GitHub profile. Here are some ideas to get you", "... - 📫 How to reach me: ... - 😄 Pronouns: ... -", "to get you started: - 🔭 I’m currently working on ... - 🌱", "for help with ... - 💬 Ask me about ... - 📫 How", "(this file) appears on your GitHub profile. Here are some ideas to get", "on your GitHub profile. Here are some ideas to get you started: -", "I’m looking for help with ... - 💬 Ask me about ... -", "get you started: - 🔭 I’m currently working on ... - 🌱 I’m", "🔭 I’m currently working on ... - 🌱 I’m currently learning ... -", "looking to collaborate on ... - 🤔 I’m looking for help with ...", "🤔 I’m looking for help with ... - 💬 Ask me about ...", "its `README.md` (this file) appears on your GitHub profile. Here are some ideas", "🌱 I’m currently learning ... - 👯 I’m looking to collaborate on ...", "about ... - 📫 How to reach me: ... - 😄 Pronouns: ...", "to reach me: ... - 😄 Pronouns: ... - ⚡ Fun fact: ...", "started: - 🔭 I’m currently working on ... - 🌱 I’m currently learning", "- 🌱 I’m currently learning ... - 👯 I’m looking to collaborate on", "because its `README.md` (this file) appears on your GitHub profile. Here are some", "ideas to get you started: - 🔭 I’m currently working on ... -", "currently working on ... - 🌱 I’m currently learning ... - 👯 I’m", "your GitHub profile. Here are some ideas to get you started: - 🔭", "appears on your GitHub profile. Here are some ideas to get you started:", "📫 How to reach me: ... - 😄 Pronouns: ... - ⚡ Fun", "**dungpoke/dungpoke** is a ✨ _special_ ✨ repository because its `README.md` (this file) appears", "GitHub profile. Here are some ideas to get you started: - 🔭 I’m", "learning ... - 👯 I’m looking to collaborate on ... - 🤔 I’m", "I’m currently working on ... - 🌱 I’m currently learning ... - 👯", "there 👋 <!-- **dungpoke/dungpoke** is a ✨ _special_ ✨ repository because its `README.md`", "... - 👯 I’m looking to collaborate on ... - 🤔 I’m looking", "Ask me about ... - 📫 How to reach me: ... - 😄", "is a ✨ _special_ ✨ repository because its `README.md` (this file) appears on", "currently learning ... - 👯 I’m looking to collaborate on ... - 🤔", "repository because its `README.md` (this file) appears on your GitHub profile. Here are", "collaborate on ... - 🤔 I’m looking for help with ... - 💬", "💬 Ask me about ... - 📫 How to reach me: ... 
-", "Here are some ideas to get you started: - 🔭 I’m currently working", "✨ repository because its `README.md` (this file) appears on your GitHub profile. Here", "... - 🌱 I’m currently learning ... - 👯 I’m looking to collaborate", "working on ... - 🌱 I’m currently learning ... - 👯 I’m looking", "✨ _special_ ✨ repository because its `README.md` (this file) appears on your GitHub", "... - 🤔 I’m looking for help with ... - 💬 Ask me", "### Hi there 👋 <!-- **dungpoke/dungpoke** is a ✨ _special_ ✨ repository because", "I’m currently learning ... - 👯 I’m looking to collaborate on ... -", "looking for help with ... - 💬 Ask me about ... - 📫", "with ... - 💬 Ask me about ... - 📫 How to reach", "profile. Here are some ideas to get you started: - 🔭 I’m currently", "_special_ ✨ repository because its `README.md` (this file) appears on your GitHub profile.", "👯 I’m looking to collaborate on ... - 🤔 I’m looking for help", "... - 💬 Ask me about ... - 📫 How to reach me:", "some ideas to get you started: - 🔭 I’m currently working on ...", "reach me: ... - 😄 Pronouns: ... - ⚡ Fun fact: ... -->", "help with ... - 💬 Ask me about ... - 📫 How to", "How to reach me: ... - 😄 Pronouns: ... - ⚡ Fun fact:", "on ... - 🌱 I’m currently learning ... - 👯 I’m looking to" ]
[ "tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init)", "False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1))", "# v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer", "with tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1 =", "= tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v)", "tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init", "tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量", "print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape", "tf sess = tf.Session() #在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错", "sess = tf.Session() #在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with", "True): v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer =", "#with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1=", "tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc)", "v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0))", "tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v =", "sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用", "= False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = 
tf.initialize_all_variables() sess.run(init)", "tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init =", "tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse", "tensorflow as tf sess = tf.Session() #在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer =", "#输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape=", "'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0))", "print(v) v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v) init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1))", "tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables()", "as tf sess = tf.Session() #在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0))", "tf.Session() #在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): #", "=tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1))", "tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape=", "tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。", "= tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v) init = tf.initialize_all_variables()", "#因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE):", "import tensorflow as tf sess = tf.Session() #在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer", "tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with", "#输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) 
print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1],", "#print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v)", "= tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse =", "print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init", "= tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer =", "= tf.Session() #在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"):", "initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse", "= True): v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer", "[1],initializer = tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v) init =", "with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init =", "init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer", "v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v))", "tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0))", "tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v) init", "#且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量", "''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape =", "tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= 
tf.get_variable('v1',[1],", "#创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with", "#在名字为foo的命名空间内创建名字为v的变量 with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v=", "= [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape=", "with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables()", "init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer", "print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量", "#使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse =", "v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v) init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v))", "#v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape = [1]),name =", "#获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape = [1]),name", "tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name)", "[1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V", "#以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(100.0)) print(v) v1", "print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v =", "sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v)", "print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1=", "initializer = tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) '''", "tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = 
tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse", "with tf.variable_scope(\"foo\"): #创建一个常量为1的v v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1])", "= tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v", "tf.constant_initializer(100.0)) print(v) v1 = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v) init = tf.initialize_all_variables() sess.run(init)", "= tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer =", "tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0))", "= tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with", "= tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape = [1]),name = 'v')", "v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer =", "print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc", "#在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0)) print(v1==v)", "= 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape= [1],initializer =", "v= tf.get_variable('v1',[1],initializer = tf.constant_initializer(1.0)) #因为在foo空间已经创建v的变量,所以下面的代码会报错 #with tf.variable_scope(\"foo\"): # v= tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with", "tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) with tf.variable_scope(\"foo1\",reuse = False):", "tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v", "#vc = tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True):", "= 
tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc = tf.Variable(tf.constant(1.0,shape = [1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with", "[1]),name = 'v') #print(vc) #以下使用with语法,将tf.get_variable与tf.variable_scope搭配使用,且reuse=True时,之前必须定义V with tf.variable_scope('zdx',reuse = True): v = tf.get_variable('v222',shape= [1],initializer", "= tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(2.0)) print(v1==v) init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) '''", "tf.get_variable('v',[1]) #在生成上下文管理器时,将参数reuse设置为True。这样tf.get_variable的函数将直接获取已声明的变量 #且调用with tf.variable_scope(\"foo\")必须是定义的foo空间,而不能是with tf.variable_scope(\"\")未命名或者其他空间。 with tf.variable_scope(\"foo\",reuse =tf.AUTO_REUSE): v1= tf.get_variable('v1',[1], initializer = tf.constant_initializer(5.0))", "= tf.constant_initializer(5.0)) print(v1==v) #输出为True,代表v1与v是相同的变量 init = tf.initialize_all_variables() sess.run(init) print(sess.run(v1)) print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错", "print(sess.run(v)) print(foo.v1.name) ''' #获取变量的方式主要有以下两种,实践中tf.get_variable产生的变量一定要搭配tf.variable_scope使用,不然运行脚本会报错 #v = tf.get_variable('v222',shape= [1],initializer = tf.constant_initializer(10.0)) #使用直接定义变量不会报错,可以一直调用 #vc =" ]
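# The scoping rules exercised above survive in TensorFlow 2.x behind the v1
# compatibility layer. A minimal sketch (assuming TF 2.x is installed; the
# scope name "demo" and variable name "w" are illustrative): without reuse, a
# second creation attempt for an existing name is refused, and with reuse the
# very same variable object is handed back.
import tensorflow as tf

tf.compat.v1.disable_eager_execution()

with tf.compat.v1.variable_scope("demo"):
    a = tf.compat.v1.get_variable("w", [1])

try:
    with tf.compat.v1.variable_scope("demo"):
        tf.compat.v1.get_variable("w", [1])   # second creation attempt
except ValueError as err:
    print("creation refused:", err)           # the scope already owns demo/w

with tf.compat.v1.variable_scope("demo", reuse=True):
    b = tf.compat.v1.get_variable("w", [1])

print(a is b)  # True: reuse returns the same underlying variable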
[ "class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000 *", "import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape =", "\"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000 * 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10,", "def test_audio_loading_mono(self): expected_shape = (16000 * 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4,", "self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000 * 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path,", "expected_shape = (16000 * 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False)", "10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False) self.assertEqual(expected_shape, audio.shape) if __name__", "test_audio_loading_multi_channel(self): expected_shape = (16000 * 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4,", "to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000 * 10, 4) audio =", "as tf from src.utils.utils_audio import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\"", "src.utils.utils_audio import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape", "tensorflow as tf from src.utils.utils_audio import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path =", "sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000 * 10, 4)", "def test_audio_loading_multi_channel(self): expected_shape = (16000 * 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10,", "TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000 * 10,)", "setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000 * 10,) audio =", "def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000 * 10,) audio", "audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False) self.assertEqual(expected_shape, audio.shape) if __name__ == '__main__':", "expected_shape = (16000 * 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape,", "(16000 * 10,) audio = 
AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def", "= \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000 * 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000,", "import tensorflow as tf from src.utils.utils_audio import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path", "AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000 *", "audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000 * 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000,", "* 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False) self.assertEqual(expected_shape, audio.shape) if", "audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape =", "= AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000", "10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape", "AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000", "(16000 * 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False) self.assertEqual(expected_shape, audio.shape)", "* 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self):", "= (16000 * 10, 4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False) self.assertEqual(expected_shape,", "tf from src.utils.utils_audio import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def", "sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000 * 10,", "4) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False) self.assertEqual(expected_shape, audio.shape) if __name__ ==", "self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self): expected_shape = (16000 * 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path,", "from src.utils.utils_audio import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self): 
self.audio_file_path = \"/tf/test_environment/audio/DevNode1_ex1_1.wav\" def test_audio_loading_mono(self):", "= (16000 * 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape)", "test_audio_loading_mono(self): expected_shape = (16000 * 10,) audio = AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=True)", "stereo_channels=4, to_mono=True) self.assertEqual(expected_shape, audio.shape) def test_audio_loading_multi_channel(self): expected_shape = (16000 * 10, 4) audio", "= AudioUtils.load_audio_from_file(self.audio_file_path, sample_rate=16000, sample_size=10, stereo_channels=4, to_mono=False) self.assertEqual(expected_shape, audio.shape) if __name__ == '__main__': tf.test.main()", "<reponame>FabianGroeger96/deep-embedded-music import tensorflow as tf from src.utils.utils_audio import AudioUtils class TestUtilsAudio(tf.test.TestCase): def setUp(self):" ]
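# The two tests pin down the contract of AudioUtils.load_audio_from_file: the
# result always holds sample_rate * sample_size frames, collapsed to a single
# channel when to_mono is set and carrying stereo_channels channels otherwise.
# A minimal sketch satisfying that contract (an illustrative assumption built
# on numpy and soundfile, not the project's actual implementation):
import numpy as np
import soundfile as sf


class AudioUtilsSketch:
    @staticmethod
    def load_audio_from_file(path, sample_rate, sample_size, stereo_channels, to_mono):
        audio, _ = sf.read(path, always_2d=True)       # (frames, channels)
        target_len = sample_rate * sample_size
        if audio.shape[0] < target_len:                # pad short clips with silence
            audio = np.pad(audio, ((0, target_len - audio.shape[0]), (0, 0)))
        audio = audio[:target_len]                     # trim long clips
        if to_mono:
            return audio.mean(axis=1)                  # (target_len,)
        if audio.shape[1] < stereo_channels:           # replicate channels if short
            reps = int(np.ceil(stereo_channels / audio.shape[1]))
            audio = np.tile(audio, (1, reps))
        return audio[:, :stereo_channels]              # (target_len, stereo_channels)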
[ "setup setup( install_requires=[ \"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'], version='0.1.0', url='https://github.com/philipwfowler/jitter', author='<NAME>', packages=['jitter'],", "from setuptools import setup setup( install_requires=[ \"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'], version='0.1.0',", "<gh_stars>1-10 from setuptools import setup setup( install_requires=[ \"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'],", "install_requires=[ \"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'], version='0.1.0', url='https://github.com/philipwfowler/jitter', author='<NAME>', packages=['jitter'], license='MIT', long_description=open('README.md').read(),", "\"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'], version='0.1.0', url='https://github.com/philipwfowler/jitter', author='<NAME>', packages=['jitter'], license='MIT', long_description=open('README.md').read(), )", "setup( install_requires=[ \"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'], version='0.1.0', url='https://github.com/philipwfowler/jitter', author='<NAME>', packages=['jitter'], license='MIT',", "setuptools import setup setup( install_requires=[ \"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'], version='0.1.0', url='https://github.com/philipwfowler/jitter',", "import setup setup( install_requires=[ \"numpy >= 1.13\" ], name='jitter', scripts=['bin/jitter.py'], version='0.1.0', url='https://github.com/philipwfowler/jitter', author='<NAME>'," ]
[ "dfs1 = find_DFS_(predicate,c) if len(dfs1) > 0: return [node] + dfs1 return []", "\"\"\"Returns the path in the tree from the root node to the first", "n.name == 'SAN', node) hops_to_santa = len(p1) + len(p2) - 4 #remove both", "!= root: nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort =", "nn in n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents of a node,", "len(path_found) > 0: return path_found else: raise ValueError(\"There is no element in the", "import namedtuple from typing import Dict, List, Callable Node = namedtuple('Node', 'name parent", "iteration, so I cast to list and slice, to get a fixed iterator", "root node to the first element that fulfils the predicate\"\"\" def find_DFS_(predicate,node) ->", "node in bottom_up: try: p1 = find_DFS(lambda n: n.name == 'YOU', node) p2", "N.B. I modify node_lookup under iteration, so I cast to list and slice,", "root, [], {}) nodes[node.parent] = parent_node for node in nodes.values(): if node.name !=", "I modify node_lookup under iteration, so I cast to list and slice, to", "calculate_hops(root: Node) -> int: nodes = all_descendants_BFS(root) bottom_up = reversed(nodes) for node in", "collections import namedtuple from typing import Dict, List, Callable Node = namedtuple('Node', 'name", "I cast to list and slice, to get a fixed iterator for node", "the tree that fulfils the predicate.\") def calculate_hops(root: Node) -> int: nodes =", "= all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n in reverse_topo_sort: if len(n.children) == 0:", "= find_DFS(lambda n: n.name == 'SAN', node) hops_to_santa = len(p1) + len(p2) -", "namedtuple from typing import Dict, List, Callable Node = namedtuple('Node', 'name parent children", "nodes: Dict['str', Node] = {root: Node(root, None, [], {})} for parent, child in", "p1 = find_DFS(lambda n: n.name == 'YOU', node) p2 = find_DFS(lambda n: n.name", "node) hops_to_santa = len(p1) + len(p2) - 4 #remove both endpoints of both", "paths return hops_to_santa except ValueError: pass raise ValueError(\"There is no common object that", "nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for", "fulfils the predicate.\") def calculate_hops(root: Node) -> int: nodes = all_descendants_BFS(root) bottom_up =", "for node in list(nodes.values())[:]: if not (node.parent in nodes.keys()) and node.name != root:", "node, in Breadth First Search order\"\"\" topo_sorted_nodes = [tree_root] for n in topo_sorted_nodes:", "try: p1 = find_DFS(lambda n: n.name == 'YOU', node) p2 = find_DFS(lambda n:", "int: nodes = all_descendants_BFS(root) bottom_up = reversed(nodes) for node in bottom_up: try: p1", "- 4 #remove both endpoints of both paths return hops_to_santa except ValueError: pass", "0 else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for nn in n.children) def all_descendants_BFS(tree_root:", "== 'SAN', node) hops_to_santa = len(p1) + len(p2) - 4 #remove both endpoints", "= Node(child, parent, [], {}) nodes[child] = node # N.B. 
I modify node_lookup", "raise ValueError(\"There is no element in the tree that fulfils the predicate.\") def", "def find_DFS(predicate: Callable[[Node], bool], node: Node) -> List[Node]: \"\"\"Returns the path in the", "reversed(nodes) for node in bottom_up: try: p1 = find_DFS(lambda n: n.name == 'YOU',", "to get a fixed iterator for node in list(nodes.values())[:]: if not (node.parent in", "of a node, in Breadth First Search order\"\"\" topo_sorted_nodes = [tree_root] for n", "[node] + dfs1 return [] path_found = find_DFS_(predicate,node) if len(path_found) > 0: return", "0: return path_found else: raise ValueError(\"There is no element in the tree that", "to list and slice, to get a fixed iterator for node in list(nodes.values())[:]:", "for node in bottom_up: try: p1 = find_DFS(lambda n: n.name == 'YOU', node)", "find_DFS(lambda n: n.name == 'SAN', node) hops_to_santa = len(p1) + len(p2) - 4", "tree that fulfils the predicate.\") def calculate_hops(root: Node) -> int: nodes = all_descendants_BFS(root)", "reversed(topo_sorted_nodes) for n in reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants] = 0 else:", "= node # N.B. I modify node_lookup under iteration, so I cast to", "iterator for node in list(nodes.values())[:]: if not (node.parent in nodes.keys()) and node.name !=", "= parent_node for node in nodes.values(): if node.name != root: nodes[node.parent].children.append(node) return nodes[root]", "no element in the tree that fulfils the predicate.\") def calculate_hops(root: Node) ->", "!= root: parent_node = Node(node.parent, root, [], {}) nodes[node.parent] = parent_node for node", "= find_DFS_(predicate,node) if len(path_found) > 0: return path_found else: raise ValueError(\"There is no", "-> List[Node]: \"\"\"All descendents of a node, in Breadth First Search order\"\"\" topo_sorted_nodes", "Callable Node = namedtuple('Node', 'name parent children data') def make_tree_from_adj_list(adj_list): root = 'COM'", "dfs1 return [] path_found = find_DFS_(predicate,node) if len(path_found) > 0: return path_found else:", "== 'YOU', node) p2 = find_DFS(lambda n: n.name == 'SAN', node) hops_to_santa =", "adj_list: node = Node(child, parent, [], {}) nodes[child] = node # N.B. 
I", "(node.parent in nodes.keys()) and node.name != root: parent_node = Node(node.parent, root, [], {})", "Breadth First Search order\"\"\" topo_sorted_nodes = [tree_root] for n in topo_sorted_nodes: topo_sorted_nodes +=", "nodes[node.parent] = parent_node for node in nodes.values(): if node.name != root: nodes[node.parent].children.append(node) return", "in topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node: Node)", "so I cast to list and slice, to get a fixed iterator for", "[node] elif len(node.children) == 0: return [] else: for c in node.children: dfs1", "child in adj_list: node = Node(child, parent, [], {}) nodes[child] = node #", "find_DFS_(predicate,node) -> List[Node]: if predicate(node): return [node] elif len(node.children) == 0: return []", "else: raise ValueError(\"There is no element in the tree that fulfils the predicate.\")", "len(p1) + len(p2) - 4 #remove both endpoints of both paths return hops_to_santa", "data') def make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str', Node] = {root: Node(root, None,", "= namedtuple('Node', 'name parent children data') def make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str',", "n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node: Node) -> List[Node]: \"\"\"Returns the", "Node] = {root: Node(root, None, [], {})} for parent, child in adj_list: node", "+ sum(nn.data[f_descendants] for nn in n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents", "Callable[[Node], bool], node: Node) -> List[Node]: \"\"\"Returns the path in the tree from", "if predicate(node): return [node] elif len(node.children) == 0: return [] else: for c", "bottom_up: try: p1 = find_DFS(lambda n: n.name == 'YOU', node) p2 = find_DFS(lambda", "list(nodes.values())[:]: if not (node.parent in nodes.keys()) and node.name != root: parent_node = Node(node.parent,", "return [] else: for c in node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1) >", "in node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1) > 0: return [node] + dfs1", "n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for nn in n.children) def all_descendants_BFS(tree_root: Node) ->", "[tree_root] for n in topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node],", "parent, [], {}) nodes[child] = node # N.B. 
I modify node_lookup under iteration,", "path_found else: raise ValueError(\"There is no element in the tree that fulfils the", "a fixed iterator for node in list(nodes.values())[:]: if not (node.parent in nodes.keys()) and", "None, [], {})} for parent, child in adj_list: node = Node(child, parent, [],", "compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n in reverse_topo_sort: if", "elif len(node.children) == 0: return [] else: for c in node.children: dfs1 =", "path_found = find_DFS_(predicate,node) if len(path_found) > 0: return path_found else: raise ValueError(\"There is", "nodes = all_descendants_BFS(root) bottom_up = reversed(nodes) for node in bottom_up: try: p1 =", "Node) -> List[Node]: \"\"\"Returns the path in the tree from the root node", "= 0 else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for nn in n.children) def", "all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents of a node, in Breadth First Search", "n: n.name == 'YOU', node) p2 = find_DFS(lambda n: n.name == 'SAN', node)", "endpoints of both paths return hops_to_santa except ValueError: pass raise ValueError(\"There is no", "== 0: return [] else: for c in node.children: dfs1 = find_DFS_(predicate,c) if", "= reversed(topo_sorted_nodes) for n in reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants] = 0", "return [] path_found = find_DFS_(predicate,node) if len(path_found) > 0: return path_found else: raise", "root = 'COM' nodes: Dict['str', Node] = {root: Node(root, None, [], {})} for", "nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n in", "node) p2 = find_DFS(lambda n: n.name == 'SAN', node) hops_to_santa = len(p1) +", "for n in reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants] = 0 else: n.data[f_descendants]", "node = Node(child, parent, [], {}) nodes[child] = node # N.B. 
I modify", "= reversed(nodes) for node in bottom_up: try: p1 = find_DFS(lambda n: n.name ==", "both paths return hops_to_santa except ValueError: pass raise ValueError(\"There is no common object", "Node) -> int: nodes = all_descendants_BFS(root) bottom_up = reversed(nodes) for node in bottom_up:", "> 0: return path_found else: raise ValueError(\"There is no element in the tree", "in list(nodes.values())[:]: if not (node.parent in nodes.keys()) and node.name != root: parent_node =", "from collections import namedtuple from typing import Dict, List, Callable Node = namedtuple('Node',", "if len(n.children) == 0: n.data[f_descendants] = 0 else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants]", "topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node: Node) ->", "topo_sorted_nodes += n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node: Node) -> List[Node]:", "0: return [node] + dfs1 return [] path_found = find_DFS_(predicate,node) if len(path_found) >", "'name parent children data') def make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str', Node] =", "node.name != root: nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort", "\"\"\"All descendents of a node, in Breadth First Search order\"\"\" topo_sorted_nodes = [tree_root]", "-> List[Node]: if predicate(node): return [node] elif len(node.children) == 0: return [] else:", "modify node_lookup under iteration, so I cast to list and slice, to get", "from typing import Dict, List, Callable Node = namedtuple('Node', 'name parent children data')", "Search order\"\"\" topo_sorted_nodes = [tree_root] for n in topo_sorted_nodes: topo_sorted_nodes += n.children return", "Node(root, None, [], {})} for parent, child in adj_list: node = Node(child, parent,", "def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents of a node, in Breadth First", "else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for nn in n.children) def all_descendants_BFS(tree_root: Node)", "len(n.children) + sum(nn.data[f_descendants] for nn in n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All", "for nn in n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents of a", "from the root node to the first element that fulfils the predicate\"\"\" def", "is no element in the tree that fulfils the predicate.\") def calculate_hops(root: Node)", "node to the first element that fulfils the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]:", "return hops_to_santa except ValueError: pass raise ValueError(\"There is no common object that one", "0: return [] else: for c in node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1)", "return [node] elif len(node.children) == 0: return [] else: for c in node.children:", "nodes[child] = node # N.B. 
I modify node_lookup under iteration, so I cast", "fixed iterator for node in list(nodes.values())[:]: if not (node.parent in nodes.keys()) and node.name", "first element that fulfils the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if predicate(node): return", "else: for c in node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1) > 0: return", "return [node] + dfs1 return [] path_found = find_DFS_(predicate,node) if len(path_found) > 0:", "= all_descendants_BFS(root) bottom_up = reversed(nodes) for node in bottom_up: try: p1 = find_DFS(lambda", "n: n.name == 'SAN', node) hops_to_santa = len(p1) + len(p2) - 4 #remove", "'COM' nodes: Dict['str', Node] = {root: Node(root, None, [], {})} for parent, child", "[], {})} for parent, child in adj_list: node = Node(child, parent, [], {})", "all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n in reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants]", "[], {}) nodes[child] = node # N.B. I modify node_lookup under iteration, so", "= [tree_root] for n in topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes def find_DFS(predicate:", "List[Node]: \"\"\"Returns the path in the tree from the root node to the", "= find_DFS_(predicate,c) if len(dfs1) > 0: return [node] + dfs1 return [] path_found", "hops_to_santa = len(p1) + len(p2) - 4 #remove both endpoints of both paths", "topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n in reverse_topo_sort: if len(n.children) ==", "find_DFS_(predicate,c) if len(dfs1) > 0: return [node] + dfs1 return [] path_found =", "the root node to the first element that fulfils the predicate\"\"\" def find_DFS_(predicate,node)", "len(node.children) == 0: return [] else: for c in node.children: dfs1 = find_DFS_(predicate,c)", "c in node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1) > 0: return [node] +", "'SAN', node) hops_to_santa = len(p1) + len(p2) - 4 #remove both endpoints of", "and slice, to get a fixed iterator for node in list(nodes.values())[:]: if not", "f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n in reverse_topo_sort: if len(n.children)", "return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node: Node) -> List[Node]: \"\"\"Returns the path", "ValueError(\"There is no element in the tree that fulfils the predicate.\") def calculate_hops(root:", "return path_found else: raise ValueError(\"There is no element in the tree that fulfils", "element that fulfils the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if predicate(node): return [node]", "hops_to_santa except ValueError: pass raise ValueError(\"There is no common object that one can", "predicate.\") def calculate_hops(root: Node) -> int: nodes = all_descendants_BFS(root) bottom_up = reversed(nodes) for", "p2 = find_DFS(lambda n: n.name == 'SAN', node) hops_to_santa = len(p1) + len(p2)", "root: parent_node = Node(node.parent, root, [], {}) nodes[node.parent] = parent_node for node in", "order\"\"\" topo_sorted_nodes = [tree_root] for n in topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes", "to the first element that fulfils the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if", "n in topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node:", "root: 
nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes)", "in reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants] = 0 else: n.data[f_descendants] = len(n.children)", "List[Node]: if predicate(node): return [node] elif len(node.children) == 0: return [] else: for", "topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node: Node) -> List[Node]: \"\"\"Returns the path in", "= len(n.children) + sum(nn.data[f_descendants] for nn in n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]:", "path in the tree from the root node to the first element that", "n.data[f_descendants] = 0 else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for nn in n.children)", "{}) nodes[node.parent] = parent_node for node in nodes.values(): if node.name != root: nodes[node.parent].children.append(node)", "Node(child, parent, [], {}) nodes[child] = node # N.B. I modify node_lookup under", "nodes.keys()) and node.name != root: parent_node = Node(node.parent, root, [], {}) nodes[node.parent] =", "predicate(node): return [node] elif len(node.children) == 0: return [] else: for c in", "Dict['str', Node] = {root: Node(root, None, [], {})} for parent, child in adj_list:", "make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str', Node] = {root: Node(root, None, [], {})}", "get a fixed iterator for node in list(nodes.values())[:]: if not (node.parent in nodes.keys())", "def make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str', Node] = {root: Node(root, None, [],", "not (node.parent in nodes.keys()) and node.name != root: parent_node = Node(node.parent, root, [],", "-> List[Node]: \"\"\"Returns the path in the tree from the root node to", "in n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents of a node, in", "= find_DFS(lambda n: n.name == 'YOU', node) p2 = find_DFS(lambda n: n.name ==", "namedtuple('Node', 'name parent children data') def make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str', Node]", "the first element that fulfils the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if predicate(node):", "Node(node.parent, root, [], {}) nodes[node.parent] = parent_node for node in nodes.values(): if node.name", "reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants] = 0 else: n.data[f_descendants] = len(n.children) +", "node.name != root: parent_node = Node(node.parent, root, [], {}) nodes[node.parent] = parent_node for", "+= n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool], node: Node) -> List[Node]: \"\"\"Returns", "len(p2) - 4 #remove both endpoints of both paths return hops_to_santa except ValueError:", "that fulfils the predicate.\") def calculate_hops(root: Node) -> int: nodes = all_descendants_BFS(root) bottom_up", "n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents of a node, in Breadth", "n in reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants] = 0 else: n.data[f_descendants] =", "n.name == 'YOU', node) p2 = find_DFS(lambda n: n.name == 'SAN', node) hops_to_santa", "bottom_up = reversed(nodes) for node in bottom_up: try: p1 = find_DFS(lambda n: n.name", "the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if predicate(node): return [node] elif len(node.children) ==", "the predicate.\") def 
calculate_hops(root: Node) -> int: nodes = all_descendants_BFS(root) bottom_up = reversed(nodes)", "return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n", "the tree from the root node to the first element that fulfils the", "predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if predicate(node): return [node] elif len(node.children) == 0:", "List[Node]: \"\"\"All descendents of a node, in Breadth First Search order\"\"\" topo_sorted_nodes =", "all_descendants_BFS(root) bottom_up = reversed(nodes) for node in bottom_up: try: p1 = find_DFS(lambda n:", "in the tree from the root node to the first element that fulfils", "find_DFS(lambda n: n.name == 'YOU', node) p2 = find_DFS(lambda n: n.name == 'SAN',", "def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root) reverse_topo_sort = reversed(topo_sorted_nodes) for n in reverse_topo_sort:", "+ dfs1 return [] path_found = find_DFS_(predicate,node) if len(path_found) > 0: return path_found", "cast to list and slice, to get a fixed iterator for node in", "in nodes.values(): if node.name != root: nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes", "both endpoints of both paths return hops_to_santa except ValueError: pass raise ValueError(\"There is", "{}) nodes[child] = node # N.B. I modify node_lookup under iteration, so I", "node_lookup under iteration, so I cast to list and slice, to get a", "Node = namedtuple('Node', 'name parent children data') def make_tree_from_adj_list(adj_list): root = 'COM' nodes:", "sum(nn.data[f_descendants] for nn in n.children) def all_descendants_BFS(tree_root: Node) -> List[Node]: \"\"\"All descendents of", "First Search order\"\"\" topo_sorted_nodes = [tree_root] for n in topo_sorted_nodes: topo_sorted_nodes += n.children", "= Node(node.parent, root, [], {}) nodes[node.parent] = parent_node for node in nodes.values(): if", "ValueError: pass raise ValueError(\"There is no common object that one can orbit hop", "def find_DFS_(predicate,node) -> List[Node]: if predicate(node): return [node] elif len(node.children) == 0: return", "-> int: nodes = all_descendants_BFS(root) bottom_up = reversed(nodes) for node in bottom_up: try:", "> 0: return [node] + dfs1 return [] path_found = find_DFS_(predicate,node) if len(path_found)", "[], {}) nodes[node.parent] = parent_node for node in nodes.values(): if node.name != root:", "Node) -> List[Node]: \"\"\"All descendents of a node, in Breadth First Search order\"\"\"", "0: n.data[f_descendants] = 0 else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for nn in", "len(dfs1) > 0: return [node] + dfs1 return [] path_found = find_DFS_(predicate,node) if", "{})} for parent, child in adj_list: node = Node(child, parent, [], {}) nodes[child]", "fulfils the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if predicate(node): return [node] elif len(node.children)", "that fulfils the predicate\"\"\" def find_DFS_(predicate,node) -> List[Node]: if predicate(node): return [node] elif", "# N.B. 
I modify node_lookup under iteration, so I cast to list and", "parent_node for node in nodes.values(): if node.name != root: nodes[node.parent].children.append(node) return nodes[root] def", "tree from the root node to the first element that fulfils the predicate\"\"\"", "for c in node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1) > 0: return [node]", "4 #remove both endpoints of both paths return hops_to_santa except ValueError: pass raise", "in the tree that fulfils the predicate.\") def calculate_hops(root: Node) -> int: nodes", "if node.name != root: nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes = all_descendants_BFS(tree_root)", "find_DFS(predicate: Callable[[Node], bool], node: Node) -> List[Node]: \"\"\"Returns the path in the tree", "node in nodes.values(): if node.name != root: nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'):", "raise ValueError(\"There is no common object that one can orbit hop through to", "node in list(nodes.values())[:]: if not (node.parent in nodes.keys()) and node.name != root: parent_node", "for node in nodes.values(): if node.name != root: nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root,", "topo_sorted_nodes = [tree_root] for n in topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes def", "parent_node = Node(node.parent, root, [], {}) nodes[node.parent] = parent_node for node in nodes.values():", "list and slice, to get a fixed iterator for node in list(nodes.values())[:]: if", "[] else: for c in node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1) > 0:", "if len(dfs1) > 0: return [node] + dfs1 return [] path_found = find_DFS_(predicate,node)", "'YOU', node) p2 = find_DFS(lambda n: n.name == 'SAN', node) hops_to_santa = len(p1)", "node # N.B. 
I modify node_lookup under iteration, so I cast to list", "in nodes.keys()) and node.name != root: parent_node = Node(node.parent, root, [], {}) nodes[node.parent]", "node.children: dfs1 = find_DFS_(predicate,c) if len(dfs1) > 0: return [node] + dfs1 return", "of both paths return hops_to_santa except ValueError: pass raise ValueError(\"There is no common", "import Dict, List, Callable Node = namedtuple('Node', 'name parent children data') def make_tree_from_adj_list(adj_list):", "slice, to get a fixed iterator for node in list(nodes.values())[:]: if not (node.parent", "= {root: Node(root, None, [], {})} for parent, child in adj_list: node =", "nodes.values(): if node.name != root: nodes[node.parent].children.append(node) return nodes[root] def compute_descendants(tree_root, f_descendants='n_descendants'): topo_sorted_nodes =", "{root: Node(root, None, [], {})} for parent, child in adj_list: node = Node(child,", "[] path_found = find_DFS_(predicate,node) if len(path_found) > 0: return path_found else: raise ValueError(\"There", "element in the tree that fulfils the predicate.\") def calculate_hops(root: Node) -> int:", "a node, in Breadth First Search order\"\"\" topo_sorted_nodes = [tree_root] for n in", "in adj_list: node = Node(child, parent, [], {}) nodes[child] = node # N.B.", "== 0: n.data[f_descendants] = 0 else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for nn", "descendents of a node, in Breadth First Search order\"\"\" topo_sorted_nodes = [tree_root] for", "find_DFS_(predicate,node) if len(path_found) > 0: return path_found else: raise ValueError(\"There is no element", "no common object that one can orbit hop through to get to Santa!\")", "bool], node: Node) -> List[Node]: \"\"\"Returns the path in the tree from the", "in Breadth First Search order\"\"\" topo_sorted_nodes = [tree_root] for n in topo_sorted_nodes: topo_sorted_nodes", "the path in the tree from the root node to the first element", "if not (node.parent in nodes.keys()) and node.name != root: parent_node = Node(node.parent, root,", "ValueError(\"There is no common object that one can orbit hop through to get", "reverse_topo_sort = reversed(topo_sorted_nodes) for n in reverse_topo_sort: if len(n.children) == 0: n.data[f_descendants] =", "= 'COM' nodes: Dict['str', Node] = {root: Node(root, None, [], {})} for parent,", "= len(p1) + len(p2) - 4 #remove both endpoints of both paths return", "and node.name != root: parent_node = Node(node.parent, root, [], {}) nodes[node.parent] = parent_node", "+ len(p2) - 4 #remove both endpoints of both paths return hops_to_santa except", "List, Callable Node = namedtuple('Node', 'name parent children data') def make_tree_from_adj_list(adj_list): root =", "for parent, child in adj_list: node = Node(child, parent, [], {}) nodes[child] =", "under iteration, so I cast to list and slice, to get a fixed", "#remove both endpoints of both paths return hops_to_santa except ValueError: pass raise ValueError(\"There", "parent, child in adj_list: node = Node(child, parent, [], {}) nodes[child] = node", "in bottom_up: try: p1 = find_DFS(lambda n: n.name == 'YOU', node) p2 =", "if len(path_found) > 0: return path_found else: raise ValueError(\"There is no element in", "len(n.children) == 0: n.data[f_descendants] = 0 else: n.data[f_descendants] = len(n.children) + sum(nn.data[f_descendants] for", "node: Node) -> List[Node]: \"\"\"Returns the path in the tree from the root", "is no common object that one can orbit hop through to get to", "typing import Dict, List, Callable 
Node = namedtuple('Node', 'name parent children data') def", "parent children data') def make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str', Node] = {root:", "children data') def make_tree_from_adj_list(adj_list): root = 'COM' nodes: Dict['str', Node] = {root: Node(root,", "def calculate_hops(root: Node) -> int: nodes = all_descendants_BFS(root) bottom_up = reversed(nodes) for node", "pass raise ValueError(\"There is no common object that one can orbit hop through", "for n in topo_sorted_nodes: topo_sorted_nodes += n.children return topo_sorted_nodes def find_DFS(predicate: Callable[[Node], bool],", "Dict, List, Callable Node = namedtuple('Node', 'name parent children data') def make_tree_from_adj_list(adj_list): root", "except ValueError: pass raise ValueError(\"There is no common object that one can orbit" ]
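# A minimal sketch of driving the functions above, assuming the usual
# "parent)child" orbit-map input. The sample map below is invented for
# illustration and is not part of the original file.
sample_map = ["COM)B", "B)C", "C)D", "D)YOU", "C)SAN"]
adj_list = [line.split(")") for line in sample_map]

tree = make_tree_from_adj_list(adj_list)
compute_descendants(tree)
# Summing descendant counts over all nodes equals the sum of node depths,
# i.e. the total number of direct and indirect orbits.
total_orbits = sum(n.data['n_descendants'] for n in all_descendants_BFS(tree))
print(total_orbits)          # 13 for the sample map
print(calculate_hops(tree))  # 1: YOU's parent D is one hop from SAN's parent C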
[ "self.assertTrue(parent is not None) parent.delete_child(n) # verify deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n", "i in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count = 0 def setUp(self): TestStringMethods.count", "print(\"Total Number of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random element search for", "\"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None)", "# Find root Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent", "is None) # delete for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count -", "is not None) self.assertTrue(parent is None) # Node not exist n, parent =", "return if(len(root.children) == 0 ): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1", "add_childs_to_leaf(root, num): if root is None: return if(len(root.children) == 0 ): for i", "not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) self.assertTrue(parent is None) #", "root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for i in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase):", "import DSADAssignment01V4 def add_childs_to_leaf(root, num): if root is None: return if(len(root.children) == 0", "in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n, parent =", "in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for i in root.children: add_childs_to_leaf(i, num)", "parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent is None) # Node not", "None) parent.delete_child(n) # verify deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent", "20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n", "def setUp(self): TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root,", "add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes added\", TestStringMethods.count) def", "= self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent is None) if __name__ == '__main__': unittest.main()", "random import DSADAssignment01V4 def add_childs_to_leaf(root, num): if root is None: return if(len(root.children) ==", "parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) self.assertTrue(parent is None) # delete for i", "# random element search for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count -", "= self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None) # Find root", "self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) 
self.assertTrue(parent is None) # delete for i in range(1,", "i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n, parent", "- 1)) print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent", "for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n,", "0 ): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for i", "add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total", "self.assertTrue(n is None) self.assertTrue(parent is None) # delete for i in range(1, 20):", "10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self):", "verify deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent is None) if", "in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count = 0 def setUp(self): TestStringMethods.count =", "self.assertTrue(parent is None) # Node not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is", "= self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) self.assertTrue(parent is None) # delete for i in", "None) self.assertTrue(parent is not None) # Find root Node n, parent = self.root.find_node_and_parent(\"Root\")", "random element search for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1))", "Number of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random element search for i", "10) print(\"Total Number of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random element search", "Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent is None) #", "self.assertTrue(n is not None) self.assertTrue(parent is None) # Node not exist n, parent", "num) class TestStringMethods(unittest.TestCase): count = 0 def setUp(self): TestStringMethods.count = 0 self.root =", "unittest import random import DSADAssignment01V4 def add_childs_to_leaf(root, num): if root is None: return", "add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): #", "root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count = 0 def setUp(self): TestStringMethods.count = 0", "deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent is None) if __name__", "10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes", "for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for i in root.children:", "n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent is None) # Node", "None: return 
if(len(root.children) == 0 ): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count +=", "test_find_rand_node_exist(self): # random element search for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count", "element search for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding", "self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10)", "node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None)", "not None) self.assertTrue(parent is not None) # Find root Node n, parent =", "num): if root is None: return if(len(root.children) == 0 ): for i in", "1)) print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is", "print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not", "10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number", "self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent is None) # Node not exist n,", "= self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None) parent.delete_child(n) # verify", "# verify deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent is None)", "exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) self.assertTrue(parent is None) # delete", "\"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None)", "TestStringMethods(unittest.TestCase): count = 0 def setUp(self): TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root,", "None) self.assertTrue(parent is None) # Node not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n", "self.assertTrue(parent is not None) # Find root Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n", "10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes added\", TestStringMethods.count)", "add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count = 0 def setUp(self): TestStringMethods.count = 0 self.root", "def test_find_rand_node_exist(self): # random element search for i in range(1, 20): key =", "is not None) parent.delete_child(n) # verify deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is", "= \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not", "TestStringMethods.count += 1 else: for i in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count", "parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None) 
parent.delete_child(n) #", "20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n", "0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root,", "1 else: for i in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count = 0", "parent.delete_child(n) # verify deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent is", "class TestStringMethods(unittest.TestCase): count = 0 def setUp(self): TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\")", "range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key)", "in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n, parent =", "n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None) parent.delete_child(n)", "= 0 def setUp(self): TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root,", "n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None) #", "TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root,", "search for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key)", "i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for i in root.children: add_childs_to_leaf(i,", "is not None) self.assertTrue(parent is not None) # Find root Node n, parent", "parent = self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent is None) if __name__ == '__main__':", "import random import DSADAssignment01V4 def add_childs_to_leaf(root, num): if root is None: return if(len(root.children)", "Find root Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent is", "def add_childs_to_leaf(root, num): if root is None: return if(len(root.children) == 0 ): for", "not None) self.assertTrue(parent is None) # Node not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\")", "nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random element search for i in range(1,", "add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes added\",", "is None) self.assertTrue(parent is None) # delete for i in range(1, 20): key", "root Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent is None)", "key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is", "add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) 
add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) print(\"Total Number of", "= \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not", "<gh_stars>0 import unittest import random import DSADAssignment01V4 def add_childs_to_leaf(root, num): if root is", "is not None) # Find root Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is", "if root is None: return if(len(root.children) == 0 ): for i in range(num):", "0 def setUp(self): TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10)", "range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n, parent = self.root.find_node_and_parent(key)", "# delete for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting", "None) self.assertTrue(parent is not None) parent.delete_child(n) # verify deleted n, parent = self.root.find_node_and_parent(key)", "add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random element", "self.assertTrue(n is not None) self.assertTrue(parent is not None) parent.delete_child(n) # verify deleted n,", "of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random element search for i in", "None) # Find root Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None)", "count = 0 def setUp(self): TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10)", "root is None: return if(len(root.children) == 0 ): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count)))", "# Node not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) self.assertTrue(parent is", "is not None) self.assertTrue(parent is not None) parent.delete_child(n) # verify deleted n, parent", "= DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root,", "is None) # Node not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None)", "None) # delete for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1))", "setUp(self): TestStringMethods.count = 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10)", "import unittest import random import DSADAssignment01V4 def add_childs_to_leaf(root, num): if root is None:", "TestStringMethods.count) def test_find_rand_node_exist(self): # random element search for i in range(1, 20): key", "n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) self.assertTrue(parent is None) # delete for", "if(len(root.children) == 0 ): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else:", "None) # Node not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") 
self.assertTrue(n is None) self.assertTrue(parent", "DSADAssignment01V4 def add_childs_to_leaf(root, num): if root is None: return if(len(root.children) == 0 ):", "n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is None) self.assertTrue(parent is None) if __name__ ==", "Node not exist n, parent = self.root.find_node_and_parent(\"NodeNotExist\") self.assertTrue(n is None) self.assertTrue(parent is None)", "is None: return if(len(root.children) == 0 ): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count", "not None) # Find root Node n, parent = self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not", "not None) self.assertTrue(parent is not None) parent.delete_child(n) # verify deleted n, parent =", "for i in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count = 0 def setUp(self):", "key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is", "self.assertTrue(n is not None) self.assertTrue(parent is not None) # Find root Node n,", "10) add_childs_to_leaf(self.root, 10) print(\"Total Number of nodes added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random", "added\", TestStringMethods.count) def test_find_rand_node_exist(self): # random element search for i in range(1, 20):", "self.assertTrue(parent is None) # delete for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count", "+= 1 else: for i in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count =", "): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for i in", "= 0 self.root = DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10)", "None) self.assertTrue(parent is None) # delete for i in range(1, 20): key =", "self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None) parent.delete_child(n) # verify deleted", "print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not", "- 1)) print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent", "== 0 ): for i in range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for", "parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is not None) # Find", "not None) parent.delete_child(n) # verify deleted n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is None)", "delete for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Deleting node=\",key)", "range(num): root.append_child(DSADAssignment01V4.TreeNode(\"child{0}\".format(TestStringMethods.count))) TestStringMethods.count += 1 else: for i in root.children: add_childs_to_leaf(i, num) class", "i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n, parent", "self.root.find_node_and_parent(key) self.assertTrue(n is not None) 
self.assertTrue(parent is not None) # Find root Node", "else: for i in root.children: add_childs_to_leaf(i, num) class TestStringMethods(unittest.TestCase): count = 0 def", "for i in range(1, 20): key = \"child{0}\".format(random.randrange(TestStringMethods.count - 1)) print(\"Finding node=\",key) n,", "1)) print(\"Deleting node=\",key) n, parent = self.root.find_node_and_parent(key) self.assertTrue(n is not None) self.assertTrue(parent is", "= self.root.find_node_and_parent(\"Root\") self.assertTrue(n is not None) self.assertTrue(parent is None) # Node not exist", "DSADAssignment01V4.TreeNode(\"Root\") add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10) add_childs_to_leaf(self.root, 10)" ]
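# DSADAssignment01V4 itself is not shown. This is a hypothetical, minimal
# TreeNode satisfying just the interface the test exercises (children,
# append_child, delete_child, find_node_and_parent); the real assignment's
# internals are assumptions.
class TreeNode:
    def __init__(self, key):
        self.key = key
        self.children = []

    def append_child(self, node):
        self.children.append(node)

    def delete_child(self, node):
        self.children.remove(node)

    def find_node_and_parent(self, key, parent=None):
        # Depth-first search returning (node, parent); (None, None) if absent.
        if self.key == key:
            return self, parent
        for child in self.children:
            n, p = child.find_node_and_parent(key, self)
            if n is not None:
                return n, p
        return None, None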
[ "= mtz.object(file_name=\"3nd4.mtz\") # Only works with mtz.object. # Does not work if mtz", "with mtz.object. # Does not work if mtz is read in with iotbx.file_reader.", "mtz.object(file_name=\"3nd4.mtz\") # Only works with mtz.object. # Does not work if mtz is", "Does not work if mtz is read in with iotbx.file_reader. miller_arrays_dict = mtz_obj.as_miller_arrays_dict()", "mtz.object. # Does not work if mtz is read in with iotbx.file_reader. miller_arrays_dict", "mtz mtz_obj = mtz.object(file_name=\"3nd4.mtz\") # Only works with mtz.object. # Does not work", "from iotbx import mtz mtz_obj = mtz.object(file_name=\"3nd4.mtz\") # Only works with mtz.object. #", "# Does not work if mtz is read in with iotbx.file_reader. miller_arrays_dict =", "mtz_obj = mtz.object(file_name=\"3nd4.mtz\") # Only works with mtz.object. # Does not work if", "# Only works with mtz.object. # Does not work if mtz is read", "Only works with mtz.object. # Does not work if mtz is read in", "works with mtz.object. # Does not work if mtz is read in with", "iotbx import mtz mtz_obj = mtz.object(file_name=\"3nd4.mtz\") # Only works with mtz.object. # Does", "import mtz mtz_obj = mtz.object(file_name=\"3nd4.mtz\") # Only works with mtz.object. # Does not" ]
[ "logger.info( \"Set sign: {} and scale: {:04.2f} for {}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC)", "= 1e-16 self.level_high = self.level_low = 0 self.levels = 2 ** self.num_bits if", "scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True)", "nn from torch import distributed from .initializers import MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize,", "error_msgs, module): if module.state_dict_name: for module_key in module.state_dict().keys(): candidate = module.state_dict_name + '.'", "self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low, self.level_high, self.scale, self.eps) @MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def _initializer(module, name, min_value,", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "module.state_dict_name + '.' + module_key if candidate in state_dict: module.initialized = True def", "1 self.level_low = 0 @property def signed(self): return self.signed_tensor.item() == 1 @signed.setter def", "@MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_): if min_value.item == np.inf or max_value.item()", "self.bits - 1 @property def level_low(self): return 0 @property def levels(self): return 2", "\"symmetric\" ASYMMETRIC = \"asymmetric\" class BinarizationMode: XNOR = \"xnor\" DOREFA = \"dorefa\" QuantizationParams", "return symmetric_quantize(x, self.levels, self.level_low, self.level_high, self.scale, self.eps) @MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def _initializer(module, name, min_value, max_value,", "distributed.broadcast(module.scale, 0) distributed.broadcast(module.signed_tensor, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign: {} and", "input_shape class BaseQuantizer(nn.Module): def __init__(self, config: QuantizerConfig): super().__init__() self.config = config self.init_stage =", "return asymmetric_quantize(x, self.levels, self.level_low, self.level_high, self.input_low, self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name, min_value,", "self.load_listener = LoadStateListener(self) def forward(self, x): if self.init_stage: return x return self.quantize(x) def", "module.input_range.data.fill_(range_) if distributed_: distributed.broadcast(module.input_low, 0) distributed.broadcast(module.input_range, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(\"Set input_low:", "scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.input_low = nn.Parameter(torch.zeros(scale_shape), requires_grad=True)", "0 self.levels = 2 ** self.num_bits if self.is_weights: self.levels -= 1 def set_level_ranges(self):", "False) class QuantizerConfig: def __init__(self, params: QuantizationParams, input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False): self.params =", "def signed(self, signed: bool): self.signed_tensor.fill_(signed) def quantize(self, x): self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low,", "= Registry('quantization_modules') 
BINARIZATION_MODULES = Registry('binarization_modules') class QuantizationMode: SYMMETRIC = \"symmetric\" ASYMMETRIC = \"asymmetric\"", "the specific language governing permissions and limitations under the License. \"\"\" import logging", "License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "QuantizationParams, input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False): self.params = params self.is_weights = is_weights self.within_signed_scope =", "import numpy as np import torch import torch.nn as nn from torch import", "x return self.quantize(x) def quantize(self, x): raise NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def", "local_metadata, strict, missing_keys, unexpected_keys, error_msgs, module): if module.state_dict_name: for module_key in module.state_dict().keys(): candidate", "a quantization module are going to be updated by new values from state_dict", "= is_weights self.within_signed_scope = within_signed_scope self.per_channel = per_channel self.input_shape = input_shape class BaseQuantizer(nn.Module):", "@QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.input_shape = config.input_shape self.per_channel = config.per_channel", "@signed.setter def signed(self, signed: bool): self.signed_tensor.fill_(signed) def quantize(self, x): self.set_level_ranges() return symmetric_quantize(x, self.levels,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the", "per_channel self.input_shape = input_shape class BaseQuantizer(nn.Module): def __init__(self, config: QuantizerConfig): super().__init__() self.config =", "(c) 2019 Intel Corporation Licensed under the Apache License, Version 2.0 (the \"License\");", "self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage = False self.eps = 1e-16 self.level_high =", "= config.per_channel params = config.params self.bits = params.bits scale_shape = 1 if self.per_channel:", "self.per_channel = config.per_channel self.is_weights = config.is_weights self.within_signed_scope = config.within_signed_scope params = config.params self.num_bits", "BaseQuantizer(nn.Module): def __init__(self, config: QuantizerConfig): super().__init__() self.config = config self.init_stage = False self.initialized", "** self.num_bits if self.is_weights: self.levels -= 1 def set_level_ranges(self): if self.signed: self.level_high =", "if module.state_dict_name: for module_key in module.state_dict().keys(): candidate = module.state_dict_name + '.' 
+ module_key", "and scale: {:04.2f} for {}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self,", "= config.params self.bits = params.bits scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape,", "self.params = params self.is_weights = is_weights self.within_signed_scope = within_signed_scope self.per_channel = per_channel self.input_shape", "self.input_shape = config.input_shape self.per_channel = config.per_channel params = config.params self.bits = params.bits scale_shape", "signed: bool): self.signed_tensor.fill_(signed) def quantize(self, x): self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low, self.level_high, self.scale,", "AttributeError('Statistics is not collected for {}'.format(name)) module.input_low.data.fill_(min_value.item()) range_ = (max_value - min_value).item() if", "self.per_channel = config.per_channel params = config.params self.bits = params.bits scale_shape = 1 if", "self.levels, self.level_low, self.level_high, self.input_low, self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_):", "if sign != module.signed: logger.warning(\"signed set incorrectly\") module.signed = int(sign) if abs(max_value) >", "is_weights self.within_signed_scope = within_signed_scope self.per_channel = per_channel self.input_shape = input_shape class BaseQuantizer(nn.Module): def", "config.input_shape self.per_channel = config.per_channel params = config.params self.bits = params.bits scale_shape = 1", "'.' + module_key if candidate in state_dict: module.initialized = True def close(self): self.hook.remove()", "symmetric_quantize, asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from ..registry import Registry from ..utils import", "the License for the specific language governing permissions and limitations under the License.", "def __init__(self, params: QuantizationParams, input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False): self.params = params self.is_weights =", "** self.num_bits - 1 self.level_low = 0 @property def signed(self): return self.signed_tensor.item() ==", "self.is_weights) self.input_low = nn.Parameter(torch.zeros(scale_shape), requires_grad=True) self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps = 1e-16 @property", "x): return asymmetric_quantize(x, self.levels, self.level_low, self.level_high, self.input_low, self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name,", "class LoadStateListener: \"\"\" Check whether a quantization module are going to be updated", "License for the specific language governing permissions and limitations under the License. \"\"\"", "module): if module.state_dict_name: for module_key in module.state_dict().keys(): candidate = module.state_dict_name + '.' +", "Unless required by applicable law or agreed to in writing, software distributed under", "{}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights =", "candidate = module.state_dict_name + '.' 
+ module_key if candidate in state_dict: module.initialized =", "= config.params self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics = False scale_shape", "x): self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low, self.level_high, self.scale, self.eps) @MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def _initializer(module, name,", "torch import torch.nn as nn from torch import distributed from .initializers import MIN_MAX_INITIALIZERS", "def quantize(self, x): self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low, self.level_high, self.scale, self.eps) @MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def", "0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(\"Set input_low: {:04.2f} and input_range: {:04.2f} for {}\"", "max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(\"Set input_low: {:04.2f} and input_range: {:04.2f} for {}\" .format(module.input_low.item(), module.input_range.item(), name))", "import MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize, asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from ..registry", "= Registry('binarization_modules') class QuantizationMode: SYMMETRIC = \"symmetric\" ASYMMETRIC = \"asymmetric\" class BinarizationMode: XNOR", "def quantize(self, x): return asymmetric_quantize(x, self.levels, self.level_low, self.level_high, self.input_low, self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def", "@QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights = config.is_weights self.input_shape = config.input_shape", "raise AttributeError('Statistics is not collected for {}'.format(name)) module.input_low.data.fill_(min_value.item()) range_ = (max_value - min_value).item()", "params: QuantizationParams, input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False): self.params = params self.is_weights = is_weights self.within_signed_scope", "the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "self.level_low = 0 @property def signed(self): return self.signed_tensor.item() == 1 @signed.setter def signed(self,", "@COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.input_shape = config.input_shape self.per_channel =", "License, Version 2.0 (the \"License\"); you may not use this file except in", "to be updated by new values from state_dict or checkpoint. 
\"\"\" def __init__(self,", "@COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights = config.is_weights self.input_shape =", "def quantize(self, x): raise NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config)", "level_high(self): return 2 ** self.bits - 1 @property def level_low(self): return 0 @property", "is not collected for {}'.format(name)) module.input_low.data.fill_(min_value.item()) range_ = (max_value - min_value).item() if range_", "else: self.level_high = 2 ** self.num_bits - 1 self.level_low = 0 @property def", "self.input_shape = input_shape class BaseQuantizer(nn.Module): def __init__(self, config: QuantizerConfig): super().__init__() self.config = config", "self.level_low += 1 else: self.level_high = 2 ** self.num_bits - 1 self.level_low =", "level_low(self): return 0 @property def levels(self): return 2 ** self.bits def quantize(self, x):", "min_value, max_value, distributed_): if min_value.item == np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics", "self.num_bits - 1 self.level_low = 0 @property def signed(self): return self.signed_tensor.item() == 1", "self.collect_scale_statistics = False scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale", "for the specific language governing permissions and limitations under the License. \"\"\" import", "config.per_channel self.is_weights = config.is_weights self.within_signed_scope = config.within_signed_scope params = config.params self.num_bits = params.bits", "as nn from torch import distributed from .initializers import MIN_MAX_INITIALIZERS from .quantize_functions import", "min_value.item() == np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is not collected for", "Check whether a quantization module are going to be updated by new values", "signed(self): return self.signed_tensor.item() == 1 @signed.setter def signed(self, signed: bool): self.signed_tensor.fill_(signed) def quantize(self,", "bool): self.signed_tensor.fill_(signed) def quantize(self, x): self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low, self.level_high, self.scale, self.eps)", "0 or module.within_signed_scope if sign != module.signed: logger.warning(\"signed set incorrectly\") module.signed = int(sign)", "for {}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights", "0.1: module.scale.data.fill_(max_value.item()) if distributed_: distributed.broadcast(module.scale, 0) distributed.broadcast(module.signed_tensor, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(", "self.initialized = False self.state_dict_name = None class LoadStateListener: \"\"\" Check whether a quantization", "return self.quantize(x) def quantize(self, x): raise NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self,", "@property def signed(self): return True @property def 
level_high(self): return 2 ** self.bits -", "config.within_signed_scope params = config.params self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics =", "QUANTIZATION_MODULES = Registry('quantization_modules') BINARIZATION_MODULES = Registry('binarization_modules') class QuantizationMode: SYMMETRIC = \"symmetric\" ASYMMETRIC =", "software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "by applicable law or agreed to in writing, software distributed under the License", "distributed.broadcast(module.signed_tensor, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign: {} and scale: {:04.2f}", "= True def close(self): self.hook.remove() self.load_listener = LoadStateListener(self) def forward(self, x): if self.init_stage:", "config.is_weights self.within_signed_scope = config.within_signed_scope params = config.params self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]),", "__init__(self, config: QuantizerConfig): super().__init__() self.config = config self.init_stage = False self.initialized = False", "signed(self, signed: bool): self.signed_tensor.fill_(signed) def quantize(self, x): self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low, self.level_high,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License", "self.scale, self.eps) @MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_): if min_value.item == np.inf", "np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is not collected for {}'.format(name)) sign", "self.input_low = nn.Parameter(torch.zeros(scale_shape), requires_grad=True) self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps = 1e-16 @property def", "self.level_high = 2 ** self.num_bits - 1 self.level_low = 0 @property def signed(self):", "super().__init__() self.config = config self.init_stage = False self.initialized = False self.state_dict_name = None", "= self.level_low = 0 self.levels = 2 ** self.num_bits if self.is_weights: self.levels -=", "= -(self.level_high + 1) if self.is_weights: self.level_low += 1 else: self.level_high = 2", "be updated by new values from state_dict or checkpoint. \"\"\" def __init__(self, module):", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "distributed_): if min_value.item == np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is not", "self.init_stage = False self.initialized = False self.state_dict_name = None class LoadStateListener: \"\"\" Check", "np import torch import torch.nn as nn from torch import distributed from .initializers", "in compliance with the License. You may obtain a copy of the License", "not collected for {}'.format(name)) module.input_low.data.fill_(min_value.item()) range_ = (max_value - min_value).item() if range_ >", "for {}'.format(name)) module.input_low.data.fill_(min_value.item()) range_ = (max_value - min_value).item() if range_ > 0.01: module.input_range.data.fill_(range_)", "- 1 self.level_low = -(self.level_high + 1) if self.is_weights: self.level_low += 1 else:", "KIND, either express or implied. 
See the License for the specific language governing", "incorrectly\") module.signed = int(sign) if abs(max_value) > 0.1: module.scale.data.fill_(max_value.item()) if distributed_: distributed.broadcast(module.scale, 0)", "class QuantizerConfig: def __init__(self, params: QuantizationParams, input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False): self.params = params", "self.level_low, self.level_high, self.input_low, self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_): if", "- 1 self.level_low = 0 @property def signed(self): return self.signed_tensor.item() == 1 @signed.setter", "ASYMMETRIC = \"asymmetric\" class BinarizationMode: XNOR = \"xnor\" DOREFA = \"dorefa\" QuantizationParams =", "in writing, software distributed under the License is distributed on an \"AS IS\"", "= \"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams', ['bits', 'mode', 'signed', 'signed_scope', 'per_channel'] ) QuantizationParams.__new__.__defaults__", "quantize(self, x): self.set_level_ranges() return symmetric_quantize(x, self.levels, self.level_low, self.level_high, self.scale, self.eps) @MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def _initializer(module,", "in module.state_dict().keys(): candidate = module.state_dict_name + '.' + module_key if candidate in state_dict:", "writing, software distributed under the License is distributed on an \"AS IS\" BASIS,", "max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign: {} and scale: {:04.2f} for {}\".format(module.signed, module.scale.item(), name))", "self.hook.remove() self.load_listener = LoadStateListener(self) def forward(self, x): if self.init_stage: return x return self.quantize(x)", "= int(sign) if abs(max_value) > 0.1: module.scale.data.fill_(max_value.item()) if distributed_: distributed.broadcast(module.scale, 0) distributed.broadcast(module.signed_tensor, 0)", "distributed_: distributed.broadcast(module.input_low, 0) distributed.broadcast(module.input_range, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(\"Set input_low: {:04.2f} and", "self.levels -= 1 def set_level_ranges(self): if self.signed: self.level_high = 2 ** (self.num_bits -", "= \"xnor\" DOREFA = \"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams', ['bits', 'mode', 'signed', 'signed_scope',", "or agreed to in writing, software distributed under the License is distributed on", "self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_): if min_value.item() == np.inf", "-np.inf: raise AttributeError('Statistics is not collected for {}'.format(name)) module.input_low.data.fill_(min_value.item()) range_ = (max_value -", "and limitations under the License. \"\"\" import logging from collections import namedtuple from", "self.level_low = 0 self.levels = 2 ** self.num_bits if self.is_weights: self.levels -= 1", "self.is_weights: self.levels -= 1 def set_level_ranges(self): if self.signed: self.level_high = 2 ** (self.num_bits", "language governing permissions and limitations under the License. 
\"\"\" import logging from collections", "1 self.level_low = -(self.level_high + 1) if self.is_weights: self.level_low += 1 else: self.level_high", "min_value, max_value, distributed_): if min_value.item() == np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics", "namedtuple( 'QuantizationParams', ['bits', 'mode', 'signed', 'signed_scope', 'per_channel'] ) QuantizationParams.__new__.__defaults__ = (8, QuantizationMode.SYMMETRIC, False,", "logging.getLogger(__name__) QUANTIZATION_MODULES = Registry('quantization_modules') BINARIZATION_MODULES = Registry('binarization_modules') class QuantizationMode: SYMMETRIC = \"symmetric\" ASYMMETRIC", "= nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps = 1e-16 @property def signed(self): return True @property def", "def __init__(self, module): # pylint: disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module)) def hook_fn(self, state_dict,", "module.signed = int(sign) if abs(max_value) > 0.1: module.scale.data.fill_(max_value.item()) if distributed_: distributed.broadcast(module.scale, 0) distributed.broadcast(module.signed_tensor,", "{:04.2f} for {}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config)", "1e-16 @property def signed(self): return True @property def level_high(self): return 2 ** self.bits", "module.input_low.data.fill_(min_value.item()) range_ = (max_value - min_value).item() if range_ > 0.01: module.input_range.data.fill_(range_) if distributed_:", "SymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.input_shape = config.input_shape self.per_channel = config.per_channel self.is_weights =", "= False scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale =", "state_dict: module.initialized = True def close(self): self.hook.remove() self.load_listener = LoadStateListener(self) def forward(self, x):", "= config.within_signed_scope params = config.params self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics", "= get_per_channel_scale_shape(self.input_shape, self.is_weights) self.input_low = nn.Parameter(torch.zeros(scale_shape), requires_grad=True) self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps =", "values from state_dict or checkpoint. \"\"\" def __init__(self, module): # pylint: disable=protected-access self.hook", "OR CONDITIONS OF ANY KIND, either express or implied. See the License for", "unexpected_keys, error_msgs, module): if module.state_dict_name: for module_key in module.state_dict().keys(): candidate = module.state_dict_name +", "LoadStateListener: \"\"\" Check whether a quantization module are going to be updated by", "OF ANY KIND, either express or implied. 
See the License for the specific", "== np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is not collected for {}'.format(name))", "False, [], False) class QuantizerConfig: def __init__(self, params: QuantizationParams, input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False):", "sign != module.signed: logger.warning(\"signed set incorrectly\") module.signed = int(sign) if abs(max_value) > 0.1:", "may not use this file except in compliance with the License. You may", "False self.eps = 1e-16 self.level_high = self.level_low = 0 self.levels = 2 **", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "functools import partial import numpy as np import torch import torch.nn as nn", "= (8, QuantizationMode.SYMMETRIC, False, [], False) class QuantizerConfig: def __init__(self, params: QuantizationParams, input_shape=None,", "self.level_high = self.level_low = 0 self.levels = 2 ** self.num_bits if self.is_weights: self.levels", "class QuantizationMode: SYMMETRIC = \"symmetric\" ASYMMETRIC = \"asymmetric\" class BinarizationMode: XNOR = \"xnor\"", "1 else: self.level_high = 2 ** self.num_bits - 1 self.level_low = 0 @property", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "def set_level_ranges(self): if self.signed: self.level_high = 2 ** (self.num_bits - 1) - 1", "self.is_weights = config.is_weights self.input_shape = config.input_shape self.per_channel = config.per_channel params = config.params self.bits", "min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign: {} and scale: {:04.2f} for {}\".format(module.signed, module.scale.item(),", "import get_per_channel_scale_shape logger = logging.getLogger(__name__) QUANTIZATION_MODULES = Registry('quantization_modules') BINARIZATION_MODULES = Registry('binarization_modules') class QuantizationMode:", "= 2 ** (self.num_bits - 1) - 1 self.level_low = -(self.level_high + 1)", "distributed.broadcast(module.input_low, 0) distributed.broadcast(module.input_range, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(\"Set input_low: {:04.2f} and input_range:", "= module.state_dict_name + '.' + module_key if candidate in state_dict: module.initialized = True", "0 @property def levels(self): return 2 ** self.bits def quantize(self, x): return asymmetric_quantize(x,", "\"asymmetric\" class BinarizationMode: XNOR = \"xnor\" DOREFA = \"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams',", "if distributed_: distributed.broadcast(module.scale, 0) distributed.broadcast(module.signed_tensor, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign:", "self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps = 1e-16 @property def signed(self): return True @property", "\"\"\" Check whether a quantization module are going to be updated by new", "state_dict or checkpoint. 
\"\"\" def __init__(self, module): # pylint: disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn,", "= 0 self.levels = 2 ** self.num_bits if self.is_weights: self.levels -= 1 def", "['bits', 'mode', 'signed', 'signed_scope', 'per_channel'] ) QuantizationParams.__new__.__defaults__ = (8, QuantizationMode.SYMMETRIC, False, [], False)", "import distributed from .initializers import MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize, asymmetric_quantize from ..layer_utils", "False scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape),", "within_signed_scope=False): self.params = params self.is_weights = is_weights self.within_signed_scope = within_signed_scope self.per_channel = per_channel", "[], False) class QuantizerConfig: def __init__(self, params: QuantizationParams, input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False): self.params", "AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights = config.is_weights self.input_shape = config.input_shape self.per_channel =", "See the License for the specific language governing permissions and limitations under the", "np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is not collected for {}'.format(name)) module.input_low.data.fill_(min_value.item())", "max_value.item() == -np.inf: raise AttributeError('Statistics is not collected for {}'.format(name)) module.input_low.data.fill_(min_value.item()) range_ =", "params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics = False scale_shape = 1 if self.per_channel:", "module_key in module.state_dict().keys(): candidate = module.state_dict_name + '.' + module_key if candidate in", "disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module)) def hook_fn(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys,", "new values from state_dict or checkpoint. \"\"\" def __init__(self, module): # pylint: disable=protected-access", "def signed(self): return True @property def level_high(self): return 2 ** self.bits - 1", "partial import numpy as np import torch import torch.nn as nn from torch", "class BaseQuantizer(nn.Module): def __init__(self, config: QuantizerConfig): super().__init__() self.config = config self.init_stage = False", "= module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module)) def hook_fn(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs, module):", "this file except in compliance with the License. 
class BaseQuantizer(nn.Module):
    def __init__(self, config: QuantizerConfig):
        super().__init__()
        self.config = config
        self.init_stage = False
        self.initialized = False
        self.state_dict_name = None

        class LoadStateListener:
            """
            Checks whether a quantization module is going to be updated
            by new values from a state_dict or checkpoint.
            """

            def __init__(self, module):
                # pylint: disable=protected-access
                self.hook = module._register_load_state_dict_pre_hook(
                    partial(self.hook_fn, module=module))

            def hook_fn(self, state_dict, prefix, local_metadata, strict,
                        missing_keys, unexpected_keys, error_msgs, module):
                if module.state_dict_name:
                    for module_key in module.state_dict().keys():
                        candidate = module.state_dict_name + '.' + module_key
                        if candidate in state_dict:
                            module.initialized = True

            def close(self):
                self.hook.remove()

        self.load_listener = LoadStateListener(self)

    def forward(self, x):
        if self.init_stage:
            return x
        return self.quantize(x)

    def quantize(self, x):
        raise NotImplementedError
\"\"\" def __init__(self, module): #", "= nn.Parameter(torch.zeros(scale_shape), requires_grad=True) self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps = 1e-16 @property def signed(self):", "BINARIZATION_MODULES = Registry('binarization_modules') class QuantizationMode: SYMMETRIC = \"symmetric\" ASYMMETRIC = \"asymmetric\" class BinarizationMode:", "2.0 (the \"License\"); you may not use this file except in compliance with", "config self.init_stage = False self.initialized = False self.state_dict_name = None class LoadStateListener: \"\"\"", "def hook_fn(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs, module): if module.state_dict_name: for", "or max_value.item() == -np.inf: raise AttributeError('Statistics is not collected for {}'.format(name)) sign =", "scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage = False self.eps =", "self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics = False scale_shape = 1", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "whether a quantization module are going to be updated by new values from", "distributed_): if min_value.item() == np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is not", "(self.num_bits - 1) - 1 self.level_low = -(self.level_high + 1) if self.is_weights: self.level_low", "< 0 or module.within_signed_scope if sign != module.signed: logger.warning(\"signed set incorrectly\") module.signed =", "x): if self.init_stage: return x return self.quantize(x) def quantize(self, x): raise NotImplementedError @COMPRESSION_MODULES.register()", "nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage = False self.eps = 1e-16 self.level_high = self.level_low = 0", ") QuantizationParams.__new__.__defaults__ = (8, QuantizationMode.SYMMETRIC, False, [], False) class QuantizerConfig: def __init__(self, params:", "the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "2 ** self.num_bits - 1 self.level_low = 0 @property def signed(self): return self.signed_tensor.item()", "set_level_ranges(self): if self.signed: self.level_high = 2 ** (self.num_bits - 1) - 1 self.level_low", "- min_value).item() if range_ > 0.01: module.input_range.data.fill_(range_) if distributed_: distributed.broadcast(module.input_low, 0) distributed.broadcast(module.input_range, 0)", "from functools import partial import numpy as np import torch import torch.nn as", "= \"symmetric\" ASYMMETRIC = \"asymmetric\" class BinarizationMode: XNOR = \"xnor\" DOREFA = \"dorefa\"", "(max_value - min_value).item() if range_ > 0.01: module.input_range.data.fill_(range_) if distributed_: distributed.broadcast(module.input_low, 0) distributed.broadcast(module.input_range,", "if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage = False", "1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage =", "requires_grad=False) self.collect_scale_statistics = False scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights)", "False self.state_dict_name = None class LoadStateListener: \"\"\" Check whether a quantization module are", "(the \"License\"); you may not use this file except in compliance with the", "min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(\"Set input_low: {:04.2f} and input_range: {:04.2f} for {}\" .format(module.input_low.item(), module.input_range.item(),", "module.state_dict().keys(): candidate = module.state_dict_name + '.' 
+ module_key if candidate in state_dict: module.initialized", "return 2 ** self.bits def quantize(self, x): return asymmetric_quantize(x, self.levels, self.level_low, self.level_high, self.input_low,", "= 2 ** self.num_bits - 1 self.level_low = 0 @property def signed(self): return", "0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign: {} and scale: {:04.2f} for", "_initializer(module, name, min_value, max_value, distributed_): if min_value.item() == np.inf or max_value.item() == -np.inf:", "logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign: {} and scale: {:04.2f} for {}\".format(module.signed,", "requires_grad=True) self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps = 1e-16 @property def signed(self): return True", "2 ** self.num_bits if self.is_weights: self.levels -= 1 def set_level_ranges(self): if self.signed: self.level_high", "== 1 @signed.setter def signed(self, signed: bool): self.signed_tensor.fill_(signed) def quantize(self, x): self.set_level_ranges() return", "from torch import distributed from .initializers import MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize, asymmetric_quantize", "import Registry from ..utils import get_per_channel_scale_shape logger = logging.getLogger(__name__) QUANTIZATION_MODULES = Registry('quantization_modules') BINARIZATION_MODULES", "\"\"\" Copyright (c) 2019 Intel Corporation Licensed under the Apache License, Version 2.0", "distributed from .initializers import MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize, asymmetric_quantize from ..layer_utils import", "not collected for {}'.format(name)) sign = min_value.item() < 0 or module.within_signed_scope if sign", "self.input_shape = config.input_shape self.per_channel = config.per_channel self.is_weights = config.is_weights self.within_signed_scope = config.within_signed_scope params", "class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights = config.is_weights self.input_shape = config.input_shape self.per_channel", "quantize(self, x): raise NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.input_shape", "asymmetric_quantize(x, self.levels, self.level_low, self.level_high, self.input_low, self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name, min_value, max_value,", "{} and scale: {:04.2f} for {}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def", "max_value, distributed_): if min_value.item == np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is", ".initializers import MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize, asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from", "-(self.level_high + 1) if self.is_weights: self.level_low += 1 else: self.level_high = 2 **", "** (self.num_bits - 1) - 1 self.level_low = -(self.level_high + 1) if self.is_weights:", "@property def level_low(self): return 0 @property def levels(self): return 2 ** self.bits def", "Copyright (c) 2019 Intel Corporation Licensed under the Apache 
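# Worked example of the level ranges for num_bits = 8:
#   signed activations: level_high = 2**7 - 1 = 127, level_low = -128 (levels = 256)
#   signed weights:     level_low is bumped to -127 so the integer grid is
#                       symmetric around zero (levels was reduced to 255 in __init__)
#   unsigned:           level_low = 0, level_high = 255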
@MIN_MAX_INITIALIZERS.register('SymmetricQuantizer')
def _initializer(module, name, min_value, max_value, distributed_):
    if min_value.item() == np.inf or max_value.item() == -np.inf:
        raise AttributeError('Statistics is not collected for {}'.format(name))
    sign = min_value.item() < 0 or module.within_signed_scope
    if sign != module.signed:
        logger.warning("signed set incorrectly")
    module.signed = int(sign)
    if abs(max_value) > 0.1:
        module.scale.data.fill_(max_value.item())
    if distributed_:
        distributed.broadcast(module.scale, 0)
        distributed.broadcast(module.signed_tensor, 0)
    logger.debug("Statistics: min={:.2f} max={:.2f}".format(min_value.item(), max_value.item()))
    logger.info(
        "Set sign: {} and scale: {:04.2f} for {}".format(module.signed, module.scale.item(), name))
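# Note: the rank-0 broadcasts above keep `scale` and `signed_tensor` identical
# across workers, so min/max statistics gathered on one process do not leave
# the quantizers diverged under distributed data-parallel training.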
@COMPRESSION_MODULES.register()
@QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC)
class AsymmetricQuantizer(BaseQuantizer):
    def __init__(self, config):
        super().__init__(config)
        self.is_weights = config.is_weights
        self.input_shape = config.input_shape
        self.per_channel = config.per_channel
        params = config.params
        self.bits = params.bits

        scale_shape = 1
        if self.per_channel:
            scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights)
        self.input_low = nn.Parameter(torch.zeros(scale_shape), requires_grad=True)
        self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True)
        self.eps = 1e-16

    @property
    def signed(self):
        return True

    @property
    def level_high(self):
        return 2 ** self.bits - 1

    @property
    def level_low(self):
        return 0

    @property
    def levels(self):
        return 2 ** self.bits

    def quantize(self, x):
        return asymmetric_quantize(x, self.levels, self.level_low, self.level_high,
                                   self.input_low, self.input_range, self.eps)
See the License for the", "self.config = config self.init_stage = False self.initialized = False self.state_dict_name = None class", "= False self.state_dict_name = None class LoadStateListener: \"\"\" Check whether a quantization module", "config.params self.bits = params.bits scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights)", "to in writing, software distributed under the License is distributed on an \"AS", "close(self): self.hook.remove() self.load_listener = LoadStateListener(self) def forward(self, x): if self.init_stage: return x return", "# pylint: disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module)) def hook_fn(self, state_dict, prefix, local_metadata, strict,", "nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics = False scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape,", "= config.input_shape self.per_channel = config.per_channel params = config.params self.bits = params.bits scale_shape =", "MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize, asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from ..registry import", "max_value.item() == -np.inf: raise AttributeError('Statistics is not collected for {}'.format(name)) sign = min_value.item()", "return x return self.quantize(x) def quantize(self, x): raise NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer):", "except in compliance with the License. You may obtain a copy of the", ".quantize_functions import symmetric_quantize, asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from ..registry import Registry from", "for module_key in module.state_dict().keys(): candidate = module.state_dict_name + '.' + module_key if candidate", "self.is_weights = is_weights self.within_signed_scope = within_signed_scope self.per_channel = per_channel self.input_shape = input_shape class", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "None class LoadStateListener: \"\"\" Check whether a quantization module are going to be", "input_shape=None, is_weights=False, per_channel=False, within_signed_scope=False): self.params = params self.is_weights = is_weights self.within_signed_scope = within_signed_scope", "are going to be updated by new values from state_dict or checkpoint. 
\"\"\"", "1) if self.is_weights: self.level_low += 1 else: self.level_high = 2 ** self.num_bits -", "(8, QuantizationMode.SYMMETRIC, False, [], False) class QuantizerConfig: def __init__(self, params: QuantizationParams, input_shape=None, is_weights=False,", "import symmetric_quantize, asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from ..registry import Registry from ..utils", "self.num_bits if self.is_weights: self.levels -= 1 def set_level_ranges(self): if self.signed: self.level_high = 2", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "self.per_channel = per_channel self.input_shape = input_shape class BaseQuantizer(nn.Module): def __init__(self, config: QuantizerConfig): super().__init__()", "= 2 ** self.num_bits if self.is_weights: self.levels -= 1 def set_level_ranges(self): if self.signed:", "module.state_dict_name: for module_key in module.state_dict().keys(): candidate = module.state_dict_name + '.' + module_key if", "BinarizationMode: XNOR = \"xnor\" DOREFA = \"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams', ['bits', 'mode',", "class BinarizationMode: XNOR = \"xnor\" DOREFA = \"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams', ['bits',", "if min_value.item() == np.inf or max_value.item() == -np.inf: raise AttributeError('Statistics is not collected", "config.params self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics = False scale_shape =", "self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_): if min_value.item() == np.inf or", "self.level_low, self.level_high, self.scale, self.eps) @MIN_MAX_INITIALIZERS.register('SymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_): if min_value.item", "1 @property def level_low(self): return 0 @property def levels(self): return 2 ** self.bits", "DOREFA = \"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams', ['bits', 'mode', 'signed', 'signed_scope', 'per_channel'] )", "logging from collections import namedtuple from functools import partial import numpy as np", "self.init_stage = False self.eps = 1e-16 self.level_high = self.level_low = 0 self.levels =", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing,", "module.initialized = True def close(self): self.hook.remove() self.load_listener = LoadStateListener(self) def forward(self, x): if", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "QuantizationParams.__new__.__defaults__ = (8, QuantizationMode.SYMMETRIC, False, [], False) class QuantizerConfig: def __init__(self, params: QuantizationParams,", "2 ** (self.num_bits - 1) - 1 self.level_low = -(self.level_high + 1) if", "numpy as np import torch import torch.nn as nn from torch import distributed", "params self.is_weights = is_weights self.within_signed_scope = within_signed_scope self.per_channel = per_channel self.input_shape = input_shape", "= get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage = False self.eps = 1e-16", "self.eps = 1e-16 self.level_high = self.level_low = 0 self.levels = 2 ** self.num_bits", "collections import namedtuple from functools import partial import numpy as np import torch", "from 
..registry import Registry from ..utils import get_per_channel_scale_shape logger = logging.getLogger(__name__) QUANTIZATION_MODULES =", "hook_fn(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs, module): if module.state_dict_name: for module_key", "def _initializer(module, name, min_value, max_value, distributed_): if min_value.item == np.inf or max_value.item() ==", "@property def levels(self): return 2 ** self.bits def quantize(self, x): return asymmetric_quantize(x, self.levels,", "import partial import numpy as np import torch import torch.nn as nn from", "NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.input_shape = config.input_shape self.per_channel", "from state_dict or checkpoint. \"\"\" def __init__(self, module): # pylint: disable=protected-access self.hook =", "False self.initialized = False self.state_dict_name = None class LoadStateListener: \"\"\" Check whether a", "compliance with the License. You may obtain a copy of the License at", "-= 1 def set_level_ranges(self): if self.signed: self.level_high = 2 ** (self.num_bits - 1)", "1 @signed.setter def signed(self, signed: bool): self.signed_tensor.fill_(signed) def quantize(self, x): self.set_level_ranges() return symmetric_quantize(x,", "__init__(self, config): super().__init__(config) self.is_weights = config.is_weights self.input_shape = config.input_shape self.per_channel = config.per_channel params", "import COMPRESSION_MODULES from ..registry import Registry from ..utils import get_per_channel_scale_shape logger = logging.getLogger(__name__)", "nn.Parameter(torch.zeros(scale_shape), requires_grad=True) self.input_range = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.eps = 1e-16 @property def signed(self): return", "raise AttributeError('Statistics is not collected for {}'.format(name)) sign = min_value.item() < 0 or", "specific language governing permissions and limitations under the License. \"\"\" import logging from", "express or implied. 
See the License for the specific language governing permissions and", "def __init__(self, config: QuantizerConfig): super().__init__() self.config = config self.init_stage = False self.initialized =", "Registry('binarization_modules') class QuantizationMode: SYMMETRIC = \"symmetric\" ASYMMETRIC = \"asymmetric\" class BinarizationMode: XNOR =", "min_value.item() < 0 or module.within_signed_scope if sign != module.signed: logger.warning(\"signed set incorrectly\") module.signed", "= 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.input_low = nn.Parameter(torch.zeros(scale_shape), requires_grad=True) self.input_range", "self.within_signed_scope = within_signed_scope self.per_channel = per_channel self.input_shape = input_shape class BaseQuantizer(nn.Module): def __init__(self,", "return 0 @property def levels(self): return 2 ** self.bits def quantize(self, x): return", "\"Set sign: {} and scale: {:04.2f} for {}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class", "state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs, module): if module.state_dict_name: for module_key in", "\"\"\" import logging from collections import namedtuple from functools import partial import numpy", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "scale: {:04.2f} for {}\".format(module.signed, module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config):", "..registry import Registry from ..utils import get_per_channel_scale_shape logger = logging.getLogger(__name__) QUANTIZATION_MODULES = Registry('quantization_modules')", "module): # pylint: disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module)) def hook_fn(self, state_dict, prefix, local_metadata,", "if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.input_low = nn.Parameter(torch.zeros(scale_shape), requires_grad=True) self.input_range = nn.Parameter(torch.ones(scale_shape),", "def close(self): self.hook.remove() self.load_listener = LoadStateListener(self) def forward(self, x): if self.init_stage: return x", "_initializer(module, name, min_value, max_value, distributed_): if min_value.item == np.inf or max_value.item() == -np.inf:", "raise NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.input_shape = config.input_shape", "applicable law or agreed to in writing, software distributed under the License is", "self.state_dict_name = None class LoadStateListener: \"\"\" Check whether a quantization module are going", "'per_channel'] ) QuantizationParams.__new__.__defaults__ = (8, QuantizationMode.SYMMETRIC, False, [], False) class QuantizerConfig: def __init__(self,", "= logging.getLogger(__name__) QUANTIZATION_MODULES = Registry('quantization_modules') BINARIZATION_MODULES = Registry('binarization_modules') class QuantizationMode: SYMMETRIC = \"symmetric\"", "get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage = False self.eps = 1e-16 
self.level_high", "0 @property def signed(self): return self.signed_tensor.item() == 1 @signed.setter def signed(self, signed: bool):", "1 def set_level_ranges(self): if self.signed: self.level_high = 2 ** (self.num_bits - 1) -", "\"xnor\" DOREFA = \"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams', ['bits', 'mode', 'signed', 'signed_scope', 'per_channel']", "self.within_signed_scope = config.within_signed_scope params = config.params self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False)", "params = config.params self.bits = params.bits scale_shape = 1 if self.per_channel: scale_shape =", "= False self.eps = 1e-16 self.level_high = self.level_low = 0 self.levels = 2", "quantization module are going to be updated by new values from state_dict or", "limitations under the License. \"\"\" import logging from collections import namedtuple from functools", "min_value).item() if range_ > 0.01: module.input_range.data.fill_(range_) if distributed_: distributed.broadcast(module.input_low, 0) distributed.broadcast(module.input_range, 0) logger.debug(\"Statistics:", "asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from ..registry import Registry from ..utils import get_per_channel_scale_shape", "module are going to be updated by new values from state_dict or checkpoint.", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "from .quantize_functions import symmetric_quantize, asymmetric_quantize from ..layer_utils import COMPRESSION_MODULES from ..registry import Registry", "2 ** self.bits - 1 @property def level_low(self): return 0 @property def levels(self):", "def levels(self): return 2 ** self.bits def quantize(self, x): return asymmetric_quantize(x, self.levels, self.level_low,", "module.scale.item(), name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights = config.is_weights", "2 ** self.bits def quantize(self, x): return asymmetric_quantize(x, self.levels, self.level_low, self.level_high, self.input_low, self.input_range,", "\"dorefa\" QuantizationParams = namedtuple( 'QuantizationParams', ['bits', 'mode', 'signed', 'signed_scope', 'per_channel'] ) QuantizationParams.__new__.__defaults__ =", "collected for {}'.format(name)) sign = min_value.item() < 0 or module.within_signed_scope if sign !=", "logger.warning(\"signed set incorrectly\") module.signed = int(sign) if abs(max_value) > 0.1: module.scale.data.fill_(max_value.item()) if distributed_:", "self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.scale = nn.Parameter(torch.ones(scale_shape), requires_grad=True) self.init_stage = False self.eps", "going to be updated by new values from state_dict or checkpoint. 
\"\"\" def", "def __init__(self, config): super().__init__(config) self.input_shape = config.input_shape self.per_channel = config.per_channel self.is_weights = config.is_weights", "= params.bits scale_shape = 1 if self.per_channel: scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) self.input_low =", "from ..layer_utils import COMPRESSION_MODULES from ..registry import Registry from ..utils import get_per_channel_scale_shape logger", "self.input_low, self.input_range, self.eps) @MIN_MAX_INITIALIZERS.register('AsymmetricQuantizer') def _initializer(module, name, min_value, max_value, distributed_): if min_value.item() ==", "'QuantizationParams', ['bits', 'mode', 'signed', 'signed_scope', 'per_channel'] ) QuantizationParams.__new__.__defaults__ = (8, QuantizationMode.SYMMETRIC, False, [],", "+ module_key if candidate in state_dict: module.initialized = True def close(self): self.hook.remove() self.load_listener", "if self.is_weights: self.levels -= 1 def set_level_ranges(self): if self.signed: self.level_high = 2 **", "self.quantize(x) def quantize(self, x): raise NotImplementedError @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.SYMMETRIC) class SymmetricQuantizer(BaseQuantizer): def __init__(self, config):", "= input_shape class BaseQuantizer(nn.Module): def __init__(self, config: QuantizerConfig): super().__init__() self.config = config self.init_stage", "candidate in state_dict: module.initialized = True def close(self): self.hook.remove() self.load_listener = LoadStateListener(self) def", "params = config.params self.num_bits = params.bits self.signed_tensor = nn.Parameter(torch.IntTensor([params.signed]), requires_grad=False) self.collect_scale_statistics = False", "import namedtuple from functools import partial import numpy as np import torch import", "def level_low(self): return 0 @property def levels(self): return 2 ** self.bits def quantize(self,", "or module.within_signed_scope if sign != module.signed: logger.warning(\"signed set incorrectly\") module.signed = int(sign) if", "self.level_low = -(self.level_high + 1) if self.is_weights: self.level_low += 1 else: self.level_high =", "pylint: disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module)) def hook_fn(self, state_dict, prefix, local_metadata, strict, missing_keys,", "self.is_weights = config.is_weights self.within_signed_scope = config.within_signed_scope params = config.params self.num_bits = params.bits self.signed_tensor", "torch import distributed from .initializers import MIN_MAX_INITIALIZERS from .quantize_functions import symmetric_quantize, asymmetric_quantize from", "0) distributed.broadcast(module.signed_tensor, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info( \"Set sign: {} and scale:", "= (max_value - min_value).item() if range_ > 0.01: module.input_range.data.fill_(range_) if distributed_: distributed.broadcast(module.input_low, 0)", "0) distributed.broadcast(module.input_range, 0) logger.debug(\"Statistics: min={:.2f} max={:.2f}\".format(min_value.item(), max_value.item())) logger.info(\"Set input_low: {:04.2f} and input_range: {:04.2f}", "config): super().__init__(config) self.is_weights = config.is_weights self.input_shape = config.input_shape self.per_channel = config.per_channel params =", "import torch import torch.nn as nn from torch import distributed from .initializers import", "with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "int(sign) if abs(max_value) > 0.1: module.scale.data.fill_(max_value.item()) if distributed_: distributed.broadcast(module.scale, 0) distributed.broadcast(module.signed_tensor, 0) logger.debug(\"Statistics:", "name)) @COMPRESSION_MODULES.register() @QUANTIZATION_MODULES.register(QuantizationMode.ASYMMETRIC) class AsymmetricQuantizer(BaseQuantizer): def __init__(self, config): super().__init__(config) self.is_weights = config.is_weights self.input_shape", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software", "checkpoint. \"\"\" def __init__(self, module): # pylint: disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module)) def", "set incorrectly\") module.signed = int(sign) if abs(max_value) > 0.1: module.scale.data.fill_(max_value.item()) if distributed_: distributed.broadcast(module.scale,", "in state_dict: module.initialized = True def close(self): self.hook.remove() self.load_listener = LoadStateListener(self) def forward(self,", "or checkpoint. \"\"\" def __init__(self, module): # pylint: disable=protected-access self.hook = module._register_load_state_dict_pre_hook(partial(self.hook_fn, module=module))", "for {}'.format(name)) sign = min_value.item() < 0 or module.within_signed_scope if sign != module.signed:" ]
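The level-range bookkeeping in SymmetricQuantizer.set_level_ranges is easy to sanity-check by hand. The sketch below simply replays that arithmetic for num_bits = 8; it is illustrative only and not part of the module above.

# Sanity check of set_level_ranges() arithmetic for num_bits = 8 (illustrative only).
num_bits = 8
level_high = 2 ** (num_bits - 1) - 1      # 127 for signed tensors
level_low = -(level_high + 1)             # -128; weights use level_low + 1 = -127
assert (level_low, level_high) == (-128, 127)
assert (level_low + 1, level_high) == (-127, 127)   # is_weights=True narrows the range
assert (0, 2 ** num_bits - 1) == (0, 255)           # unsigned case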
[ "while(n!=0): r=n%10 n=int(n/10) s[a]=r for i in range (a): if(r==s[i]): c+=1 break a+=1", "s[a]=r for i in range (a): if(r==s[i]): c+=1 break a+=1 if (c==0): print(\"It", "a+=1 if (c==0): print(\"It is a unique number\") else: print(\"It is not a", "n=int(input(\"Enter the number:\")) s={} a=0 c=0 while(n!=0): r=n%10 n=int(n/10) s[a]=r for i in", "for i in range (a): if(r==s[i]): c+=1 break a+=1 if (c==0): print(\"It is", "if(r==s[i]): c+=1 break a+=1 if (c==0): print(\"It is a unique number\") else: print(\"It", "n=int(n/10) s[a]=r for i in range (a): if(r==s[i]): c+=1 break a+=1 if (c==0):", "(c==0): print(\"It is a unique number\") else: print(\"It is not a unique number\")", "number:\")) s={} a=0 c=0 while(n!=0): r=n%10 n=int(n/10) s[a]=r for i in range (a):", "the number:\")) s={} a=0 c=0 while(n!=0): r=n%10 n=int(n/10) s[a]=r for i in range", "c=0 while(n!=0): r=n%10 n=int(n/10) s[a]=r for i in range (a): if(r==s[i]): c+=1 break", "a=0 c=0 while(n!=0): r=n%10 n=int(n/10) s[a]=r for i in range (a): if(r==s[i]): c+=1", "r=n%10 n=int(n/10) s[a]=r for i in range (a): if(r==s[i]): c+=1 break a+=1 if", "(a): if(r==s[i]): c+=1 break a+=1 if (c==0): print(\"It is a unique number\") else:", "in range (a): if(r==s[i]): c+=1 break a+=1 if (c==0): print(\"It is a unique", "break a+=1 if (c==0): print(\"It is a unique number\") else: print(\"It is not", "i in range (a): if(r==s[i]): c+=1 break a+=1 if (c==0): print(\"It is a", "s={} a=0 c=0 while(n!=0): r=n%10 n=int(n/10) s[a]=r for i in range (a): if(r==s[i]):", "c+=1 break a+=1 if (c==0): print(\"It is a unique number\") else: print(\"It is", "range (a): if(r==s[i]): c+=1 break a+=1 if (c==0): print(\"It is a unique number\")", "if (c==0): print(\"It is a unique number\") else: print(\"It is not a unique" ]
[ "seconds would be preferred. Any less, and your clip will be zero-padded to", "in slices: h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1)", "= arr.astype(np.float32, order='C') / 32768.0 arr = librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate)", "would be preferred. Any less, and your clip will be zero-padded to 5", "import librosa import torch from NISP.lightning_model import LightningModel from config import TestNISPConfig as", "more than 5 seconds would be preferred. Any less, and your clip will", "audio to required format (16kHz Sample Rate, Mono) input_sr, arr = audio arr", "Rate, Mono) input_sr, arr = audio arr = arr.astype(np.float32, order='C') / 32768.0 arr", "g_preds = [], [], [] with torch.no_grad(): for slice in slices: h_pred, a_pred,", "32768.0 arr = librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate) # convert to torch", "win_length = cfg.slice_window * cfg.sample_rate if tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor, (0,", "* model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2)", "model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender = 'Female'", "return 'You\\'re {}, your height is {}, and you are {} years old.'.format(gender,", "your voice. \\n Ideally, a clip of more than 5 seconds would be", "years old.'.format(gender, height, age) iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your height,", "INFERENCE_COUNT += 1 # resample audio to required format (16kHz Sample Rate, Mono)", "= LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1 # resample", "h_preds, a_preds, g_preds = [], [], [] with torch.no_grad(): for slice in slices:", "clip of more than 5 seconds would be preferred. Any less, and your", "librosa.resample(arr, input_sr, cfg.sample_rate) # convert to torch tensor tensor = torch.Tensor([arr]) sample_length =", "= tensor.unsqueeze(dim=0) else: # Split input audio into slices of input_length seconds slices", "with torch.no_grad(): for slice in slices: h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) *", "iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your height, age, gender based on", "INFERENCE_COUNT) return 'You\\'re {}, your height is {}, and you are {} years", "> 0.5 else 'Male' print('Inference was run. Current inference count:', INFERENCE_COUNT) return 'You\\'re", "than 5 seconds would be preferred. 
Any less, and your clip will be", "gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your height, age, gender based on your voice.", "cfg.sample_rate if tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant')", "= torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate if", "of input_length seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds, a_preds, g_preds", "input audio into slices of input_length seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) #", "a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std +", "audio into slices of input_length seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict", "else 'Male' print('Inference was run. Current inference count:', INFERENCE_COUNT) return 'You\\'re {}, your", "for slice in slices: h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std +", "based on your voice. \\n Ideally, a clip of more than 5 seconds", "outputs='text', description='Predicts your height, age, gender based on your voice. \\n Ideally, a", "slices: h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) *", "Ideally, a clip of more than 5 seconds would be preferred. Any less,", "< sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0)", "height, age, gender based on your voice. \\n Ideally, a clip of more", "librosa import torch from NISP.lightning_model import LightningModel from config import TestNISPConfig as cfg", "checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1", "+ model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age =", "run. Current inference count:', INFERENCE_COUNT) return 'You\\'re {}, your height is {}, and", "print('Inference was run. 
Current inference count:', INFERENCE_COUNT) return 'You\\'re {}, your height is", "is {}, and you are {} years old.'.format(gender, height, age) iface = gr.Interface(", "gr import numpy as np import librosa import torch from NISP.lightning_model import LightningModel", "cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate if tensor.shape[-1] < sample_length: tensor", "model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT +=", "# predict h_preds, a_preds, g_preds = [], [], [] with torch.no_grad(): for slice", "seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds, a_preds, g_preds = [],", "= librosa.resample(arr, input_sr, cfg.sample_rate) # convert to torch tensor tensor = torch.Tensor([arr]) sample_length", "# load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT", "cfg.slice_window * cfg.sample_rate if tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length -", "* cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate if tensor.shape[-1] < sample_length: tensor =", "- tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else: # Split input audio into slices", "and you are {} years old.'.format(gender, height, age) iface = gr.Interface( fn=predict_height, inputs='mic',", "# Split input audio into slices of input_length seconds slices = tensor.unfold(1, sample_length,", "TestNISPConfig as cfg INFERENCE_COUNT = 0 # load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint,", "(0, sample_length - tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else: # Split input audio", "Sample Rate, Mono) input_sr, arr = audio arr = arr.astype(np.float32, order='C') / 32768.0", "0.5 else 'Male' print('Inference was run. Current inference count:', INFERENCE_COUNT) return 'You\\'re {},", "cfg INFERENCE_COUNT = 0 # load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval()", "torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate if tensor.shape[-1]", "inputs='mic', outputs='text', description='Predicts your height, age, gender based on your voice. 
\\n Ideally,", "height, age) iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your height, age, gender", "= gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your height, age, gender based on your", "inference count:', INFERENCE_COUNT) return 'You\\'re {}, your height is {}, and you are", "if tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant') slices", "= torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else: # Split", "tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds, a_preds, g_preds = [], [], [] with", "/ 32768.0 arr = librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate) # convert to", "else: # Split input audio into slices of input_length seconds slices = tensor.unfold(1,", "model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1 # resample audio to required", "h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std", "count:', INFERENCE_COUNT) return 'You\\'re {}, your height is {}, and you are {}", "'Female' if sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference was run. Current inference count:',", "np import librosa import torch from NISP.lightning_model import LightningModel from config import TestNISPConfig", "cfg.sample_rate) # convert to torch tensor tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds *", "arr = arr.astype(np.float32, order='C') / 32768.0 arr = librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr,", "from NISP.lightning_model import LightningModel from config import TestNISPConfig as cfg INFERENCE_COUNT = 0", "slice in slices: h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item())", "voice. \\n Ideally, a clip of more than 5 seconds would be preferred.", "NISP.lightning_model import LightningModel from config import TestNISPConfig as cfg INFERENCE_COUNT = 0 #", "numpy as np import librosa import torch from NISP.lightning_model import LightningModel from config", "{}, your height is {}, and you are {} years old.'.format(gender, height, age)", "= 'Female' if sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference was run. 
Current inference", "[], [], [] with torch.no_grad(): for slice in slices: h_pred, a_pred, g_pred =", "from config import TestNISPConfig as cfg INFERENCE_COUNT = 0 # load model checkpoint", "arr = librosa.resample(arr, input_sr, cfg.sample_rate) # convert to torch tensor tensor = torch.Tensor([arr])", "torch from NISP.lightning_model import LightningModel from config import TestNISPConfig as cfg INFERENCE_COUNT =", "arr.astype(np.float32, order='C') / 32768.0 arr = librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate) #", "= [], [], [] with torch.no_grad(): for slice in slices: h_pred, a_pred, g_pred", "are {} years old.'.format(gender, height, age) iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts", "height is {}, and you are {} years old.'.format(gender, height, age) iface =", "resample audio to required format (16kHz Sample Rate, Mono) input_sr, arr = audio", "torch.no_grad(): for slice in slices: h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std", "sample_length, win_length).transpose(0,1) # predict h_preds, a_preds, g_preds = [], [], [] with torch.no_grad():", "format (16kHz Sample Rate, Mono) input_sr, arr = audio arr = arr.astype(np.float32, order='C')", "model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age", "model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height", "g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds) >", "= cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate if tensor.shape[-1] < sample_length:", "sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference was run. Current inference count:', INFERENCE_COUNT) return", "as cfg INFERENCE_COUNT = 0 # load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path)", "'Male' print('Inference was run. 
Current inference count:', INFERENCE_COUNT) return 'You\\'re {}, your height", "'constant') slices = tensor.unsqueeze(dim=0) else: # Split input audio into slices of input_length", "win_length).transpose(0,1) # predict h_preds, a_preds, g_preds = [], [], [] with torch.no_grad(): for", "= int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference was", "you are {} years old.'.format(gender, height, age) iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text',", "import numpy as np import librosa import torch from NISP.lightning_model import LightningModel from", "config import TestNISPConfig as cfg INFERENCE_COUNT = 0 # load model checkpoint model", "torch tensor tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window", "old.'.format(gender, height, age) iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your height, age,", "[] with torch.no_grad(): for slice in slices: h_pred, a_pred, g_pred = model(slice) h_preds.append((h_pred.view(-1)", "g_pred = model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item())", "as np import librosa import torch from NISP.lightning_model import LightningModel from config import", "import LightningModel from config import TestNISPConfig as cfg INFERENCE_COUNT = 0 # load", "round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds) > 0.5 else 'Male'", "LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1 # resample audio", "slices = tensor.unsqueeze(dim=0) else: # Split input audio into slices of input_length seconds", "arr = audio arr = arr.astype(np.float32, order='C') / 32768.0 arr = librosa.to_mono(arr.T) arr", "sample_length = cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate if tensor.shape[-1] <", "a_preds, g_preds = [], [], [] with torch.no_grad(): for slice in slices: h_pred,", "gender based on your voice. \\n Ideally, a clip of more than 5", "tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else: # Split input audio into slices of", "your height, age, gender based on your voice. \\n Ideally, a clip of", "preferred. Any less, and your clip will be zero-padded to 5 seconds.' ).launch(share=False)", "age, gender based on your voice. \\n Ideally, a clip of more than", "gradio as gr import numpy as np import librosa import torch from NISP.lightning_model", "input_length seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds, a_preds, g_preds =", "gender = 'Female' if sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference was run. Current", "5 seconds would be preferred. Any less, and your clip will be zero-padded", "<filename>gradio_infer.py import gradio as gr import numpy as np import librosa import torch", "import torch from NISP.lightning_model import LightningModel from config import TestNISPConfig as cfg INFERENCE_COUNT", "* model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender =", "= round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds) > 0.5 else", "on your voice. 
\\n Ideally, a clip of more than 5 seconds would", "tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate", "Current inference count:', INFERENCE_COUNT) return 'You\\'re {}, your height is {}, and you", "age) iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your height, age, gender based", "required format (16kHz Sample Rate, Mono) input_sr, arr = audio arr = arr.astype(np.float32,", "{}, and you are {} years old.'.format(gender, height, age) iface = gr.Interface( fn=predict_height,", "cfg.sample_rate win_length = cfg.slice_window * cfg.sample_rate if tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor,", "+= 1 # resample audio to required format (16kHz Sample Rate, Mono) input_sr,", "LightningModel from config import TestNISPConfig as cfg INFERENCE_COUNT = 0 # load model", "height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds) > 0.5", "model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds))", "sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else:", "order='C') / 32768.0 arr = librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate) # convert", "tensor tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate win_length = cfg.slice_window *", "predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1 # resample audio to required format (16kHz", "Split input audio into slices of input_length seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1)", "predict h_preds, a_preds, g_preds = [], [], [] with torch.no_grad(): for slice in", "= model(slice) h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item())", "a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender", "age = int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference", "= cfg.slice_window * cfg.sample_rate if tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length", "\\n Ideally, a clip of more than 5 seconds would be preferred. Any", "was run. Current inference count:', INFERENCE_COUNT) return 'You\\'re {}, your height is {},", "description='Predicts your height, age, gender based on your voice. 
\\n Ideally, a clip", "to torch tensor tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate win_length =", "arr = librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate) # convert to torch tensor", "tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant') slices =", "model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds)", "= 0 # load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio):", "INFERENCE_COUNT = 0 # load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def", "slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds, a_preds, g_preds = [], [],", "# convert to torch tensor tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate", "input_sr, cfg.sample_rate) # convert to torch tensor tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds", "[], [] with torch.no_grad(): for slice in slices: h_pred, a_pred, g_pred = model(slice)", "{} years old.'.format(gender, height, age) iface = gr.Interface( fn=predict_height, inputs='mic', outputs='text', description='Predicts your", "INFERENCE_COUNT INFERENCE_COUNT += 1 # resample audio to required format (16kHz Sample Rate,", "def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1 # resample audio to required format", "to required format (16kHz Sample Rate, Mono) input_sr, arr = audio arr =", "a clip of more than 5 seconds would be preferred. Any less, and", "global INFERENCE_COUNT INFERENCE_COUNT += 1 # resample audio to required format (16kHz Sample", "torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else: # Split input", "slices of input_length seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds, a_preds,", "+ model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height = round(sum(h_preds)/len(h_preds),2) age = int(sum(a_preds)/len(a_preds)) gender = 'Female' if", "load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT", "into slices of input_length seconds slices = tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds,", "int(sum(a_preds)/len(a_preds)) gender = 'Female' if sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference was run.", "'You\\'re {}, your height is {}, and you are {} years old.'.format(gender, height,", "= librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate) # convert to torch tensor tensor", "as gr import numpy as np import librosa import torch from NISP.lightning_model import", "convert to torch tensor tensor = torch.Tensor([arr]) sample_length = cfg.slice_seconds * cfg.sample_rate win_length", "fn=predict_height, inputs='mic', outputs='text', description='Predicts your height, age, gender based on your voice. \\n", "input_sr, arr = audio arr = arr.astype(np.float32, order='C') / 32768.0 arr = librosa.to_mono(arr.T)", "be preferred. 
Any less, and your clip will be zero-padded to 5 seconds.'", "= audio arr = arr.astype(np.float32, order='C') / 32768.0 arr = librosa.to_mono(arr.T) arr =", "audio arr = arr.astype(np.float32, order='C') / 32768.0 arr = librosa.to_mono(arr.T) arr = librosa.resample(arr,", "librosa.to_mono(arr.T) arr = librosa.resample(arr, input_sr, cfg.sample_rate) # convert to torch tensor tensor =", "import gradio as gr import numpy as np import librosa import torch from", "if sum(g_preds)/len(g_preds) > 0.5 else 'Male' print('Inference was run. Current inference count:', INFERENCE_COUNT)", "tensor = torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else: #", "1 # resample audio to required format (16kHz Sample Rate, Mono) input_sr, arr", "model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1 #", "# resample audio to required format (16kHz Sample Rate, Mono) input_sr, arr =", "Mono) input_sr, arr = audio arr = arr.astype(np.float32, order='C') / 32768.0 arr =", "import TestNISPConfig as cfg INFERENCE_COUNT = 0 # load model checkpoint model =", "= tensor.unfold(1, sample_length, win_length).transpose(0,1) # predict h_preds, a_preds, g_preds = [], [], []", "h_preds.append((h_pred.view(-1) * model.h_std + model.h_mean).item()) a_preds.append((a_pred.view(-1) * model.a_std + model.a_mean).item()) g_preds.append(g_pred.view(-1).item()) height =", "tensor.unsqueeze(dim=0) else: # Split input audio into slices of input_length seconds slices =", "* cfg.sample_rate if tensor.shape[-1] < sample_length: tensor = torch.nn.functional.pad(tensor, (0, sample_length - tensor.size(1)),", "csv_path=cfg.csv_path) model.eval() def predict_height(audio): global INFERENCE_COUNT INFERENCE_COUNT += 1 # resample audio to", "(16kHz Sample Rate, Mono) input_sr, arr = audio arr = arr.astype(np.float32, order='C') /", "sample_length - tensor.size(1)), 'constant') slices = tensor.unsqueeze(dim=0) else: # Split input audio into", "your height is {}, and you are {} years old.'.format(gender, height, age) iface", "0 # load model checkpoint model = LightningModel.load_from_checkpoint(cfg.model_checkpoint, csv_path=cfg.csv_path) model.eval() def predict_height(audio): global", "of more than 5 seconds would be preferred. Any less, and your clip" ]
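The unfold call above turns the waveform into overlapping windows, one per hop. A minimal sketch of the shape bookkeeping, assuming illustrative values sample_rate=16000, slice_seconds=5 and slice_window=1 (the real values live in TestNISPConfig and may differ):

import torch

sample_rate = 16000                 # assumed stand-in for cfg.sample_rate
sample_length = 5 * sample_rate     # cfg.slice_seconds * cfg.sample_rate
win_length = 1 * sample_rate        # cfg.slice_window * cfg.sample_rate

tensor = torch.zeros(1, 12 * sample_rate)   # a 12-second mono clip
slices = tensor.unfold(1, sample_length, win_length).transpose(0, 1)
# number of windows: (12*sr - 5*sr) // (1*sr) + 1 = 8
print(slices.shape)   # torch.Size([8, 1, 80000])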
[ "curses, sys, os #Servo controller connected to IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685()", "import curses, sys, os #Servo controller connected to IC2 import Adafruit_PCA9685 pwm =", "to IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep #ESC", "sys, os #Servo controller connected to IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60)", "for i in range(1,6): pwm_value = throttle -i*delta if pwm_value < toggleState: pwm.set_pwm(2,0,toggleState)", "= 450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6): pwm_value", "Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep #ESC Brushles motor states:", "controller connected to IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import", "toggleState = 400 throttle = 450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for", "i in range(1,6): pwm_value = throttle -i*delta if pwm_value < toggleState: pwm.set_pwm(2,0,toggleState) sleep(0.2)", "throttle -i*delta if pwm_value < toggleState: pwm.set_pwm(2,0,toggleState) sleep(0.2) pwm.set_pwm(2,0, pwm_value) sleep(0.4) print(pwm_value) pwm.set_pwm(2,0,toggleState)", "400 throttle = 450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in", "sleep(0.2) for i in range(1,6): pwm_value = throttle -i*delta if pwm_value < toggleState:", "on/off toggleState = 400 throttle = 450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2)", "throttle = 450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6):", "450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6): pwm_value =", "sleep #ESC Brushles motor states: direction, on/off toggleState = 400 throttle = 450", "range(1,6): pwm_value = throttle -i*delta if pwm_value < toggleState: pwm.set_pwm(2,0,toggleState) sleep(0.2) pwm.set_pwm(2,0, pwm_value)", "IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep #ESC Brushles", "= throttle -i*delta if pwm_value < toggleState: pwm.set_pwm(2,0,toggleState) sleep(0.2) pwm.set_pwm(2,0, pwm_value) sleep(0.4) print(pwm_value)", "= 400 throttle = 450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i", "pwm.set_pwm_freq(60) from time import sleep #ESC Brushles motor states: direction, on/off toggleState =", "from time import sleep #ESC Brushles motor states: direction, on/off toggleState = 400", "#ESC Brushles motor states: direction, on/off toggleState = 400 throttle = 450 delta", "import sleep #ESC Brushles motor states: direction, on/off toggleState = 400 throttle =", "connected to IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep", "in range(1,6): pwm_value = throttle -i*delta if pwm_value < toggleState: pwm.set_pwm(2,0,toggleState) sleep(0.2) pwm.set_pwm(2,0,", "direction, on/off toggleState = 400 throttle = 450 delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState)", "pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep #ESC Brushles motor states: direction,", "delta = 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6): pwm_value = throttle", "= Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep #ESC Brushles 
motor states: direction, on/off", "#Servo controller connected to IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time", "motor states: direction, on/off toggleState = 400 throttle = 450 delta = 20", "print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6): pwm_value = throttle -i*delta if pwm_value", "pwm_value = throttle -i*delta if pwm_value < toggleState: pwm.set_pwm(2,0,toggleState) sleep(0.2) pwm.set_pwm(2,0, pwm_value) sleep(0.4)", "time import sleep #ESC Brushles motor states: direction, on/off toggleState = 400 throttle", "import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep #ESC Brushles motor", "states: direction, on/off toggleState = 400 throttle = 450 delta = 20 print(\"toggleState1\")", "= 20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6): pwm_value = throttle -i*delta", "Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from time import sleep #ESC Brushles motor states: direction, on/off toggleState", "Brushles motor states: direction, on/off toggleState = 400 throttle = 450 delta =", "20 print(\"toggleState1\") pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6): pwm_value = throttle -i*delta if", "os #Servo controller connected to IC2 import Adafruit_PCA9685 pwm = Adafruit_PCA9685.PCA9685() pwm.set_pwm_freq(60) from", "pwm.set_pwm(2,0,toggleState) sleep(0.2) for i in range(1,6): pwm_value = throttle -i*delta if pwm_value <" ]
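For reference, with throttle=450, delta=20 and toggleState=400 the ramp loop above emits five descending PWM values, the last three of which fall below the toggle threshold. The enumeration below replays that arithmetic; it is illustrative only and not part of the original script.

throttle, delta, toggleState = 450, 20, 400
for i in range(1, 6):
    pwm_value = throttle - i * delta
    print(i, pwm_value, pwm_value < toggleState)
# 1 430 False
# 2 410 False
# 3 390 True
# 4 370 True
# 5 350 True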
[ "pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode = 1 r = List([pv.int(21)], size=(1, 1)) store", "pytropos.internals.values.builtin_values import * from pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values", "= List([pv.int(21)], size=(1, 1)) store = { '_': PythonValue(PT.Top), 'f': r.get_attrs()['append'], 'r': PythonValue(r),", "import pytropos.internals.values as pv from pytropos.internals.values.builtin_values import * from pytropos.internals.values.python_values.builtin_mutvalues import * from", "import * from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode = 1 r = List([pv.int(21)],", "pytropos.internals.values as pv from pytropos.internals.values.builtin_values import * from pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers", "from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode = 1 r = List([pv.int(21)], size=(1, 1))", "PT exitcode = 1 r = List([pv.int(21)], size=(1, 1)) store = { '_':", "from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode = 1 r", "import PythonValue, PT exitcode = 1 r = List([pv.int(21)], size=(1, 1)) store =", "import * from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode =", "* from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode = 1", "as pv from pytropos.internals.values.builtin_values import * from pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers import", "* from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode = 1 r = List([pv.int(21)], size=(1,", "exitcode = 1 r = List([pv.int(21)], size=(1, 1)) store = { '_': PythonValue(PT.Top),", "r = List([pv.int(21)], size=(1, 1)) store = { '_': PythonValue(PT.Top), 'f': r.get_attrs()['append'], 'r':", "pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode = 1 r =", "List([pv.int(21)], size=(1, 1)) store = { '_': PythonValue(PT.Top), 'f': r.get_attrs()['append'], 'r': PythonValue(r), }", "PythonValue, PT exitcode = 1 r = List([pv.int(21)], size=(1, 1)) store = {", "* from pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import PythonValue,", "= 1 r = List([pv.int(21)], size=(1, 1)) store = { '_': PythonValue(PT.Top), 'f':", "import * from pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import", "from pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import PythonValue, PT", "from pytropos.internals.values.builtin_values import * from pytropos.internals.values.python_values.builtin_mutvalues 
import * from pytropos.internals.values.python_values.wrappers import * from", "pv from pytropos.internals.values.builtin_values import * from pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers import *", "1 r = List([pv.int(21)], size=(1, 1)) store = { '_': PythonValue(PT.Top), 'f': r.get_attrs()['append'],", "pytropos.internals.values.python_values.builtin_mutvalues import * from pytropos.internals.values.python_values.wrappers import * from pytropos.internals.values.python_values.python_values import PythonValue, PT exitcode" ]
import re
import setuptools


def find_version(fname):
    """Attempts to find the version number in the file fname.

    Raises RuntimeError if not found.
    """
    version = ''
    with open(fname, 'r') as fp:
        reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
        for line in fp:
            m = reg.match(line)
            if m:
                version = m.group(1)
                break
    if not version:
        raise RuntimeError('Cannot find version information')
    return version


__version__ = find_version('doc_scanner/__init__.py')

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="doc_scanner",
    version=__version__,
    author="<NAME>",
    author_email="<EMAIL>",
    description="A document scanner based on openCV3 and scikit-image",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/Guoli-Lyu/document-scanner",
    packages=setuptools.find_packages(),
    classifiers=(
        'Development Status :: 4 - Beta',
        "Programming Language :: Python :: 3.6",
        "License :: OSI Approved :: MIT License",
    ),
    test_suite='tests',
    project_urls={
        'Bug Reports': 'https://github.com/Guoli-Lyu/document-scanner/issues',
    },
    install_requires=[
        'numpy',
        'scikit-image',
        'opencv-python',
        'pandas',
    ],
)

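# A quick sanity check of find_version's regex (a minimal sketch; the sample
# version line below is hypothetical, not taken from doc_scanner itself):
#
#     import re
#     reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
#     m = reg.match("__version__ = '1.2.3'")
#     assert m is not None and m.group(1) == '1.2.3'
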
# jonasht/pythonEstudos
import keyboard

print('\npressing A ')
qtd_apertos = 0
while True:
    # NOTE: is_pressed() polls the key state, so holding 'a' down increments
    # the counter on every loop iteration, not once per physical press.
    if keyboard.is_pressed('a'):
        qtd_apertos += 1
        print('A was pressed ', qtd_apertos)
    if keyboard.is_pressed('s'):
        print('\nend of program')
        break

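# An event-driven variant (a sketch, assuming the same `keyboard` package):
# on_press_key fires the callback once per key-down event instead of polling
# the key state in a busy loop.
#
#     import keyboard
#
#     count = 0
#
#     def on_a(event):
#         global count
#         count += 1
#         print('A was pressed', count)
#
#     keyboard.on_press_key('a', on_a)  # one callback per key-down event
#     keyboard.wait('s')                # block until 's' is pressed
#     print('end of program')
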
#!/usr/bin/python3
import os.path

import openpyxl
import requests
import json
import argparse

BASE_URL_XIV_API_CHARACTER: str = "https://xivapi.com/character/"
GERMAN_TO_ENGLISH_CLASS_DICT: dict = {}
SUB_30_MAPPING_DICT: dict = {}
CONFIG_LOCATION = os.getcwd()
DEBUG_ENABLED = False


def main(filepath):
    """Main method, used to process data and update the excel workbook."""
    workbook: openpyxl.Workbook = openpyxl.load_workbook(filepath)
    worksheet = workbook.active
    class_range: tuple = generate_class_range(worksheet)
    for i in range(worksheet.min_row + 1, worksheet.max_row):
        current_row: tuple = worksheet[i]
        if not current_row[0].value and not current_row[1].value:
            break
        current_character_name: str = f"{current_row[0].value} {current_row[1].value}"
        current_character_info: dict = process_class_info(
            get_character_info(get_character_id(current_character_name)))
        if not current_character_info:
            print(f"Can't process data for character: {current_character_name}")
            continue
        update_character_info(current_character_info, worksheet, class_range,
                              worksheet[worksheet.min_row], i)
    workbook.save(filepath.replace(".xlsx", "_updated.xlsx"))
    print("Finished!")


def update_character_info(current_character_info: dict, worksheet: openpyxl.workbook.workbook.Worksheet,
                          class_range: tuple, header_row: tuple, current_row: int):
    """Method to update the character class information in the excel sheet."""
    for i in range(class_range[0], class_range[1]):
        # header_row is a 0-based list while worksheet columns are 1-based,
        # so shift the column index by 1 when reading the header.
        mapped_class_name = GERMAN_TO_ENGLISH_CLASS_DICT.get(header_row[i - 1].value)
        new_class_val = current_character_info.get(mapped_class_name, 0)
        if DEBUG_ENABLED:
            character_name = f"{worksheet.cell(current_row, 1).value} {worksheet.cell(current_row, 2).value}"
            current_class = header_row[i - 1].value
            print(f"Setting value {new_class_val} for class {current_class} for character {character_name}")
        current_cell = worksheet.cell(row=current_row, column=i)
        current_cell.value = new_class_val


def process_class_info(class_info: dict):
    """Method to process the class info of every player, mapping it into a dictionary for easier usage."""
    if class_info is None:
        return None
    data_to_process = class_info.get("Character", {}).get("ClassJobs", None)
    if not data_to_process:
        raise IOError
    out: dict = {SUB_30_MAPPING_DICT.get(entry["UnlockedState"]["Name"], entry["UnlockedState"]["Name"]): entry["Level"]
                 for entry in data_to_process}
    # special case -> arcanist branching into two main jobs
    out["Summoner"] = out["Scholar"]
    if DEBUG_ENABLED:
        print("MAPPED CLASS VALUES:")
        print(out)
    return out


def generate_class_range(worksheet: openpyxl.workbook.workbook.Worksheet):
    """Helper method to create the excel ranges for the player classes."""
    header_row: tuple = worksheet[worksheet.min_row]
    end = 0
    start = 0
    start_set = False
    for col in header_row:
        if col.value is None:
            break
        if col.value in GERMAN_TO_ENGLISH_CLASS_DICT.keys() and not start_set:
            start = end
            start_set = True
        end += 1
    if DEBUG_ENABLED:
        print("CLASS ROW RANGES:")
        print(start, end)
    return start + 1, end + 1


def do_http_get(request_url: str):
    """Helper method to do http requests."""
    resp: requests.Response = requests.get(request_url)
    if resp.ok:
        return resp.json()
    else:
        raise ConnectionError


def get_character_info(character_id: str):
    """Helper method to receive character info via the XIV API."""
    if not character_id:
        return None
    current_request_url: str = f"{BASE_URL_XIV_API_CHARACTER}{character_id}"
    resp_json: dict = do_http_get(current_request_url)
    return resp_json


def get_character_id(character_name: str):
    """Helper method to get the ID of a character via the XIV API."""
    current_request_url: str = f"{BASE_URL_XIV_API_CHARACTER}search?name={character_name}&server=Moogle"
    resp_json: dict = do_http_get(current_request_url)
    print(f"Processing data for: {character_name}")
    return resp_json["Results"][0]["ID"] if resp_json["Results"] else None


def load_config(arguments: argparse.Namespace):
    global GERMAN_TO_ENGLISH_CLASS_DICT, SUB_30_MAPPING_DICT
    global CONFIG_LOCATION, DEBUG_ENABLED
    if arguments.config:
        CONFIG_LOCATION = arguments.config
    if arguments.d:
        DEBUG_ENABLED = arguments.d
    with open(os.path.join(CONFIG_LOCATION, "eor_config.json")) as file:
        config = json.load(file)
    GERMAN_TO_ENGLISH_CLASS_DICT = config.get("class_config", None)
    SUB_30_MAPPING_DICT = config.get("sub_30_class_config", None)
    if not GERMAN_TO_ENGLISH_CLASS_DICT or not SUB_30_MAPPING_DICT:
        raise IOError


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Process the EoR Membership excel.")
    parser.add_argument("--filename", metavar='[path to file]', type=str,
                        help="the location of the file to process")
    parser.add_argument("--config", type=str, required=False)
    parser.add_argument("--d", required=False, action='store_true')
    args = parser.parse_args()
    load_config(args)
    main(args.filename)
info of every player, mapping it into a dictionary for easier usage\"\"\"", "DEBUG_ENABLED: print(\"CLASS ROW RANGES:\") print(start, end) return start + 1, end + 1", "def process_class_info(class_info: dict): \"\"\"method to process the class info of every player, mapping", "entry in data_to_process} # special case -> arcanist branching into two main jobs", "GERMAN_TO_ENGLISH_CLASS_DICT: dict = {} SUB_30_MAPPING_DICT: dict = {} CONFIG_LOCATION = os.getcwd() DEBUG_ENABLED =", "for class {current_class} for character {character_name}\") current_cell = worksheet.cell(row=current_row, column=i) current_cell.value = new_class_val", "excel sheet\"\"\" for i in range(class_range[0], class_range[1]): # reduce i by one because", "thus reducing the index by 1 mapped_class_name = GERMAN_TO_ENGLISH_CLASS_DICT.get(header_row[i - 1].value) new_class_val =", "SUB_30_MAPPING_DICT: raise IOError if __name__ == '__main__': parser = argparse.ArgumentParser(description=\"Process the EoR Membership", "to update the character class information in the excel sheet\"\"\" for i in", "def do_http_get(request_url: str): \"\"\"helper method to do http requests\"\"\" resp: requests.Response = requests.get(request_url)", "None) SUB_30_MAPPING_DICT = config.get(\"sub_30_class_config\", None) if not GERMAN_TO_ENGLISH_CLASS_DICT or not SUB_30_MAPPING_DICT: raise IOError", "current_character_info: print(f\"Cant process data for character: {current_character_name}\") continue update_character_info(current_character_info, worksheet, class_range, worksheet[worksheet.min_row], i)", "to process the class info of every player, mapping it into a dictionary", "XIV API\"\"\" current_request_url: str = f\"{BASE_URL_XIV_API_CHARACTER}search?name={character_name}&server=Moogle\" resp_json: dict = do_http_get(current_request_url) print(f\"Processing data for:", "end) return start + 1, end + 1 def do_http_get(request_url: str): \"\"\"helper method", "break current_character_name: str = f\"{current_row[0].value} {current_row[1].value}\" current_character_info: dict = process_class_info(get_character_info(get_character_id(current_character_name))) if not current_character_info:", "DEBUG_ENABLED = False def main(filepath): \"\"\"main method, used to process data and update", "column index is the actual index, while the header_row is a list, #", "print(\"MAPPED CLASS VALUES:\") print(out) return out def generate_class_range(worksheet: openpyxl.workbook.workbook.Worksheet): \"\"\"helper method, to create", "do http requests\"\"\" resp: requests.Response = requests.get(request_url) if resp.ok: return resp.json() else: raise", "arguments.config if arguments.d: DEBUG_ENABLED = arguments.d with open(os.path.join(CONFIG_LOCATION, \"eor_config.json\")) as file: config =", "ID of an character via XIV API\"\"\" current_request_url: str = f\"{BASE_URL_XIV_API_CHARACTER}search?name={character_name}&server=Moogle\" resp_json: dict", "openpyxl.load_workbook(filepath) worksheet = workbook.active class_range: tuple = generate_class_range(worksheet) for i in range(worksheet.min_row +", "worksheet = workbook.active class_range: tuple = generate_class_range(worksheet) for i in range(worksheet.min_row + 1,", "tuple, current_row: int): \"\"\"method to update the character class information in the excel", "to receive character info via XIV API\"\"\" if not character_id: return None current_request_url:", "worksheet: openpyxl.workbook.workbook.Worksheet, class_range: tuple, header_row: tuple, current_row: int): \"\"\"method to update the character", 
"print(\"Finished!\") def update_character_info(current_character_info: dict, worksheet: openpyxl.workbook.workbook.Worksheet, class_range: tuple, header_row: tuple, current_row: int): \"\"\"method", "start_set = False for col in header_row: if col.value is None: break if", "# thus reducing the index by 1 mapped_class_name = GERMAN_TO_ENGLISH_CLASS_DICT.get(header_row[i - 1].value) new_class_val", "for i in range(class_range[0], class_range[1]): # reduce i by one because column index", "str = f\"{BASE_URL_XIV_API_CHARACTER}search?name={character_name}&server=Moogle\" resp_json: dict = do_http_get(current_request_url) print(f\"Processing data for: {character_name}\") return resp_json[\"Results\"][0][\"ID\"]", "file to process\") parser.add_argument(\"--config\", type=str, required=False) parser.add_argument(\"--d\", required=False, action='store_true') args = parser.parse_args() load_config(args)", "None current_request_url: str = f\"{BASE_URL_XIV_API_CHARACTER}{character_id}\" resp_json: dict = do_http_get(current_request_url) return resp_json def get_character_id(character_name:", "None: break if col.value in GERMAN_TO_ENGLISH_CLASS_DICT.keys() and not start_set: start = end start_set", "= {SUB_30_MAPPING_DICT.get(entry[\"UnlockedState\"][\"Name\"], entry[\"UnlockedState\"][\"Name\"]): entry[\"Level\"] for entry in data_to_process} # special case -> arcanist", "with open(os.path.join(CONFIG_LOCATION, \"eor_config.json\")) as file: config = json.load(file) GERMAN_TO_ENGLISH_CLASS_DICT = config.get(\"class_config\", None) SUB_30_MAPPING_DICT", "dict = do_http_get(current_request_url) return resp_json def get_character_id(character_name: str): \"\"\"Help method to get the", "\"_updated.xlsx\")) print(\"Finished!\") def update_character_info(current_character_info: dict, worksheet: openpyxl.workbook.workbook.Worksheet, class_range: tuple, header_row: tuple, current_row: int):", "workbook.active class_range: tuple = generate_class_range(worksheet) for i in range(worksheet.min_row + 1, worksheet.max_row): current_row:", "class_range, worksheet[worksheet.min_row], i) workbook.save(filepath.replace(\".xlsx\", \"_updated.xlsx\")) print(\"Finished!\") def update_character_info(current_character_info: dict, worksheet: openpyxl.workbook.workbook.Worksheet, class_range: tuple,", "end + 1 def do_http_get(request_url: str): \"\"\"helper method to do http requests\"\"\" resp:", "do_http_get(current_request_url) return resp_json def get_character_id(character_name: str): \"\"\"Help method to get the ID of", "if arguments.config: CONFIG_LOCATION = arguments.config if arguments.d: DEBUG_ENABLED = arguments.d with open(os.path.join(CONFIG_LOCATION, \"eor_config.json\"))", "{} SUB_30_MAPPING_DICT: dict = {} CONFIG_LOCATION = os.getcwd() DEBUG_ENABLED = False def main(filepath):", "\"\"\"helper method, to create the excel ranges for the player classes\"\"\" header_row: tuple", "= new_class_val def process_class_info(class_info: dict): \"\"\"method to process the class info of every", "os.getcwd() DEBUG_ENABLED = False def main(filepath): \"\"\"main method, used to process data and", "= generate_class_range(worksheet) for i in range(worksheet.min_row + 1, worksheet.max_row): current_row: tuple = worksheet[i]", "= worksheet.cell(row=current_row, column=i) current_cell.value = new_class_val def process_class_info(class_info: dict): \"\"\"method to process the", "header_row: tuple = worksheet[worksheet.min_row] end = 0 start = 0 start_set = False", "1, end + 1 def do_http_get(request_url: str): \"\"\"helper method 
to do http requests\"\"\"", "file: config = json.load(file) GERMAN_TO_ENGLISH_CLASS_DICT = config.get(\"class_config\", None) SUB_30_MAPPING_DICT = config.get(\"sub_30_class_config\", None) if", "= worksheet[worksheet.min_row] end = 0 start = 0 start_set = False for col", "info of every player, mapping it into a dictionary for easier usage\"\"\" if", "in range(class_range[0], class_range[1]): # reduce i by one because column index is the", "print(f\"Processing data for: {character_name}\") return resp_json[\"Results\"][0][\"ID\"] if resp_json[\"Results\"] else None def load_config(arguments: argparse.Namespace):", "excel workbook\"\"\" workbook: openpyxl.Workbook = openpyxl.load_workbook(filepath) worksheet = workbook.active class_range: tuple = generate_class_range(worksheet)", "{} CONFIG_LOCATION = os.getcwd() DEBUG_ENABLED = False def main(filepath): \"\"\"main method, used to", "# reduce i by one because column index is the actual index, while", "new_class_val = current_character_info.get(mapped_class_name, 0) if DEBUG_ENABLED: character_name = f\"{worksheet.cell(current_row, 1).value} {worksheet.cell(current_row, 2).value}\" current_class", "to do http requests\"\"\" resp: requests.Response = requests.get(request_url) if resp.ok: return resp.json() else:", "= config.get(\"class_config\", None) SUB_30_MAPPING_DICT = config.get(\"sub_30_class_config\", None) if not GERMAN_TO_ENGLISH_CLASS_DICT or not SUB_30_MAPPING_DICT:", "config.get(\"sub_30_class_config\", None) if not GERMAN_TO_ENGLISH_CLASS_DICT or not SUB_30_MAPPING_DICT: raise IOError if __name__ ==", "and not current_row[1].value: break current_character_name: str = f\"{current_row[0].value} {current_row[1].value}\" current_character_info: dict = process_class_info(get_character_info(get_character_id(current_character_name)))", "raise IOError if __name__ == '__main__': parser = argparse.ArgumentParser(description=\"Process the EoR Membership excel.\")", "openpyxl import requests import json import argparse BASE_URL_XIV_API_CHARACTER: str = \"https://xivapi.com/character/\" GERMAN_TO_ENGLISH_CLASS_DICT: dict", "workbook\"\"\" workbook: openpyxl.Workbook = openpyxl.load_workbook(filepath) worksheet = workbook.active class_range: tuple = generate_class_range(worksheet) for", "to process data and update the excel workbook\"\"\" workbook: openpyxl.Workbook = openpyxl.load_workbook(filepath) worksheet", "worksheet[i] if not current_row[0].value and not current_row[1].value: break current_character_name: str = f\"{current_row[0].value} {current_row[1].value}\"", "current_class = header_row[i - 1].value print(f\"Setting value {new_class_val} for class {current_class} for character", "print(start, end) return start + 1, end + 1 def do_http_get(request_url: str): \"\"\"helper", "worksheet.cell(row=current_row, column=i) current_cell.value = new_class_val def process_class_info(class_info: dict): \"\"\"method to process the class", "GERMAN_TO_ENGLISH_CLASS_DICT.keys() and not start_set: start = end start_set = True end += 1", "= True end += 1 if DEBUG_ENABLED: print(\"CLASS ROW RANGES:\") print(start, end) return", "the excel sheet\"\"\" for i in range(class_range[0], class_range[1]): # reduce i by one", "header_row: if col.value is None: break if col.value in GERMAN_TO_ENGLISH_CLASS_DICT.keys() and not start_set:", "print(f\"Setting value {new_class_val} for class {current_class} for character {character_name}\") current_cell = worksheet.cell(row=current_row, column=i)", "False def main(filepath): \"\"\"main method, used to process data 
and update the excel", "mapped_class_name = GERMAN_TO_ENGLISH_CLASS_DICT.get(header_row[i - 1].value) new_class_val = current_character_info.get(mapped_class_name, 0) if DEBUG_ENABLED: character_name =", "def main(filepath): \"\"\"main method, used to process data and update the excel workbook\"\"\"", "to create the excel ranges for the player classes\"\"\" header_row: tuple = worksheet[worksheet.min_row]", "return out def generate_class_range(worksheet: openpyxl.workbook.workbook.Worksheet): \"\"\"helper method, to create the excel ranges for", "the EoR Membership excel.\") parser.add_argument(\"--filename\", metavar='[path to file]', type=str, help=\"the location of the", "\"\"\"method to update the character class information in the excel sheet\"\"\" for i" ]
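
# Example "eor_config.json" (a minimal sketch): load_config() above expects the
# two keys "class_config" (spreadsheet header name -> class name as reported by
# the XIV API data) and "sub_30_class_config" (sub-level-30 class name -> job
# name). The concrete entries below are illustrative assumptions, not taken
# from this script:
#
# {
#     "class_config": {"Gladiator": "Paladin", "Hermetiker": "Scholar"},
#     "sub_30_class_config": {"Arcanist": "Scholar"}
# }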
[ "configuration itself will be copied. Returns: int: configuration id of the newly copied", "system. For performance reason, only brief information of the configuration will be returned", "configuration_id (int): configuration identifier. Returns: dict: configuration schedule. Raises: QBProcessingError: will be raised", "configuration_id (int): configuration identifier. Returns: int: value in the range of 0~100, with", "configuration (str): XML/JSON document. Returns: int: configuration id of newly created configuration. Raises:", "self.quickbuild._request( 'GET', 'configurations/{}/parent'.format(configuration_id), callback=response2py, ) def update(self, configuration: str) -> int: \"\"\" Update", "configuration_id (int): Configuration identifier. content_type (Optional[ContentType]): Select needed content type if not set,", "configuration_id (int): configuration identifier. Returns: str: configuration run mode. \"\"\" return self.quickbuild._request( 'GET',", "of average build duration. \"\"\" params = dict() if from_date: params['from_date'] = str(from_date)", "def get_parent(self, configuration_id: int) -> int: \"\"\" Get parent configuration id. Args: configuration_id", "recursively; otherwise, only the configuration itself will be copied. Returns: int: configuration id", "schedule. Raises: QBProcessingError: will be raised if schedule is inherited from parent configuration.", "`parent id`. You may get the full xml representation using id if necessary.", "scratch: you may retrieve xml representation of a templating configuration using various configuration", "return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, ) def copy(self, configuration_id: int, parent_id: int, name:", "\"\"\" Get configuration name. Args: configuration_id (int): configuration identifier. Returns: str: configuration name.", "be copied. Returns: int: configuration id of the newly copied configuration. \"\"\" params", "self.quickbuild._request( 'GET', 'configurations/{}/success_rate'.format(configuration_id), callback=response2py, params=params, ) def get_parent(self, configuration_id: int) -> int: \"\"\"", "a templating configuration using various configuration access methods or `get_info()` with content_type=ContentType.XML, remove", "if you creating configuration by copying existing one and want to remain the", "get_schedule(self, configuration_id: int) -> dict: \"\"\" Get configuration schedule. Args: configuration_id (int): configuration", "of descendent configurations. \"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id: int, *, content_type:", "a list of child configurations. Args: parent_id (int): parent configuration identifier. Returns: List[dict]:", "'GET', 'configurations/{}/parent'.format(configuration_id), callback=response2py, ) def update(self, configuration: str) -> int: \"\"\" Update a", "str: \"\"\" Get configuration path. Args: configuration_id (int): configuration identifier. Returns: str: configuration", "Get a list of child configurations. Args: parent_id (int): parent configuration identifier. Returns:", "configuration_id (int): configuration identifier. Returns: str: configuration error message. \"\"\" return self.quickbuild._request( 'GET',", "of child configurations. Args: parent_id (int): parent configuration identifier. Returns: List[dict]: list of", "get_descendent(self, parent_id: int) -> List[dict]: \"\"\" Get a list of descendent configurations. 
Args:", "Args: configuration (str): XML document. Returns: int: configuration id being updated. \"\"\" return", "duration. \"\"\" params = dict() if from_date: params['from_date'] = str(from_date) if to_date: params['to_date']", "configuration. \"\"\" params = dict( parent_id=parent_id, name=name, recursive=recursive, ) return self.quickbuild._request( 'GET', 'configurations/{}/copy'.format(configuration_id),", "to_date: Optional[datetime.date] ) -> int: \"\"\" Get configuration average duration. Args: configuration_id (int):", "(int): configuration identifier. Returns: str: configuration name. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/name'.format(configuration_id), )", "configuration_id (int): configuration identifier. Returns: str: configuration path. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/path'.format(configuration_id),", "all configurations in the system. For performance reason, only brief information of the", "return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id: int, *, content_type: Optional[ContentType] = None )", "configuration error message. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def get_run_mode(self, configuration_id: int)", "configuration info. Args: configuration_id (int): Configuration identifier. content_type (Optional[ContentType]): Select needed content type", "range of 0~100, with 0 stands for 0%, and 100 stands for 100%.", "configuration schedule. Raises: QBProcessingError: will be raised if schedule is inherited from parent", "configuration_id: int, parent_id: int, name: str, recursive: bool ) -> int: \"\"\" Copy", ") def get_id_by_path(self, path: str) -> int: \"\"\" Get configuration id by path.", "dict) -> List[dict]: return self.quickbuild._request( 'GET', 'configurations', callback=response2py, params=params, ) def get(self) ->", "-> Union[dict, str]: \"\"\" Get full configuration info. Args: configuration_id (int): Configuration identifier.", "params=params, ) def get_success_rate(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) ->", "0%, and 100 stands for 100%. \"\"\" params = dict() if from_date: params['from_date']", "(available since version 4.0.72) Args: configuration_id (int): Configuration id to be copied. parent_id", "parent_id: int) -> List[dict]: \"\"\" Get a list of descendent configurations. Args: parent_id", "configuration_id: int) -> str: \"\"\" Get configuration run mode. Args: configuration_id (int): configuration", "int: configuration id being updated. \"\"\" return self.quickbuild._request( 'POST', 'configurations', callback=response2py, data=configuration )", "may get the full xml representation using id if necessary. Returns: List[dict]: list", "if from_date: params['from_date'] = str(from_date) if to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET',", "`schedule`, `runMode`, `errorMessage`, `parent id`. You may get the full xml representation using", "content_type (Optional[ContentType]): Select needed content type if not set, default value of client", "Raises: QBError: XML validation error \"\"\" self.quickbuild._validate_for_id(configuration) return self.update(configuration) def delete(self, configuration_id: int)", "-> int: \"\"\" Get configuration average duration. Args: configuration_id (int): configuration identifier. 
Returns:", "Copy configuration (available since version 4.0.72) Args: configuration_id (int): Configuration id to be", "\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def get_run_mode(self, configuration_id: int) -> str: \"\"\"", "(str): XML document. Returns: int: configuration id being updated. \"\"\" return self.quickbuild._request( 'POST',", "of descendent configurations. Args: parent_id (int): parent configuration identifier. Returns: List[dict]: list of", "configuration identifier. Returns: List[dict]: list of descendent configurations. \"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def", "import List, Optional, Union from quickbuild.helpers import ContentType, response2py class Configurations: def __init__(self,", "XML representation of the configuration using `get_info()` method with content_type=ContentType.XML and modify certain", "= str(from_date) if to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/average_duration'.format(configuration_id), callback=response2py, params=params,", "one and want to remain the passwords, the \"secret\" attribute should then be", "the parent to place newly copied configuration. name (str): Name of the newly", "(str): configuration path. Returns: int: configuration identifier. \"\"\" return self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id:", "def __init__(self, quickbuild): self.quickbuild = quickbuild def _get(self, params: dict) -> List[dict]: return", ") def get_path(self, configuration_id: int) -> str: \"\"\" Get configuration path. Args: configuration_id", "configuration. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py, ) def get_average_duration(self, configuration_id: int, *,", "params['from_date'] = str(from_date) if to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/average_duration'.format(configuration_id), callback=response2py,", "-> int: \"\"\" Get parent configuration id. Args: configuration_id (int): configuration identifier. Returns:", "\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/parent'.format(configuration_id), callback=response2py, ) def update(self, configuration: str) -> int:", "str]: \"\"\" Get full configuration info. Args: configuration_id (int): Configuration identifier. content_type (Optional[ContentType]):", "average build duration. \"\"\" params = dict() if from_date: params['from_date'] = str(from_date) if", "return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type), content_type=content_type, ) def get_path(self, configuration_id: int) ->", "average duration. Args: configuration_id (int): configuration identifier. Returns: int: milliseconds of average build", "= dict() if from_date: params['from_date'] = str(from_date) if to_date: params['to_date'] = str(to_date) return", "str(from_date) if to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/average_duration'.format(configuration_id), callback=response2py, params=params, )", "self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id: int) -> str: \"\"\" Get configuration name. Args: configuration_id", "Args: parent_id (int): parent configuration identifier. 
Returns: List[dict]: list of child configurations. \"\"\"", "'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type), content_type=content_type, ) def get_path(self, configuration_id: int) -> str: \"\"\" Get", "Name of the newly copied configuration. recursive (bool): Specify parameter recursive=true to copy", "mode. Args: configuration_id (int): configuration identifier. Returns: str: configuration run mode. \"\"\" return", "'GET', 'configurations/{}/average_duration'.format(configuration_id), callback=response2py, params=params, ) def get_success_rate(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date:", "(int): configuration identifier. Returns: str: configuration path. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/path'.format(configuration_id), )", "Args: parent_id (int): parent configuration identifier. Returns: List[dict]: list of descendent configurations. \"\"\"", "self.quickbuild._request( 'GET', 'configurations/{}/name'.format(configuration_id), ) def get_description(self, configuration_id: int) -> str: \"\"\" Get configuration", "return self.quickbuild._request( 'GET', 'configurations/{}/success_rate'.format(configuration_id), callback=response2py, params=params, ) def get_parent(self, configuration_id: int) -> int:", "Normally you do not need to create the XML from scratch: you may", "Returns: List[dict]: list of configurations. \"\"\" return self._get(dict(recursive=True)) def get_child(self, parent_id: int) ->", "Returns: List[dict]: list of descendent configurations. \"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id:", "Optional[datetime.date] ) -> int: \"\"\" Get configuration success rate. Args: configuration_id (int): configuration", "configuration average duration. Args: configuration_id (int): configuration identifier. Returns: int: milliseconds of average", "Get parent configuration id. Args: configuration_id (int): configuration identifier. Returns: int: id of", "(int): configuration identifier. Returns: str: configuration error message. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id),", ") def get_run_mode(self, configuration_id: int) -> str: \"\"\" Get configuration run mode. Args:", "(int): configuration identifier. Returns: int: id of parent configuration. Raises: QBProcessingError: the configuration", "place newly copied configuration. name (str): Name of the newly copied configuration. recursive", "self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def get_run_mode(self, configuration_id: int) -> str: \"\"\" Get configuration", "use it for create() method. - Secret elements (Elements with attribute \"secret=encrypt\" in", "configuration. Args: configuration_id (int): configuration id. Returns: None \"\"\" return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id),", "parent configuration identifier. Returns: List[dict]: list of child configurations. \"\"\" return self._get(dict(parent_id=parent_id)) def", "Union[dict, str]: configuration content. 
\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type), content_type=content_type, )", "params=params, ) def get(self) -> List[dict]: \"\"\" Get all configurations in the system.", "\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/name'.format(configuration_id), ) def get_description(self, configuration_id: int) -> str: \"\"\"", "str: configuration run mode. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/run_mode'.format(configuration_id), ) def get_schedule(self, configuration_id:", "Returns: int: configuration id being updated. \"\"\" return self.quickbuild._request( 'POST', 'configurations', callback=response2py, data=configuration", "callback=partial(response2py, content_type=content_type), content_type=content_type, ) def get_path(self, configuration_id: int) -> str: \"\"\" Get configuration", "configuration: str) -> int: \"\"\" Create a configuration using XML/JSON configuration. Please note", "child configurations. Args: parent_id (int): parent configuration identifier. Returns: List[dict]: list of child", "(int): parent configuration identifier. Returns: List[dict]: list of child configurations. \"\"\" return self._get(dict(parent_id=parent_id))", "'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type), content_type=content_type, ) def get_path(self, configuration_id: int) -> str: \"\"\"", "copied configuration. recursive (bool): Specify parameter recursive=true to copy specified configuration and all", "need to create the XML from scratch: you may get XML representation of", "configuration id of newly created configuration. Raises: QBError: XML validation error \"\"\" self.quickbuild._validate_for_id(configuration)", "milliseconds of average build duration. \"\"\" params = dict() if from_date: params['from_date'] =", "callback=response2py, params=params, ) def get(self) -> List[dict]: \"\"\" Get all configurations in the", "specified configuration and all its descendant configurations recursively; otherwise, only the configuration itself", "configuration_id: int) -> str: \"\"\" Get configuration description. Args: configuration_id (int): configuration identifier.", "of the parent to place newly copied configuration. name (str): Name of the", "and modify certain parts of the XML. Args: configuration (str): XML document. Returns:", ") def get_average_duration(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int:", "dict() if from_date: params['from_date'] = str(from_date) if to_date: params['to_date'] = str(to_date) return self.quickbuild._request(", "Optional[datetime.date] ) -> int: \"\"\" Get configuration average duration. Args: configuration_id (int): configuration", "\"secret=encrypt\" in XML representation of an existing configuration, typically they are repository passwords,", "the passwords, the \"secret\" attribute should then be preserved. Args: configuration (str): XML/JSON", "callback=response2py, params=params, ) def get_success_rate(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] )", "\"\"\" Get configuration average duration. Args: configuration_id (int): configuration identifier. 
Returns: int: milliseconds", "using various configuration access methods or `get_info()` with content_type=ContentType.XML, remove the id element,", "content_type=content_type, ) def get_path(self, configuration_id: int) -> str: \"\"\" Get configuration path. Args:", "element, modify certain parts and use it for create() method. - Secret elements", "including `id`, `name`, `description`, `schedule`, `runMode`, `errorMessage`, `parent id`. You may get the", "denotes id of the parent configuration. Normally you do not need to create", "remove the id element, modify certain parts and use it for create() method.", "QBProcessingError: will be raised if schedule is inherited from parent configuration. \"\"\" return", "variable values, etc.) should not contain the \"secret\" attribute; otherwise QuickBuild will think", "str(from_date) if to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/success_rate'.format(configuration_id), callback=response2py, params=params, )", "configuration (str): XML document. Returns: int: configuration id being updated. \"\"\" return self.quickbuild._request(", "of a templating configuration using various configuration access methods or `get_info()` with content_type=ContentType.XML,", "\"\"\" Get a list of child configurations. Args: parent_id (int): parent configuration identifier.", "XML configuration. Normally you do not need to create the XML from scratch:", "List[dict]: return self.quickbuild._request( 'GET', 'configurations', callback=response2py, params=params, ) def get(self) -> List[dict]: \"\"\"", "Args: path (str): configuration path. Returns: int: configuration identifier. \"\"\" return self.quickbuild.identifiers.get_configuration_id_by_path(path) def", "str: \"\"\" Get configuration name. Args: configuration_id (int): configuration identifier. Returns: str: configuration", "get_description(self, configuration_id: int) -> str: \"\"\" Get configuration description. Args: configuration_id (int): configuration", "Configuration id of the parent to place newly copied configuration. name (str): Name", "configuration success rate. Args: configuration_id (int): configuration identifier. Returns: int: value in the", "int: \"\"\" Get configuration average duration. Args: configuration_id (int): configuration identifier. Returns: int:", "get_parent(self, configuration_id: int) -> int: \"\"\" Get parent configuration id. Args: configuration_id (int):", "Args: configuration_id (int): Configuration identifier. content_type (Optional[ContentType]): Select needed content type if not", "Returns: dict: configuration schedule. Raises: QBProcessingError: will be raised if schedule is inherited", "-> str: \"\"\" Get configuration run mode. Args: configuration_id (int): configuration identifier. Returns:", "is inherited from parent configuration. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py, ) def", "configuration path. Returns: int: configuration identifier. \"\"\" return self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id: int)", "from scratch: you may get XML representation of the configuration using `get_info()` method", "copy specified configuration and all its descendant configurations recursively; otherwise, only the configuration", "will be copied. Returns: int: configuration id of the newly copied configuration. \"\"\"", "using id if necessary. Returns: List[dict]: list of configurations. 
\"\"\" return self._get(dict(recursive=True)) def", "configuration identifier. Returns: str: configuration run mode. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/run_mode'.format(configuration_id), )", "will be returned here, including `id`, `name`, `description`, `schedule`, `runMode`, `errorMessage`, `parent id`.", "id`. You may get the full xml representation using id if necessary. Returns:", "is used. Returns: Union[dict, str]: configuration content. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py,", "duration. Args: configuration_id (int): configuration identifier. Returns: int: milliseconds of average build duration.", "callback=response2py, data=configuration ) def create(self, configuration: str) -> int: \"\"\" Create a configuration", "the xml from scratch: you may retrieve xml representation of a templating configuration", "want to remain the passwords, the \"secret\" attribute should then be preserved. Args:", "Args: configuration_id (int): Configuration id to be copied. parent_id (int): Configuration id of", "Create a configuration using XML/JSON configuration. Please note that: - The parent element", "\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/path'.format(configuration_id), ) def get_id_by_path(self, path: str) -> int: \"\"\"", "copied. parent_id (int): Configuration id of the parent to place newly copied configuration.", "get_info(self, configuration_id: int, *, content_type: Optional[ContentType] = None ) -> Union[dict, str]: \"\"\"", "Args: configuration_id (int): configuration identifier. Returns: str: configuration name. \"\"\" return self.quickbuild._request( 'GET',", "and 100 stands for 100%. \"\"\" params = dict() if from_date: params['from_date'] =", "newly copied configuration. name (str): Name of the newly copied configuration. recursive (bool):", "configurations. \"\"\" return self._get(dict(parent_id=parent_id)) def get_descendent(self, parent_id: int) -> List[dict]: \"\"\" Get a", "\"\"\" Get a list of descendent configurations. Args: parent_id (int): parent configuration identifier.", "parent configuration. Raises: QBProcessingError: the configuration is root configuration and does not have", "Update a configuration using XML configuration. Normally you do not need to create", "the configuration will be returned here, including `id`, `name`, `description`, `schedule`, `runMode`, `errorMessage`,", "return self.quickbuild._request( 'GET', 'configurations', callback=response2py, params=params, ) def get(self) -> List[dict]: \"\"\" Get", "the system. For performance reason, only brief information of the configuration will be", "passwords, secret variable values, etc.) should not contain the \"secret\" attribute; otherwise QuickBuild", "the configuration using `get_info()` method with content_type=ContentType.XML and modify certain parts of the", "def get_description(self, configuration_id: int) -> str: \"\"\" Get configuration description. Args: configuration_id (int):", "xml representation using id if necessary. Returns: List[dict]: list of configurations. \"\"\" return", "note that: - The parent element denotes id of the parent configuration. Normally", "(int): Configuration id of the parent to place newly copied configuration. name (str):", "a list of descendent configurations. Args: parent_id (int): parent configuration identifier. Returns: List[dict]:", "configuration content. 
\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type), content_type=content_type, ) def get_path(self,", "str) -> int: \"\"\" Create a configuration using XML/JSON configuration. Please note that:", "method with content_type=ContentType.XML and modify certain parts of the XML. Args: configuration (str):", "(int): Configuration id to be copied. parent_id (int): Configuration id of the parent", "its descendant configurations recursively; otherwise, only the configuration itself will be copied. Returns:", "Returns: int: configuration id of newly created configuration. Raises: QBError: XML validation error", "secret variable values, etc.) should not contain the \"secret\" attribute; otherwise QuickBuild will", "for 100%. \"\"\" params = dict() if from_date: params['from_date'] = str(from_date) if to_date:", "configuration using `get_info()` method with content_type=ContentType.XML and modify certain parts of the XML.", "int: \"\"\" Create a configuration using XML/JSON configuration. Please note that: - The", "document. Returns: int: configuration id of newly created configuration. Raises: QBError: XML validation", "*, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int: \"\"\" Get configuration average duration.", ") def copy(self, configuration_id: int, parent_id: int, name: str, recursive: bool ) ->", "an existing configuration, typically they are repository passwords, secret variable values, etc.) should", "str: configuration name. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/name'.format(configuration_id), ) def get_description(self, configuration_id: int)", "not contain the \"secret\" attribute; otherwise QuickBuild will think that the password has", "Args: configuration_id (int): configuration identifier. Returns: str: configuration error message. \"\"\" return self.quickbuild._request(", "you may retrieve xml representation of a templating configuration using various configuration access", "(str): XML/JSON document. Returns: int: configuration id of newly created configuration. Raises: QBError:", "-> dict: \"\"\" Get configuration schedule. Args: configuration_id (int): configuration identifier. Returns: dict:", "return self.update(configuration) def delete(self, configuration_id: int) -> None: \"\"\" Delete configuration. Args: configuration_id", "if schedule is inherited from parent configuration. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py,", "using XML/JSON configuration. Please note that: - The parent element denotes id of", "Args: configuration_id (int): configuration identifier. Returns: dict: configuration schedule. Raises: QBProcessingError: will be", "the \"secret\" attribute should then be preserved. Args: configuration (str): XML/JSON document. Returns:", "list of descendent configurations. \"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id: int, *,", "(int): configuration identifier. Returns: int: milliseconds of average build duration. \"\"\" params =", "get_path(self, configuration_id: int) -> str: \"\"\" Get configuration path. Args: configuration_id (int): configuration", "None: \"\"\" Delete configuration. Args: configuration_id (int): configuration id. Returns: None \"\"\" return", "update(self, configuration: str) -> int: \"\"\" Update a configuration using XML configuration. 
Normally", "descendant configurations recursively; otherwise, only the configuration itself will be copied. Returns: int:", "etc.) should not contain the \"secret\" attribute; otherwise QuickBuild will think that the", "performance reason, only brief information of the configuration will be returned here, including", "id of newly created configuration. Raises: QBError: XML validation error \"\"\" self.quickbuild._validate_for_id(configuration) return", ") -> int: \"\"\" Get configuration average duration. Args: configuration_id (int): configuration identifier.", "(int): configuration identifier. Returns: dict: configuration schedule. Raises: QBProcessingError: will be raised if", "Returns: None \"\"\" return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, ) def copy(self, configuration_id: int,", "stands for 100%. \"\"\" params = dict() if from_date: params['from_date'] = str(from_date) if", "of 0~100, with 0 stands for 0%, and 100 stands for 100%. \"\"\"", "'GET', 'configurations/{}/run_mode'.format(configuration_id), ) def get_schedule(self, configuration_id: int) -> dict: \"\"\" Get configuration schedule.", "parent to place newly copied configuration. name (str): Name of the newly copied", "configuration. Normally you do not need to create the XML from scratch: you", "parent_id=parent_id)) def get_info(self, configuration_id: int, *, content_type: Optional[ContentType] = None ) -> Union[dict,", "xml representation of a templating configuration using various configuration access methods or `get_info()`", "you do not need to create the xml from scratch: you may retrieve", "Get configuration schedule. Args: configuration_id (int): configuration identifier. Returns: dict: configuration schedule. Raises:", "contain the \"secret\" attribute; otherwise QuickBuild will think that the password has already", "configuration id. Returns: None \"\"\" return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, ) def copy(self,", "to copy specified configuration and all its descendant configurations recursively; otherwise, only the", "mode. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/run_mode'.format(configuration_id), ) def get_schedule(self, configuration_id: int) -> dict:", "XML validation error \"\"\" self.quickbuild._validate_for_id(configuration) return self.update(configuration) def delete(self, configuration_id: int) -> None:", "def copy(self, configuration_id: int, parent_id: int, name: str, recursive: bool ) -> int:", "from quickbuild.helpers import ContentType, response2py class Configurations: def __init__(self, quickbuild): self.quickbuild = quickbuild", "parent_id: int, name: str, recursive: bool ) -> int: \"\"\" Copy configuration (available", "configuration: str) -> int: \"\"\" Update a configuration using XML configuration. Normally you", "root configuration and does not have parent. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/parent'.format(configuration_id), callback=response2py,", "Args: configuration_id (int): configuration identifier. Returns: int: id of parent configuration. Raises: QBProcessingError:", "using `get_info()` method with content_type=ContentType.XML and modify certain parts of the XML. Args:", "configuration name. Args: configuration_id (int): configuration identifier. Returns: str: configuration name. \"\"\" return", "parent configuration id. Args: configuration_id (int): configuration identifier. 
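
    # A minimal sketch of the template workflow described in create() above
    # (illustrative only; the configuration id 10 and the crude string edit
    # are assumptions, and a real script should edit the XML with a parser):
    #
    #     template = configurations.get_info(10, content_type=ContentType.XML)
    #     template = template.replace('<id>10</id>', '')  # drop the id element
    #     new_id = configurations.create(template)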
    def delete(self, configuration_id: int) -> None:
        """
        Delete configuration.

        Args:
            configuration_id (int): configuration id.

        Returns:
            None
        """
        return self.quickbuild._request(
            'DELETE',
            'configurations/{}'.format(configuration_id),
            callback=response2py,
        )

    def copy(self, configuration_id: int, parent_id: int, name: str,
             recursive: bool) -> int:
        """
        Copy configuration (available since version 4.0.72).

        Args:
            configuration_id (int): Configuration id to be copied.
            parent_id (int): Configuration id of the parent to place the newly
                copied configuration under.
            name (str): Name of the newly copied configuration.
            recursive (bool): Specify recursive=true to copy the specified
                configuration and all its descendant configurations
                recursively; otherwise, only the configuration itself will be
                copied.

        Returns:
            int: configuration id of the newly copied configuration.
        """
        params = dict(
            parent_id=parent_id,
            name=name,
            recursive=recursive,
        )
        return self.quickbuild._request(
            'GET',
            'configurations/{}/copy'.format(configuration_id),
            callback=response2py,
            params=params,
        )
    def get_info(self,
                 configuration_id: int,
                 *,
                 content_type: Optional[ContentType] = None
                 ) -> Union[dict, str]:
        """
        Get full configuration info.

        Args:
            configuration_id (int): Configuration identifier.
            content_type (Optional[ContentType]): Select needed content type;
                if not set, the default value of the client instance is used.

        Returns:
            Union[dict, str]: configuration content.
        """
        return self.quickbuild._request(
            'GET',
            'configurations/{}'.format(configuration_id),
            callback=partial(response2py, content_type=content_type),
            content_type=content_type,
        )
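    # Requesting the raw XML body is useful as a template for `update()` and
    # `create()` below. A sketch, assuming configuration id 1 exists:
    #
    #     document = qb.configurations.get_info(1, content_type=ContentType.XML)
    #     assert isinstance(document, str)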
\"\"\" return self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id: int) -> str: \"\"\"", "Optional, Union from quickbuild.helpers import ContentType, response2py class Configurations: def __init__(self, quickbuild): self.quickbuild", "Select needed content type if not set, default value of client instance is", "configuration, typically they are repository passwords, secret variable values, etc.) should not contain", "Union[dict, str]: \"\"\" Get full configuration info. Args: configuration_id (int): Configuration identifier. content_type", "'configurations/{}/parent'.format(configuration_id), callback=response2py, ) def update(self, configuration: str) -> int: \"\"\" Update a configuration", "identifier. Returns: str: configuration name. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/name'.format(configuration_id), ) def get_description(self,", "\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py, ) def get_average_duration(self, configuration_id: int, *, from_date:", "Delete configuration. Args: configuration_id (int): configuration id. Returns: None \"\"\" return self.quickbuild._request( 'DELETE',", "functools import partial from typing import List, Optional, Union from quickbuild.helpers import ContentType,", "int) -> List[dict]: \"\"\" Get a list of child configurations. Args: parent_id (int):", "descendent configurations. Args: parent_id (int): parent configuration identifier. Returns: List[dict]: list of descendent", "str]: configuration content. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type), content_type=content_type, ) def", "to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/average_duration'.format(configuration_id), callback=response2py, params=params, ) def get_success_rate(self,", "with content_type=ContentType.XML and modify certain parts of the XML. Args: configuration (str): XML", "import ContentType, response2py class Configurations: def __init__(self, quickbuild): self.quickbuild = quickbuild def _get(self,", ") -> int: \"\"\" Get configuration success rate. Args: configuration_id (int): configuration identifier.", "int) -> str: \"\"\" Get configuration path. Args: configuration_id (int): configuration identifier. Returns:", "'configurations/{}/name'.format(configuration_id), ) def get_description(self, configuration_id: int) -> str: \"\"\" Get configuration description. Args:", "and want to remain the passwords, the \"secret\" attribute should then be preserved.", "bool ) -> int: \"\"\" Copy configuration (available since version 4.0.72) Args: configuration_id", "int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int: \"\"\" Get configuration success", "if necessary. Returns: List[dict]: list of configurations. \"\"\" return self._get(dict(recursive=True)) def get_child(self, parent_id:", "Get configuration success rate. Args: configuration_id (int): configuration identifier. Returns: int: value in", "name: str, recursive: bool ) -> int: \"\"\" Copy configuration (available since version", "'configurations/{}/run_mode'.format(configuration_id), ) def get_schedule(self, configuration_id: int) -> dict: \"\"\" Get configuration schedule. Args:", "identifier. Returns: str: configuration error message. 
\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def", "document. Returns: int: configuration id being updated. \"\"\" return self.quickbuild._request( 'POST', 'configurations', callback=response2py,", "'configurations/{}/average_duration'.format(configuration_id), callback=response2py, params=params, ) def get_success_rate(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date]", ") def update(self, configuration: str) -> int: \"\"\" Update a configuration using XML", "itself will be copied. Returns: int: configuration id of the newly copied configuration.", "return self.quickbuild._request( 'GET', 'configurations/{}/description'.format(configuration_id), ) def get_error_message(self, configuration_id: int) -> str: \"\"\" Get", "The parent element denotes id of the parent configuration. Normally you do not", "if to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/average_duration'.format(configuration_id), callback=response2py, params=params, ) def", "build duration. \"\"\" params = dict() if from_date: params['from_date'] = str(from_date) if to_date:", "id by path. Args: path (str): configuration path. Returns: int: configuration identifier. \"\"\"", "needed content type if not set, default value of client instance is used.", "'POST', 'configurations', callback=response2py, data=configuration ) def create(self, configuration: str) -> int: \"\"\" Create", "return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def get_run_mode(self, configuration_id: int) -> str: \"\"\" Get", "\"\"\" return self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id: int) -> str: \"\"\" Get configuration name.", "Args: configuration (str): XML/JSON document. Returns: int: configuration id of newly created configuration.", "the XML from scratch: you may get XML representation of the configuration using", "the newly copied configuration. \"\"\" params = dict( parent_id=parent_id, name=name, recursive=recursive, ) return", "datetime from functools import partial from typing import List, Optional, Union from quickbuild.helpers", "`runMode`, `errorMessage`, `parent id`. You may get the full xml representation using id", "get_child(self, parent_id: int) -> List[dict]: \"\"\" Get a list of child configurations. Args:", "with content_type=ContentType.XML, remove the id element, modify certain parts and use it for", "of an existing configuration, typically they are repository passwords, secret variable values, etc.)", "def delete(self, configuration_id: int) -> None: \"\"\" Delete configuration. Args: configuration_id (int): configuration", "configurations. Args: parent_id (int): parent configuration identifier. Returns: List[dict]: list of descendent configurations.", "\"secret\" attribute; otherwise QuickBuild will think that the password has already been encrypted.", "\"\"\" Get configuration description. Args: configuration_id (int): configuration identifier. Returns: str: configuration description.", "be raised if schedule is inherited from parent configuration. \"\"\" return self.quickbuild._request( 'GET',", "int: milliseconds of average build duration. \"\"\" params = dict() if from_date: params['from_date']", "id of the parent configuration. 
    def get_error_message(self, configuration_id: int) -> str:
        """
        Get configuration error message.

        Args:
            configuration_id (int): configuration identifier.

        Returns:
            str: configuration error message.
        """
        return self.quickbuild._request(
            'GET',
            'configurations/{}/error_message'.format(configuration_id),
        )

    def get_run_mode(self, configuration_id: int) -> str:
        """
        Get configuration run mode.

        Args:
            configuration_id (int): configuration identifier.

        Returns:
            str: configuration run mode.
        """
        return self.quickbuild._request(
            'GET',
            'configurations/{}/run_mode'.format(configuration_id),
        )
    def get_schedule(self, configuration_id: int) -> dict:
        """
        Get configuration schedule.

        Args:
            configuration_id (int): configuration identifier.

        Returns:
            dict: configuration schedule.

        Raises:
            QBProcessingError: will be raised if the schedule is inherited
                from the parent configuration.
        """
        return self.quickbuild._request(
            'GET',
            'configurations/{}/schedule'.format(configuration_id),
            callback=response2py,
        )
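    # Since an inherited schedule raises instead of returning a value, callers
    # may want to handle that case explicitly. A sketch; the import path of
    # QBProcessingError is an assumption, it is not defined in this module:
    #
    #     from quickbuild.exceptions import QBProcessingError
    #
    #     try:
    #         schedule = qb.configurations.get_schedule(1)
    #     except QBProcessingError:
    #         schedule = None  # inherited from the parent configuration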
    def get_average_duration(self,
                             configuration_id: int,
                             *,
                             from_date: Optional[datetime.date],
                             to_date: Optional[datetime.date]
                             ) -> int:
        """
        Get configuration average duration.

        Args:
            configuration_id (int): configuration identifier.
            from_date (Optional[datetime.date]): start of the statistics window.
            to_date (Optional[datetime.date]): end of the statistics window.

        Returns:
            int: milliseconds of average build duration.
        """
        params = dict()

        if from_date:
            params['from_date'] = str(from_date)

        if to_date:
            params['to_date'] = str(to_date)

        return self.quickbuild._request(
            'GET',
            'configurations/{}/average_duration'.format(configuration_id),
            callback=response2py,
            params=params,
        )
    def get_success_rate(self,
                         configuration_id: int,
                         *,
                         from_date: Optional[datetime.date],
                         to_date: Optional[datetime.date]
                         ) -> int:
        """
        Get configuration success rate.

        Args:
            configuration_id (int): configuration identifier.
            from_date (Optional[datetime.date]): start of the statistics window.
            to_date (Optional[datetime.date]): end of the statistics window.

        Returns:
            int: value in the range 0~100, where 0 stands for 0% and 100
                stands for 100%.
        """
        params = dict()

        if from_date:
            params['from_date'] = str(from_date)

        if to_date:
            params['to_date'] = str(to_date)

        return self.quickbuild._request(
            'GET',
            'configurations/{}/success_rate'.format(configuration_id),
            callback=response2py,
            params=params,
        )
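    # Both statistics methods take a keyword-only date window; passing None for
    # both dates queries the whole history. A sketch of a 30-day window:
    #
    #     import datetime
    #
    #     today = datetime.date.today()
    #     rate = qb.configurations.get_success_rate(
    #         1,
    #         from_date=today - datetime.timedelta(days=30),
    #         to_date=today,
    #     )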
    def get_parent(self, configuration_id: int) -> int:
        """
        Get parent configuration id.

        Args:
            configuration_id (int): configuration identifier.

        Returns:
            int: id of parent configuration.

        Raises:
            QBProcessingError: the configuration is the root configuration and
                does not have a parent.
        """
        return self.quickbuild._request(
            'GET',
            'configurations/{}/parent'.format(configuration_id),
            callback=response2py,
        )
    def update(self, configuration: str) -> int:
        """
        Update a configuration using an XML configuration.

        Normally you do not need to create the XML from scratch: you may get
        the XML representation of the configuration using the `get_info()`
        method with content_type=ContentType.XML and modify certain parts of
        the XML.

        Args:
            configuration (str): XML document.

        Returns:
            int: id of the configuration being updated.
        """
        return self.quickbuild._request(
            'POST',
            'configurations',
            callback=response2py,
            data=configuration
        )
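    # A typical read-modify-write round trip, sketched. The replace() call is
    # only an illustration of editing the raw XML (any XML tooling would do),
    # and the description element shown is an assumption:
    #
    #     document = qb.configurations.get_info(1, content_type=ContentType.XML)
    #     document = document.replace('<description></description>',
    #                                 '<description>nightly build</description>')
    #     qb.configurations.update(document)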
\"\"\" return self._get(dict(parent_id=parent_id)) def get_descendent(self, parent_id: int) -> List[dict]:", "'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py, ) def get_average_duration(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date]", "the \"secret\" attribute; otherwise QuickBuild will think that the password has already been", "self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id: int, *, content_type: Optional[ContentType] = None ) ->", "self.quickbuild._request( 'POST', 'configurations', callback=response2py, data=configuration ) def create(self, configuration: str) -> int: \"\"\"", "error message. Args: configuration_id (int): configuration identifier. Returns: str: configuration error message. \"\"\"", "'GET', 'configurations/{}/success_rate'.format(configuration_id), callback=response2py, params=params, ) def get_parent(self, configuration_id: int) -> int: \"\"\" Get", "List, Optional, Union from quickbuild.helpers import ContentType, response2py class Configurations: def __init__(self, quickbuild):", "configuration_id: int) -> dict: \"\"\" Get configuration schedule. Args: configuration_id (int): configuration identifier.", "copy(self, configuration_id: int, parent_id: int, name: str, recursive: bool ) -> int: \"\"\"", "from functools import partial from typing import List, Optional, Union from quickbuild.helpers import", "Raises: QBProcessingError: the configuration is root configuration and does not have parent. \"\"\"", "*, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int: \"\"\" Get configuration success rate.", "\"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id: int, *, content_type: Optional[ContentType] = None", "stands for 0%, and 100 stands for 100%. \"\"\" params = dict() if", "created configuration. Raises: QBError: XML validation error \"\"\" self.quickbuild._validate_for_id(configuration) return self.update(configuration) def delete(self,", "def get_child(self, parent_id: int) -> List[dict]: \"\"\" Get a list of child configurations.", "you may get XML representation of the configuration using `get_info()` method with content_type=ContentType.XML", "return self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id: int) -> str: \"\"\" Get configuration name. Args:", "get(self) -> List[dict]: \"\"\" Get all configurations in the system. For performance reason,", "you do not need to create the XML from scratch: you may get", "get_success_rate(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int: \"\"\" Get", "copied. Returns: int: configuration id of the newly copied configuration. \"\"\" params =", "parts of the XML. Args: configuration (str): XML document. Returns: int: configuration id", "You may get the full xml representation using id if necessary. Returns: List[dict]:", "parameter recursive=true to copy specified configuration and all its descendant configurations recursively; otherwise,", "used. Returns: Union[dict, str]: configuration content. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type),", "id of parent configuration. Raises: QBProcessingError: the configuration is root configuration and does", "the configuration is root configuration and does not have parent. 
\"\"\" return self.quickbuild._request(", ") -> int: \"\"\" Copy configuration (available since version 4.0.72) Args: configuration_id (int):", "existing configuration, typically they are repository passwords, secret variable values, etc.) should not", "import datetime from functools import partial from typing import List, Optional, Union from", "of the XML. Args: configuration (str): XML document. Returns: int: configuration id being", "default value of client instance is used. Returns: Union[dict, str]: configuration content. \"\"\"", "path. Returns: int: configuration identifier. \"\"\" return self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id: int) ->", "parent configuration. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py, ) def get_average_duration(self, configuration_id: int,", "identifier. \"\"\" return self.quickbuild.identifiers.get_configuration_id_by_path(path) def get_name(self, configuration_id: int) -> str: \"\"\" Get configuration", "configuration identifier. Returns: dict: configuration schedule. Raises: QBProcessingError: will be raised if schedule", "-> int: \"\"\" Get configuration success rate. Args: configuration_id (int): configuration identifier. Returns:", "attribute \"secret=encrypt\" in XML representation of an existing configuration, typically they are repository", "info. Args: configuration_id (int): Configuration identifier. content_type (Optional[ContentType]): Select needed content type if", "Args: configuration_id (int): configuration identifier. Returns: str: configuration description. \"\"\" return self.quickbuild._request( 'GET',", "self.quickbuild._request( 'GET', 'configurations/{}/path'.format(configuration_id), ) def get_id_by_path(self, path: str) -> int: \"\"\" Get configuration", "do not need to create the XML from scratch: you may get XML", "ContentType, response2py class Configurations: def __init__(self, quickbuild): self.quickbuild = quickbuild def _get(self, params:", "str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/average_duration'.format(configuration_id), callback=response2py, params=params, ) def get_success_rate(self, configuration_id: int, *,", "parent_id (int): parent configuration identifier. Returns: List[dict]: list of descendent configurations. \"\"\" return", "-> str: \"\"\" Get configuration path. Args: configuration_id (int): configuration identifier. Returns: str:", "the newly copied configuration. recursive (bool): Specify parameter recursive=true to copy specified configuration", "- Secret elements (Elements with attribute \"secret=encrypt\" in XML representation of an existing", "newly copied configuration. recursive (bool): Specify parameter recursive=true to copy specified configuration and", "elements (Elements with attribute \"secret=encrypt\" in XML representation of an existing configuration, typically", "will be raised if schedule is inherited from parent configuration. \"\"\" return self.quickbuild._request(", "configuration_id (int): configuration id. Returns: None \"\"\" return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, )", "\"\"\" Get configuration schedule. Args: configuration_id (int): configuration identifier. Returns: dict: configuration schedule.", "configuration schedule. Args: configuration_id (int): configuration identifier. Returns: dict: configuration schedule. 
Raises: QBProcessingError:", "raised if schedule is inherited from parent configuration. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id),", "content_type=ContentType.XML, remove the id element, modify certain parts and use it for create()", "the password has already been encrypted. However if you creating configuration by copying", "str: \"\"\" Get configuration description. Args: configuration_id (int): configuration identifier. Returns: str: configuration", "not set, default value of client instance is used. Returns: Union[dict, str]: configuration", "import partial from typing import List, Optional, Union from quickbuild.helpers import ContentType, response2py", "the id element, modify certain parts and use it for create() method. -", "List[dict]: list of child configurations. \"\"\" return self._get(dict(parent_id=parent_id)) def get_descendent(self, parent_id: int) ->", "configuration description. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/description'.format(configuration_id), ) def get_error_message(self, configuration_id: int) ->", "Returns: str: configuration run mode. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/run_mode'.format(configuration_id), ) def get_schedule(self,", "int: \"\"\" Get parent configuration id. Args: configuration_id (int): configuration identifier. Returns: int:", "values, etc.) should not contain the \"secret\" attribute; otherwise QuickBuild will think that", "'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, ) def copy(self, configuration_id: int, parent_id: int, name: str, recursive:", "-> List[dict]: return self.quickbuild._request( 'GET', 'configurations', callback=response2py, params=params, ) def get(self) -> List[dict]:", "id to be copied. parent_id (int): Configuration id of the parent to place", "not have parent. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/parent'.format(configuration_id), callback=response2py, ) def update(self, configuration:", "that the password has already been encrypted. However if you creating configuration by", "representation of the configuration using `get_info()` method with content_type=ContentType.XML and modify certain parts", "configuration_id: int) -> str: \"\"\" Get configuration name. Args: configuration_id (int): configuration identifier.", "\"\"\" return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id), callback=partial(response2py, content_type=content_type), content_type=content_type, ) def get_path(self, configuration_id: int)", "create the XML from scratch: you may get XML representation of the configuration", "client instance is used. Returns: Union[dict, str]: configuration content. \"\"\" return self.quickbuild._request( 'GET',", "int: \"\"\" Get configuration id by path. Args: path (str): configuration path. Returns:", "Args: configuration_id (int): configuration id. Returns: None \"\"\" return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py,", "int, *, content_type: Optional[ContentType] = None ) -> Union[dict, str]: \"\"\" Get full", "updated. 
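    # Creating from a template, sketched. The id-element removal is shown with
    # a naive replace() purely for illustration (real code should edit the XML
    # properly), and the template id 1 is an assumption:
    #
    #     template = qb.configurations.get_info(1, content_type=ContentType.XML)
    #     template = template.replace('<id>1</id>', '')
    #     new_id = qb.configurations.create(template)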
\"\"\" return self.quickbuild._request( 'POST', 'configurations', callback=response2py, data=configuration ) def create(self, configuration: str)", "self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, ) def copy(self, configuration_id: int, parent_id: int, name: str,", "descendent configurations. \"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id: int, *, content_type: Optional[ContentType]", "'GET', 'configurations/{}/description'.format(configuration_id), ) def get_error_message(self, configuration_id: int) -> str: \"\"\" Get configuration error", "otherwise QuickBuild will think that the password has already been encrypted. However if", "value in the range of 0~100, with 0 stands for 0%, and 100", "List[dict]: list of descendent configurations. \"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self, configuration_id: int,", "'GET', 'configurations/{}/name'.format(configuration_id), ) def get_description(self, configuration_id: int) -> str: \"\"\" Get configuration description.", "of child configurations. \"\"\" return self._get(dict(parent_id=parent_id)) def get_descendent(self, parent_id: int) -> List[dict]: \"\"\"", "configuration_id (int): configuration identifier. Returns: str: configuration description. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/description'.format(configuration_id),", "configuration. Raises: QBProcessingError: the configuration is root configuration and does not have parent.", "that: - The parent element denotes id of the parent configuration. Normally you", "otherwise, only the configuration itself will be copied. Returns: int: configuration id of", "configuration name. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/name'.format(configuration_id), ) def get_description(self, configuration_id: int) ->", "callback=response2py, ) def get_average_duration(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) ->", "List[dict]: list of configurations. \"\"\" return self._get(dict(recursive=True)) def get_child(self, parent_id: int) -> List[dict]:", "using XML configuration. Normally you do not need to create the XML from", "recursive (bool): Specify parameter recursive=true to copy specified configuration and all its descendant", "Get configuration path. Args: configuration_id (int): configuration identifier. Returns: str: configuration path. \"\"\"", "configuration_id (int): configuration identifier. Returns: str: configuration name. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/name'.format(configuration_id),", "(int): configuration id. Returns: None \"\"\" return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, ) def", "information of the configuration will be returned here, including `id`, `name`, `description`, `schedule`,", "Returns: str: configuration error message. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def get_run_mode(self,", "quickbuild): self.quickbuild = quickbuild def _get(self, params: dict) -> List[dict]: return self.quickbuild._request( 'GET',", "configuration identifier. Returns: List[dict]: list of child configurations. \"\"\" return self._get(dict(parent_id=parent_id)) def get_descendent(self,", ") def get_parent(self, configuration_id: int) -> int: \"\"\" Get parent configuration id. 
Args:", "self._get(dict(recursive=True)) def get_child(self, parent_id: int) -> List[dict]: \"\"\" Get a list of child", "of newly created configuration. Raises: QBError: XML validation error \"\"\" self.quickbuild._validate_for_id(configuration) return self.update(configuration)", "be copied. parent_id (int): Configuration id of the parent to place newly copied", "__init__(self, quickbuild): self.quickbuild = quickbuild def _get(self, params: dict) -> List[dict]: return self.quickbuild._request(", "the full xml representation using id if necessary. Returns: List[dict]: list of configurations.", "has already been encrypted. However if you creating configuration by copying existing one", "type if not set, default value of client instance is used. Returns: Union[dict,", "def get_id_by_path(self, path: str) -> int: \"\"\" Get configuration id by path. Args:", "configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int: \"\"\" Get configuration", "return self.quickbuild._request( 'GET', 'configurations/{}/run_mode'.format(configuration_id), ) def get_schedule(self, configuration_id: int) -> dict: \"\"\" Get", "the XML. Args: configuration (str): XML document. Returns: int: configuration id being updated.", "\"\"\" Get configuration run mode. Args: configuration_id (int): configuration identifier. Returns: str: configuration", "Get configuration run mode. Args: configuration_id (int): configuration identifier. Returns: str: configuration run", "scratch: you may get XML representation of the configuration using `get_info()` method with", "- The parent element denotes id of the parent configuration. Normally you do", "necessary. Returns: List[dict]: list of configurations. \"\"\" return self._get(dict(recursive=True)) def get_child(self, parent_id: int)", "child configurations. \"\"\" return self._get(dict(parent_id=parent_id)) def get_descendent(self, parent_id: int) -> List[dict]: \"\"\" Get", "error message. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def get_run_mode(self, configuration_id: int) ->", "Returns: int: milliseconds of average build duration. \"\"\" params = dict() if from_date:", "element denotes id of the parent configuration. Normally you do not need to", "think that the password has already been encrypted. However if you creating configuration", "attribute; otherwise QuickBuild will think that the password has already been encrypted. However", "identifier. Returns: str: configuration run mode. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/run_mode'.format(configuration_id), ) def", "\"\"\" params = dict( parent_id=parent_id, name=name, recursive=recursive, ) return self.quickbuild._request( 'GET', 'configurations/{}/copy'.format(configuration_id), callback=response2py,", "id of the parent to place newly copied configuration. name (str): Name of", "is root configuration and does not have parent. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/parent'.format(configuration_id),", ") def create(self, configuration: str) -> int: \"\"\" Create a configuration using XML/JSON", "full xml representation using id if necessary. Returns: List[dict]: list of configurations. \"\"\"", "dict: configuration schedule. Raises: QBProcessingError: will be raised if schedule is inherited from", "with 0 stands for 0%, and 100 stands for 100%. \"\"\" params =", "XML document. Returns: int: configuration id being updated. 
\"\"\" return self.quickbuild._request( 'POST', 'configurations',", "int) -> dict: \"\"\" Get configuration schedule. Args: configuration_id (int): configuration identifier. Returns:", "Returns: str: configuration description. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/description'.format(configuration_id), ) def get_error_message(self, configuration_id:", "to create the XML from scratch: you may get XML representation of the", "description. Args: configuration_id (int): configuration identifier. Returns: str: configuration description. \"\"\" return self.quickbuild._request(", "name. Args: configuration_id (int): configuration identifier. Returns: str: configuration name. \"\"\" return self.quickbuild._request(", "Get configuration name. Args: configuration_id (int): configuration identifier. Returns: str: configuration name. \"\"\"", "params: dict) -> List[dict]: return self.quickbuild._request( 'GET', 'configurations', callback=response2py, params=params, ) def get(self)", "int) -> int: \"\"\" Get parent configuration id. Args: configuration_id (int): configuration identifier.", "self.quickbuild._validate_for_id(configuration) return self.update(configuration) def delete(self, configuration_id: int) -> None: \"\"\" Delete configuration. Args:", "configuration and does not have parent. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/parent'.format(configuration_id), callback=response2py, )", "def get_success_rate(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int: \"\"\"", "should then be preserved. Args: configuration (str): XML/JSON document. Returns: int: configuration id", "schedule is inherited from parent configuration. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py, )", "params = dict( parent_id=parent_id, name=name, recursive=recursive, ) return self.quickbuild._request( 'GET', 'configurations/{}/copy'.format(configuration_id), callback=response2py, params=params,", "message. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/error_message'.format(configuration_id), ) def get_run_mode(self, configuration_id: int) -> str:", "int: configuration id of the newly copied configuration. \"\"\" params = dict( parent_id=parent_id,", "id. Args: configuration_id (int): configuration identifier. Returns: int: id of parent configuration. Raises:", "create the xml from scratch: you may retrieve xml representation of a templating", "id element, modify certain parts and use it for create() method. - Secret", "parent_id (int): parent configuration identifier. Returns: List[dict]: list of child configurations. \"\"\" return", "to_date: params['to_date'] = str(to_date) return self.quickbuild._request( 'GET', 'configurations/{}/success_rate'.format(configuration_id), callback=response2py, params=params, ) def get_parent(self,", "from scratch: you may retrieve xml representation of a templating configuration using various", "full configuration info. Args: configuration_id (int): Configuration identifier. content_type (Optional[ContentType]): Select needed content", "identifier. Returns: str: configuration description. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/description'.format(configuration_id), ) def get_error_message(self,", "callback=response2py, ) def update(self, configuration: str) -> int: \"\"\" Update a configuration using", "identifier. Returns: int: id of parent configuration. 
Raises: QBProcessingError: the configuration is root", "= None ) -> Union[dict, str]: \"\"\" Get full configuration info. Args: configuration_id", "do not need to create the xml from scratch: you may retrieve xml", "int, name: str, recursive: bool ) -> int: \"\"\" Copy configuration (available since", "identifier. Returns: int: value in the range of 0~100, with 0 stands for", "parent element denotes id of the parent configuration. Normally you do not need", "(int): configuration identifier. Returns: str: configuration run mode. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/run_mode'.format(configuration_id),", "-> str: \"\"\" Get configuration name. Args: configuration_id (int): configuration identifier. Returns: str:", "configuration identifier. Returns: str: configuration description. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}/description'.format(configuration_id), ) def", "self.quickbuild._request( 'GET', 'configurations/{}/schedule'.format(configuration_id), callback=response2py, ) def get_average_duration(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date:", "from typing import List, Optional, Union from quickbuild.helpers import ContentType, response2py class Configurations:", "'configurations', callback=response2py, params=params, ) def get(self) -> List[dict]: \"\"\" Get all configurations in", ") def get_success_rate(self, configuration_id: int, *, from_date: Optional[datetime.date], to_date: Optional[datetime.date] ) -> int:", "int) -> str: \"\"\" Get configuration error message. Args: configuration_id (int): configuration identifier.", "Returns: int: value in the range of 0~100, with 0 stands for 0%,", "may retrieve xml representation of a templating configuration using various configuration access methods", "in the range of 0~100, with 0 stands for 0%, and 100 stands", "configurations recursively; otherwise, only the configuration itself will be copied. Returns: int: configuration", "\"\"\" Get all configurations in the system. For performance reason, only brief information", "return self.quickbuild._request( 'GET', 'configurations/{}/parent'.format(configuration_id), callback=response2py, ) def update(self, configuration: str) -> int: \"\"\"", "`get_info()` with content_type=ContentType.XML, remove the id element, modify certain parts and use it", ") def get_description(self, configuration_id: int) -> str: \"\"\" Get configuration description. Args: configuration_id", "may get XML representation of the configuration using `get_info()` method with content_type=ContentType.XML and", "repository passwords, secret variable values, etc.) should not contain the \"secret\" attribute; otherwise", "id. Returns: None \"\"\" return self.quickbuild._request( 'DELETE', 'configurations/{}'.format(configuration_id), callback=response2py, ) def copy(self, configuration_id:", "of client instance is used. Returns: Union[dict, str]: configuration content. \"\"\" return self.quickbuild._request(", "identifier. Returns: List[dict]: list of descendent configurations. \"\"\" return self._get(dict(recursive=True, parent_id=parent_id)) def get_info(self,", "return self.quickbuild._request( 'GET', 'configurations/{}/path'.format(configuration_id), ) def get_id_by_path(self, path: str) -> int: \"\"\" Get", "instance is used. Returns: Union[dict, str]: configuration content. \"\"\" return self.quickbuild._request( 'GET', 'configurations/{}'.format(configuration_id)," ]
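A condensed reconstruction of that client, stitched from the overlapping fragments. Signatures, bodies and docstring facts are kept as the fragments show them; the keyword arguments of get_info() are an assumption, since its fragment breaks off mid-call.

from typing import List, Optional, Union

from quickbuild.helpers import ContentType, response2py


class Configurations:

    def __init__(self, quickbuild):
        self.quickbuild = quickbuild

    def _get(self, params: dict) -> List[dict]:
        return self.quickbuild._request(
            'GET',
            'configurations',
            callback=response2py,
            params=params,
        )

    def get(self) -> List[dict]:
        """Get all configurations in the system; for performance reasons only
        brief information is returned, so fetch the full XML representation
        by id if necessary."""
        return self._get(dict(recursive=True))

    def get_child(self, parent_id: int) -> List[dict]:
        """Get the child configurations of parent_id."""
        return self._get(dict(parent_id=parent_id))

    def get_descendent(self, parent_id: int) -> List[dict]:
        """Get all descendent configurations of parent_id."""
        return self._get(dict(recursive=True, parent_id=parent_id))

    def get_info(self, configuration_id: int, *,
                 content_type: Optional[ContentType] = None
                 ) -> Union[dict, str]:
        """Get full configuration info; when content_type is not set, the
        default of the client instance is used."""
        return self.quickbuild._request(
            'GET',
            'configurations/{}'.format(configuration_id),
            callback=response2py,        # assumption: the fragment ends mid-call
            content_type=content_type,   # assumption: forwarded to _request
        )

    def get_parent(self, configuration_id: int) -> int:
        """Get the parent configuration id; raises QBProcessingError when the
        configuration is a root configuration and does not have a parent."""
        return self.quickbuild._request(
            'GET',
            'configurations/{}/parent'.format(configuration_id),
            callback=response2py,
        )

    def update(self, configuration: str) -> int:
        """Update a configuration from its XML; returns the id being updated."""
        return self.quickbuild._request(
            'POST',
            'configurations',
            callback=response2py,
            data=configuration,
        )

    def create(self, configuration: str) -> int:
        """Create a configuration from an XML/JSON document that carries no id
        element; raises QBError on XML validation errors."""
        self.quickbuild._validate_for_id(configuration)
        return self.update(configuration)

    def delete(self, configuration_id: int) -> None:
        """Delete a configuration by id."""
        return self.quickbuild._request(
            'DELETE',
            'configurations/{}'.format(configuration_id),
            callback=response2py,
        )

    def copy(self, configuration_id: int, parent_id: int,
             name: str, recursive: bool) -> int:
        """Copy a configuration under parent_id; with recursive=True all
        descendant configurations are copied as well."""
        params = dict(parent_id=parent_id, name=name, recursive=recursive)
        return self.quickbuild._request(
            'GET',
            'configurations/{}/copy'.format(configuration_id),
            callback=response2py,
            params=params,
        )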
[ "You may assume that each input would have exactly one solution, and you", "nums = [2, 7, 11, 15], target = 9, # Because nums[0] +", "[2, 7, 11, 15], target = 9, # Because nums[0] + nums[1] =", "may assume that each input would have exactly one solution, and you may", "same element twice. # Example: # Given nums = [2, 7, 11, 15],", "nums: remain = target - curr if remain not in seen: seen[curr] =", "curr if remain not in seen: seen[curr] = index else: result.append(seen[remain]) result.append(index) index", "# You may assume that each input would have exactly one solution, and", "remain not in seen: seen[curr] = index else: result.append(seen[remain]) result.append(index) index += 1", "target: int :rtype: List[int] \"\"\" seen = {} result = [] index =", "Because nums[0] + nums[1] = 2 + 7 = 9, # return [0,", "would have exactly one solution, and you may not use the same element", "= 9, # Because nums[0] + nums[1] = 2 + 7 = 9,", "integers, return indices of the two numbers such that they add up to", "# Given nums = [2, 7, 11, 15], target = 9, # Because", "int :rtype: List[int] \"\"\" seen = {} result = [] index = 0", "one solution, and you may not use the same element twice. # Example:", "+ nums[1] = 2 + 7 = 9, # return [0, 1]. class", "such that they add up to a specific target. # You may assume", "to a specific target. # You may assume that each input would have", "may not use the same element twice. # Example: # Given nums =", "up to a specific target. # You may assume that each input would", "2 + 7 = 9, # return [0, 1]. class Solution: def twoSum(self,", "= 0 for curr in nums: remain = target - curr if remain", "0 for curr in nums: remain = target - curr if remain not", "= {} result = [] index = 0 for curr in nums: remain", "= 9, # return [0, 1]. class Solution: def twoSum(self, nums, target): \"\"\"", "nums[0] + nums[1] = 2 + 7 = 9, # return [0, 1].", "in seen: seen[curr] = index else: result.append(seen[remain]) result.append(index) index += 1 return result", "the same element twice. # Example: # Given nums = [2, 7, 11,", "assume that each input would have exactly one solution, and you may not", ":type target: int :rtype: List[int] \"\"\" seen = {} result = [] index", "\"\"\" seen = {} result = [] index = 0 for curr in", "solution, and you may not use the same element twice. # Example: #", "in nums: remain = target - curr if remain not in seen: seen[curr]", "a specific target. # You may assume that each input would have exactly", "+ 7 = 9, # return [0, 1]. class Solution: def twoSum(self, nums,", "not use the same element twice. # Example: # Given nums = [2,", "they add up to a specific target. # You may assume that each", "nums: List[int] :type target: int :rtype: List[int] \"\"\" seen = {} result =", "7, 11, 15], target = 9, # Because nums[0] + nums[1] = 2", "if remain not in seen: seen[curr] = index else: result.append(seen[remain]) result.append(index) index +=", "List[int] :type target: int :rtype: List[int] \"\"\" seen = {} result = []", "= [2, 7, 11, 15], target = 9, # Because nums[0] + nums[1]", "of integers, return indices of the two numbers such that they add up", "the two numbers such that they add up to a specific target. #", "7 = 9, # return [0, 1]. class Solution: def twoSum(self, nums, target):", "use the same element twice. 
# Example: # Given nums = [2, 7,", "Example: # Given nums = [2, 7, 11, 15], target = 9, #", "Solution: def twoSum(self, nums, target): \"\"\" :type nums: List[int] :type target: int :rtype:", "return indices of the two numbers such that they add up to a", "for curr in nums: remain = target - curr if remain not in", "nums[1] = 2 + 7 = 9, # return [0, 1]. class Solution:", "Given an array of integers, return indices of the two numbers such that", "that each input would have exactly one solution, and you may not use", "# Example: # Given nums = [2, 7, 11, 15], target = 9,", "input would have exactly one solution, and you may not use the same", "and you may not use the same element twice. # Example: # Given", "\"\"\" :type nums: List[int] :type target: int :rtype: List[int] \"\"\" seen = {}", "not in seen: seen[curr] = index else: result.append(seen[remain]) result.append(index) index += 1 return", "= target - curr if remain not in seen: seen[curr] = index else:", "each input would have exactly one solution, and you may not use the", "[0, 1]. class Solution: def twoSum(self, nums, target): \"\"\" :type nums: List[int] :type", "target. # You may assume that each input would have exactly one solution,", "11, 15], target = 9, # Because nums[0] + nums[1] = 2 +", "target): \"\"\" :type nums: List[int] :type target: int :rtype: List[int] \"\"\" seen =", "9, # Because nums[0] + nums[1] = 2 + 7 = 9, #", "9, # return [0, 1]. class Solution: def twoSum(self, nums, target): \"\"\" :type", "{} result = [] index = 0 for curr in nums: remain =", "target = 9, # Because nums[0] + nums[1] = 2 + 7 =", "result = [] index = 0 for curr in nums: remain = target", "remain = target - curr if remain not in seen: seen[curr] = index", "element twice. # Example: # Given nums = [2, 7, 11, 15], target", "indices of the two numbers such that they add up to a specific", "twoSum(self, nums, target): \"\"\" :type nums: List[int] :type target: int :rtype: List[int] \"\"\"", "have exactly one solution, and you may not use the same element twice.", "that they add up to a specific target. # You may assume that", "[] index = 0 for curr in nums: remain = target - curr", "nums, target): \"\"\" :type nums: List[int] :type target: int :rtype: List[int] \"\"\" seen", "seen = {} result = [] index = 0 for curr in nums:", "# return [0, 1]. class Solution: def twoSum(self, nums, target): \"\"\" :type nums:", "List[int] \"\"\" seen = {} result = [] index = 0 for curr", "array of integers, return indices of the two numbers such that they add", "you may not use the same element twice. # Example: # Given nums", "1]. class Solution: def twoSum(self, nums, target): \"\"\" :type nums: List[int] :type target:", "of the two numbers such that they add up to a specific target.", "an array of integers, return indices of the two numbers such that they", "Given nums = [2, 7, 11, 15], target = 9, # Because nums[0]", "def twoSum(self, nums, target): \"\"\" :type nums: List[int] :type target: int :rtype: List[int]", "target - curr if remain not in seen: seen[curr] = index else: result.append(seen[remain])", "curr in nums: remain = target - curr if remain not in seen:", "twice. # Example: # Given nums = [2, 7, 11, 15], target =", "add up to a specific target. # You may assume that each input", "numbers such that they add up to a specific target. # You may", "index = 0 for curr in nums: remain = target - curr if", "= 2 + 7 = 9, # return [0, 1]. 
class Solution: def", ":rtype: List[int] \"\"\" seen = {} result = [] index = 0 for", "15], target = 9, # Because nums[0] + nums[1] = 2 + 7", "class Solution: def twoSum(self, nums, target): \"\"\" :type nums: List[int] :type target: int", "- curr if remain not in seen: seen[curr] = index else: result.append(seen[remain]) result.append(index)", ":type nums: List[int] :type target: int :rtype: List[int] \"\"\" seen = {} result", "exactly one solution, and you may not use the same element twice. #", "# Because nums[0] + nums[1] = 2 + 7 = 9, # return", "return [0, 1]. class Solution: def twoSum(self, nums, target): \"\"\" :type nums: List[int]", "two numbers such that they add up to a specific target. # You", "# Given an array of integers, return indices of the two numbers such", "specific target. # You may assume that each input would have exactly one", "= [] index = 0 for curr in nums: remain = target -" ]
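A quick sanity check of the restored function against the example in its own header comment (this driver is an addition, not part of the source file):

if __name__ == '__main__':
    # nums[0] + nums[1] = 2 + 7 = 9, so the expected answer is [0, 1].
    assert Solution().twoSum([2, 7, 11, 15], 9) == [0, 1]
    print('ok')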
[ "TRIM_DIFFERENCE_MAX = 1.0 def dict_factory(cursor, row): \"\"\"Helper function to convert sql item into", "mturk_cur = mturk_db_connection.cursor() mturk_cur.execute('''UPDATE hits SET status='pending_approval' WHERE assignment_id=?''', (json_file[\"assignmentId\"],)) mturk_db_connection.commit() except sqlite3.Error", "a verification task. :param annotation_tasks: connection to database containing mturk-related data :type annotation_tasks:", "mturk task. :param mturk_db_connection: connection to database containing mturk-related data :type mturk_db_connection: sqlite3.Connection", "or \\ not task_completed(json_file, app.mturk_db_connection) if not more_to_complete: try: mturk_db_connection = app.mturk_db_connection mturk_cur", "default=5050, type=int) parser.add_argument('--video_db', dest='video_db', help='SQLite3 database with normal videos', default='video_db.db', type=str) parser.add_argument('--mturk_db', dest='mturk_db',", "parsing args import argparse # encoding / decoding import json # time /", "parse_args() # load annotation tasks app.annotation_tasks = load_annotation_tasks(args.video_db) app.mturk_db_connection = load_annotation_tasks(args.mturk_db) # Set", "certfile location', default='', type=str) parser.add_argument('--keyfile', dest='keyfile', help='SSL keyfile location', default='', type=str) args =", "as e: print_log_info(str(e)) return def expire_locked_items(): \"\"\" Expires a locked item based on", "is a verification video db_cursor.execute('''SELECT * FROM trimmed_verification_videos where id=?''', (json_res['id'],)) # todo", "time \" + str(attempt_times_set['start_time']) + \" but the verified had start time \"", "# name_locked integer, # name_lock_time real, # named_by_user text, # occluded integer, #", "verification task if ant_type == 'name_preview' or ant_type == 'trim_preview': needs_verification_task = True", "= mturk_cur.fetchall() for attempt_times_set in times_query_result: db_cursor.execute(\"SELECT start_time, end_time FROM video_db WHERE id=?\",", "Wrapper for loading annotations \"\"\" # id integer primary key, # url text,", "name_locked=1''') num_clips_locked = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db WHERE red_flag>=1''') num_clips_flaged = db_cursor.fetchone()['count(*)']", "FROM trimmed_verification_videos where id=?''', (json_res['id'],)) # todo find out if is good query", "between correct start_time/end_time and verification attempt's start_time/end_time in seconds TRIM_DIFFERENCE_MAX = 1.0 def", "str(attempt_times_set['start_time']) + \" but the verified had start time \" + str(verified_times_set['start_time'])) all_verifications_correct", "results to return to client ret = {} try: # make sure the", "return 'hello world' def parse_args(): \"\"\" Parse input arguments \"\"\" parser = argparse.ArgumentParser(description='Setup", "json_res['hitId'])) mturk_cur.execute('''INSERT INTO trim_verification_attempts( hit_id, assignment_id, worker_id, id, start_time, end_time) VALUES (?,?,?,?,?,?)''', (json_res['hitId'],", "the word selected from the dropdown menu - occluded, a boolean from the", "is None: return None # Otherwise return a task. 
else: task = item", "ant_type) else: task = get_next_available_task(app.annotation_tasks, ant_type) if not task: raise ValueError('can not get", "redirect stdout and stderr for logging import sys # sys.stdout = open('./web_app.log', 'a',", "WHERE hit_id=?\", (hit_id,)) action_query_result = mturk_cur.fetchall() for attempt_action_set in action_query_result: db_cursor.execute(\"SELECT action_noun, action_verb", "completed \"\"\" mturk_cur = mturk_db_connection.cursor() try: mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed FROM hits", "not in json_file: raise ValueError('annotation_type missing in request') if 'id' not in json_file:", "\"\"\" Collect DB stats \"\"\" ant_tasks = app.annotation_tasks db_cursor = ant_tasks.cursor() # show", "is_mturk or \\ not task_completed(json_file, app.mturk_db_connection) if not more_to_complete: try: mturk_db_connection = app.mturk_db_connection", "seconds TRIM_DIFFERENCE_MAX = 1.0 def dict_factory(cursor, row): \"\"\"Helper function to convert sql item", "-3 ret['error_msg'] = str(err) return json.dumps(ret) # Decide if we need a verification", "ret['error_msg'] = str(err) return json.dumps(ret) return json.dumps(ret) @app.route('/return_task', methods=['POST']) def return_task(): \"\"\" Processes", "from flask_cors import CORS, cross_origin import tornado.wsgi import tornado.httpserver # database import sqlite3", "ID - annotation_type, which can be \"name\" or \"trim\" - user_name If the", "port to serve content on', default=5050, type=int) parser.add_argument('--video_db', dest='video_db', help='SQLite3 database with normal", "json_res['hitId'])) mturk_cur.execute('''INSERT INTO name_verification_attempts( hit_id, assignment_id, worker_id, id, action_noun, action_verb) VALUES (?,?,?,?,?,?)''', (json_res['hitId'],", "= parse_args() # load annotation tasks app.annotation_tasks = load_annotation_tasks(args.video_db) app.mturk_db_connection = load_annotation_tasks(args.mturk_db) #", "in json_res' hitId has been completed \"\"\" mturk_cur = mturk_db_connection.cursor() try: mturk_cur.execute('''SELECT verifications_total,", "= the verification videos left b = total number of videos left The", "json.dumps(ret) is_mturk = \"assignmentId\" in json_file and \"workerId\" in json_file and \\ \"hitId\"", "load_annotation_tasks(args.video_db) app.mturk_db_connection = load_annotation_tasks(args.mturk_db) # Set global variables app.aws_access_key_id = args.aws_access_key_id app.aws_secret_access_key =", "not in json_file: raise ValueError('annotation_type missing in request') else: # more sanity check", "trim_verification_attempts( hit_id, assignment_id, worker_id, id, start_time, end_time) VALUES (?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'],", "we are updating is a verification video db_cursor.execute('''SELECT * FROM named_verification_videos where id=?''',", "red_flag=? 
WHERE id=?''', update_item) # Update MTurk database to reflect this change if", "== \"name\": mturk_cur.execute(\"SELECT id, action_noun, action_verb FROM name_verification_attempts WHERE hit_id=?\", (hit_id,)) action_query_result =", "= (float(json_res['start_time']), float(json_res['end_time']), json_res['user_name'], int(json_res['red_flag'])*2, int(json_res['id'])) db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?, end_time=?,", "time \" + str(attempt_times_set['end_time']) + \" but the verified had end time \"", "us some stats try: db_cursor.execute('''SELECT count(*) FROM video_db WHERE named=1''') num_clips_named = db_cursor.fetchone()['count(*)']", "thing\" mturk_cur.execute(\"SELECT id, start_time, end_time FROM trim_verification_attempts WHERE hit_id=?\", (hit_id,)) times_query_result = mturk_cur.fetchall()", "mturk_cur.execute(\"SELECT assignment_id, hit_id, task FROM hits WHERE status='pending_approval'\") except sqlite3.Error as e: print_log_info(str(e))", "Attempt failed! Attempt had verb \" + str(attempt_action_set['action_verb']) + \" but the verified", "annotation_type, which can be \"name\" or \"trim\" - user_name If the request is", "error' return json.dumps(ret) # decode json from request data into a dict try:", "str(result[\"assignment_id\"]) hit_id = str(result[\"hit_id\"]) task = str(result[\"task\"]) all_verifications_correct = True print assignment_id try:", "dest='keyfile', help='SSL keyfile location', default='', type=str) args = parser.parse_args() return args def start_from_terminal():", "been completed :param json_res: JSON given by frontend's submit button; must have hitId", "trim_preview - \"user_name\" If it is a request from an MTurk iFrame, it", "\"annotation_type\" which can have the values... - name - name_preview - trim -", "# Update naming task if ant_type == 'name': try: # Decide if video", "int(json_res['id'])) db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?, end_time=?, trimmed_by_user=?, red_flag=? WHERE id=?''', update_item)", "FROM name_verification_attempts WHERE hit_id=?\", (hit_id,)) action_query_result = mturk_cur.fetchall() for attempt_action_set in action_query_result: db_cursor.execute(\"SELECT", "WHERE hit_id=?\", (hit_id,)) times_query_result = mturk_cur.fetchall() for attempt_times_set in times_query_result: db_cursor.execute(\"SELECT start_time, end_time", "text, # occluded integer, # trimmed integer, # trim_locked integer, # trim_lock_time real,", "this change if is_mturk and is_verification: mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, verifications_completed =", "WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() annotation_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) return", "ant_type) + '\\033[0m') # return return True @app.errorhandler(404) def not_found(error): \"\"\" Default error", "# Dict holds the results to return to client ret = {} #", "except ValueError as err: ret['code'] = -1 ret['error_msg'] = str(err) return json.dumps(ret) return", "print_log_info(str(e)) annotation_tasks.commit() return task def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk): \"\"\" Updates the data", "request is coming from an mturk iFrame, it should have: - assignmentId -", "get_next_available_task(app.annotation_tasks, ant_type) if not task: raise ValueError('can not get a valid task. 
please", "default='', type=str) args = parser.parse_args() return args def start_from_terminal(): \"\"\" entry of the", "= db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db WHERE trimmed=1''') num_clips_trimmed = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*)", "def task_completed(json_res, mturk_db_connection): \"\"\" Tells whether an mturk task has been completed :param", "for a new labelling task. Called by get_task(). :param annotation_tasks: connection to database", "request is going to be a verification video or not. Let a =", "\"\"\" Updates the data for a labelling task plus relevant mturk variables if", "json_res['workerId'], json_res['id'], float(json_res['start_time']), float(json_res['end_time']))) mturk_db_connection.commit() elif is_mturk and not is_verification: print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])", "print_log_info(str(e)) query_result = mturk_cur.fetchone() continue print_log_info(assignment_id + \" approved. Amazon response: \" +", "verifications_total, labels_total, verifications_completed, labels_completed = \\ query_result[\"verifications_total\"], query_result[\"labels_total\"], \\ query_result[\"verifications_completed\"], query_result[\"labels_completed\"] return verifications_total", "json_res['user_name'], int(json_res['red_flag'])*2, int(json_res['id'])) db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?, end_time=?, trimmed_by_user=?, red_flag=? WHERE", "# So annotation_type == 'trim' try: db_cursor.execute('''SELECT * FROM video_db WHERE named=1 AND", "= db_cursor.fetchone() if abs(attempt_times_set['start_time'] - verified_times_set['start_time']) > TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed! Attempt had", "id=?''', (0.0, item['id'])) ant_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) # Task: trim db_cursor.execute('''SELECT", "\"hitId\" in json_file # Get next available task try: flag = update_task(app.mturk_db_connection, app.annotation_tasks,", "json_file['annotation_type'] if not ((ant_type == 'name') or (ant_type == 'trim')): raise ValueError('unknown annotation_type')", "is_mturk) if not flag: raise ValueError('can not update the task. Please re-try.') else:", "print_log_info(str(e)) item = db_cursor.fetchone() # No task available if item is None: return", "try: # from https://stackoverflow.com/questions/4114940/select-random-rows-in-sqlite db_cursor.execute('''SELECT * FROM video_db WHERE id IN (SELECT id", "parser.add_argument('--sandbox', dest='sandbox', help='If this is a sandbox HIT (otherwise is a real one)',", "\"\"\" Parse input arguments \"\"\" parser = argparse.ArgumentParser(description='Setup a web server for video", "* FROM video_db WHERE name_locked=1 AND named=0''') locked_items = db_cursor.fetchall() for item in", "(float(max(verifications_total - verifications_completed, 0)) / max(verifications_total + labels_total - verifications_completed - labels_completed, 1))", "boolean representing if task referred to in json_res' hitId has been completed \"\"\"", "except sqlite3.Error as e: print_log_info(str(e)) return def load_annotation_tasks(video_db): \"\"\" Wrapper for loading annotations", "into a dict, and make sure all required data is present try: json_file", "= open('./web_app.log', 'a', 1) # sys.stderr = open('./web_app.err', 'a', 1) import random import", "Tells whether an mturk task has been completed :param json_res: JSON given by", "task. 
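The low-level helpers are fully recoverable from the fragments; they are restored below, with Python 3 print() substituted for the original Python 2 print statements:

import sqlite3
import time


def dict_factory(cursor, row):
    """Helper function to convert a sql item into a dict."""
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d


def print_log_info(str_info):
    """Helper function for logging info with a UTC timestamp prefix."""
    prefix = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
    print("{:s} {:s}".format(prefix, str_info))


def load_annotation_tasks(video_db):
    """Wrapper for loading annotations. The video_db schema, from the source
    comments: id, url, named, name_locked, name_lock_time, named_by_user,
    occluded, trimmed, trim_locked, trim_lock_time, trimmed_by_user,
    video_src, src_start_time, src_end_time, pad_start_frame, pad_end_frame,
    start_time, end_time, action_verb, action_noun, red_flag."""
    annotation_tasks = sqlite3.connect(video_db)
    annotation_tasks.row_factory = dict_factory  # rows come back as dicts
    return annotation_tasks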
Recoverable task-dispatch flow: /get_task accepts a JSON body whose "annotation_type" is one of name, name_preview, trim or trim_preview, plus a user_name; a request coming from an MTurk iFrame additionally carries assignmentId, workerId and hitId. For the preview types the server calls decide_if_needs_verification(), which decides whether this request will receive a verification video: with a = the verification videos left and b = the total number of videos left for the HIT, the chance of getting a verification video is a/b, which gives a uniform distribution of that chance across all requests. get_verification_task() then pulls a random row from named_verification_videos or trimmed_verification_videos (SELECT * FROM video_db WHERE id IN (SELECT id FROM ... ORDER BY RANDOM() LIMIT 1)); get_next_available_task() instead selects an unlabelled, unlocked video_db row that is not itself a verification clip and locks it by setting name_locked or trim_locked together with the current lock timestamp before returning it. When no task is available the endpoint answers "can not get a valid task. please re-try.".
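The two bookkeeping helpers behind that decision, as recovered from the fragments; both read the hits table, whose rows come back as dicts thanks to dict_factory above:

import random


def decide_if_needs_verification(json_res, mturk_db_connection):
    """Hand out a verification clip with probability a/b, where a is the
    number of verifications still needed and b is the total work left."""
    mturk_cur = mturk_db_connection.cursor()
    mturk_cur.execute('''SELECT verifications_total, labels_total,
                                verifications_completed, labels_completed
                         FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    q = mturk_cur.fetchone()
    chance_of_verification_video = (
        float(max(q['verifications_total'] - q['verifications_completed'], 0)) /
        max(q['verifications_total'] + q['labels_total']
            - q['verifications_completed'] - q['labels_completed'], 1))
    return chance_of_verification_video > random.random()


def task_completed(json_res, mturk_db_connection):
    """A HIT is complete once both its verification and label quotas are met."""
    mturk_cur = mturk_db_connection.cursor()
    mturk_cur.execute('''SELECT verifications_total, labels_total,
                                verifications_completed, labels_completed
                         FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    q = mturk_cur.fetchone()
    return (q['verifications_total'] - q['verifications_completed'] <= 0 and
            q['labels_total'] - q['labels_completed'] <= 0)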
Recoverable submission flow: /return_task decodes the posted JSON (id, annotation_type and user_name, plus verb/nouns/occluded for naming or start_time/end_time for trimming, and a red_flag). update_task() first checks whether the submitted clip id appears in named_verification_videos or trimmed_verification_videos; if it does, the submission is a verification attempt, which is inserted into name_verification_attempts or trim_verification_attempts and increments verifications_completed on the HIT rather than touching video_db. A genuine label instead updates video_db (setting named=1 or trimmed=1, clearing the lock, storing the user's answer, and storing red_flag doubled) and increments labels_completed. Once task_completed() reports both quotas met, the HIT's status is set to 'pending_approval', and red-flagged submissions are highlighted in the log.
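A condensed sketch of the trim branch of update_task(); the naming branch is symmetric, writing action_noun, action_verb and named_by_user instead of the times. The standalone function name is hypothetical, and the per-statement sqlite3.Error handling of the original is elided:

def update_trim_label(mturk_db_connection, annotation_tasks, json_res, is_mturk):
    db_cursor = annotation_tasks.cursor()
    mturk_cur = mturk_db_connection.cursor()
    # Decide if the video being updated is a verification video.
    db_cursor.execute('''SELECT * FROM trimmed_verification_videos
                         WHERE id=?''', (json_res['id'],))
    is_verification = not (db_cursor.fetchone() is None)
    if not is_verification:
        # Apply the new label; the red flag is stored doubled, as in the source.
        update_item = (float(json_res['start_time']), float(json_res['end_time']),
                       json_res['user_name'], int(json_res['red_flag']) * 2,
                       int(json_res['id']))
        db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0,
                             start_time=?, end_time=?, trimmed_by_user=?,
                             red_flag=? WHERE id=?''', update_item)
    if is_mturk and is_verification:
        # Record the attempt so approve_assignments() can grade it later.
        mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                             verifications_completed = verifications_completed + 1
                             WHERE hit_id=?''',
                          (json_res['assignmentId'], json_res['workerId'],
                           json_res['hitId']))
        mturk_cur.execute('''INSERT INTO trim_verification_attempts(
                             hit_id, assignment_id, worker_id, id,
                             start_time, end_time) VALUES (?,?,?,?,?,?)''',
                          (json_res['hitId'], json_res['assignmentId'],
                           json_res['workerId'], json_res['id'],
                           float(json_res['start_time']),
                           float(json_res['end_time'])))
    elif is_mturk:
        mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                             labels_completed = labels_completed + 1
                             WHERE hit_id=?''',
                          (json_res['assignmentId'], json_res['workerId'],
                           json_res['hitId']))
    mturk_db_connection.commit()
    annotation_tasks.commit()
    return True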
Recoverable maintenance jobs: expire_locked_items() runs every 20 seconds and releases any name or trim lock older than MAX_DELAY, so abandoned tasks return to the pool; collect_db_stats() runs hourly and logs how many clips are named, trimmed, red-flagged (red_flag>=1) and locked ("All Stats: Named {:d}, Trimmed {:d}, flagged {:d}, Locked {:d}").
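The lock-expiry job as recovered; it assumes the module-level app and the helpers restored above, and again elides the original's per-statement sqlite3.Error handling:

MAX_DELAY = 120  # maximum seconds a task may stay locked


def expire_locked_items():
    """Expires a locked item based on its time stamp."""
    ant_tasks = app.annotation_tasks
    db_cursor = ant_tasks.cursor()
    # Task: name
    db_cursor.execute('''SELECT * FROM video_db WHERE name_locked=1 AND named=0''')
    for item in db_cursor.fetchall():
        if time.time() - item['name_lock_time'] > MAX_DELAY:
            print_log_info("Expiring task {:d} (Name)".format(item['id']))
            db_cursor.execute('''UPDATE video_db SET name_locked=0,
                                 name_lock_time=? WHERE id=?''', (0.0, item['id']))
    # Task: trim
    db_cursor.execute('''SELECT * FROM video_db WHERE trim_locked=1 AND trimmed=0''')
    for item in db_cursor.fetchall():
        if time.time() - item['trim_lock_time'] > MAX_DELAY:
            print_log_info("Expiring task {:d} (Trim)".format(item['id']))
            db_cursor.execute('''UPDATE video_db SET trim_locked=0,
                                 trim_lock_time=? WHERE id=?''', (0.0, item['id']))
    ant_tasks.commit()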
"{:s}\".format(json_file)) if 'annotation_type' not in json_file: raise ValueError('annotation_type missing in request') if 'id'", "= labels_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() # TODO update", "SET status='approved' WHERE hit_id=?''', (hit_id,)) app.mturk_db_connection.commit() except sqlite3.Error as e: print_log_info(str(e)) else: try:", "mturk_db_connection.cursor() mturk_cur.execute('''UPDATE hits SET status='pending_approval' WHERE assignment_id=?''', (json_file[\"assignmentId\"],)) mturk_db_connection.commit() except sqlite3.Error as err:", "(and make it cors) app = flask.Flask(__name__) # pylint: disable=invalid-name CORS(app) # Maximum", "> random.random() def get_verification_task(annotation_tasks, annotation_type): \"\"\" Wrapper for querying database for a verification", "the main function \"\"\" # parse params args = parse_args() # load annotation", "# Decide if we need a verification task if ant_type == 'name_preview' or", "if video we are updating is a verification video db_cursor.execute('''SELECT * FROM trimmed_verification_videos", ":param mturk_db_connection: connection to database containing mturk-related data :type mturk_db_connection: sqlite3.Connection :param annotation_tasks:", "as e: print_log_info(str(e)) return query_result = mturk_cur.fetchall() # We need to loop through", "1) # sys.stderr = open('./web_app.err', 'a', 1) import random import boto.mturk.connection # Obtain", "query is_verification = not (db_cursor.fetchone() is None) # Apply new label if it", "id, action_noun, action_verb) VALUES (?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], json_res['nouns'], json_res['verb'])) mturk_db_connection.commit() elif", "- name_preview - trim - trim_preview - \"user_name\" If it is a request", "is a request from an MTurk iFrame, it also has the following: -", "-3 ret['error_msg'] = str(err) return json.dumps(ret) is_mturk = \"assignmentId\" in json_file and \"workerId\"", "primary key, # url text, # named integer, # name_locked integer, # name_lock_time", "(json_res['hitId'],)) except sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone() print_log_info(json_res['hitId']) verifications_total, labels_total, verifications_completed,", "assignment_id = str(result[\"assignment_id\"]) hit_id = str(result[\"hit_id\"]) task = str(result[\"task\"]) all_verifications_correct = True print", "start server without cert if none provided if args.certfile == '' and args.keyfile", "db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db WHERE trim_locked=1 OR name_locked=1''') num_clips_locked = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT", "if delay > MAX_DELAY: print_log_info(\"Expiring task {:d} (Name)\".format(item['id'])) try: db_cursor.execute('''UPDATE video_db SET name_locked=0,", "verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_cur.execute('''INSERT INTO trim_verification_attempts( hit_id, assignment_id,", "are updating is a verification video db_cursor.execute('''SELECT * FROM named_verification_videos where id=?''', (json_res['id'],))", "> TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed! 
Attempt had start time \" + str(attempt_times_set['start_time']) +", "= str(err) return json.dumps(ret) except: ret['code'] = -2 ret['error_msg'] = 'unknown parameter error'", "str(result[\"hit_id\"]) task = str(result[\"task\"]) all_verifications_correct = True print assignment_id try: if task ==", "time.time() # update the lock if annotation_type == 'name': try: db_cursor.execute('''UPDATE video_db SET", "word selected from the dropdown menu - occluded, a boolean from the checkbox", "mturk_cur = mturk_db_connection.cursor() try: mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed FROM hits WHERE hit_id=?''',", "sqlite3.Error as e: print_log_info(str(e)) annotation_tasks.commit() return task def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk): \"\"\"", "try: db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=? WHERE id=?''', (0.0, item['id'])) ant_tasks.commit() except sqlite3.Error", "json file with the following fields: - \"annotation_type\" which can have the values...", "JSON has the following fields: - id, which is the video ID -", ":return dict from querying database \"\"\" db_cursor = annotation_tasks.cursor() if annotation_type == 'name'", "data for a labelling task plus relevant mturk variables if it's an mturk", "a = the verification videos left b = total number of videos left", "# action_noun text, # red_flag integer # Instantiate a connection to db annotation_tasks", "this request is going to be a verification video or not. Let a", "referred to in json_res' hitId has been completed \"\"\" mturk_cur = mturk_db_connection.cursor() try:", "on', default=5050, type=int) parser.add_argument('--video_db', dest='video_db', help='SQLite3 database with normal videos', default='video_db.db', type=str) parser.add_argument('--mturk_db',", "if ant_type == 'name': try: # Decide if video we are updating is", "ValueError as err: ret['code'] = -3 ret['error_msg'] = str(err) return json.dumps(ret) # Decide", "{:d}\".format( num_clips_named, num_clips_trimmed, num_clips_flaged, num_clips_locked)) except sqlite3.Error as e: print_log_info(str(e)) return def approve_assignments():", "but the verified had end time \" + str(verified_times_set['end_time'])) all_verifications_correct = False break", "query_result = mturk_cur.fetchall() # We need to loop through every assignment/hit set pending", "named_verification_videos where id=?''', (json_res['id'],)) # todo find out if is good query is_verification", "\" + str(attempt_action_set['action_noun']) + \" but the verified had noun \" + str(verified_action_set['action_noun']))", "return to client ret = {} try: # make sure the content type", "= parser.parse_args() return args def start_from_terminal(): \"\"\" entry of the main function \"\"\"", "encoding / decoding import json # time / logging import time #import logging", "locked_items: delay = time.time() - item['trim_lock_time'] if delay > MAX_DELAY: print_log_info(\"Expiring task {:d}", "video_src text # src_start_time integer, # src_end_time integer, # pad_start_frame integer, # pad_end_frame", "in (SELECT id from trimmed_verification_videos) ''') # LIMIT 1 maybe? 
except sqlite3.Error as", "try: db_cursor.execute('''SELECT * FROM video_db WHERE named=0 AND name_locked=0 AND id not in", "json_file: raise ValueError('id missing in request') else: # more sanity check ant_type =", "start time \" + str(attempt_times_set['start_time']) + \" but the verified had start time", "ret['more_to_complete'] = more_to_complete return json.dumps(ret) @app.route('/hello') def hello(): return 'hello world' def parse_args():", "else: ret['code'] = 0 ret['error_msg'] = 'success' except ValueError as err: ret['code'] =", "name_locked=1 AND named=0''') locked_items = db_cursor.fetchall() for item in locked_items: delay = time.time()", "SET trimmed=1, trim_locked=0, start_time=?, end_time=?, trimmed_by_user=?, red_flag=? WHERE id=?''', update_item) # Update MTurk", "in query_result: assignment_id = str(result[\"assignment_id\"]) hit_id = str(result[\"hit_id\"]) task = str(result[\"task\"]) all_verifications_correct =", "chance_of_verification_video > random.random() def get_verification_task(annotation_tasks, annotation_type): \"\"\" Wrapper for querying database for a", "type must be JSON') request_data = flask.request.get_data() except ValueError as err: ret['code'] =", "not get a valid task. please re-try.') else: ret = task except ValueError", "FROM video_db WHERE trimmed=1''') num_clips_trimmed = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db WHERE trim_locked=1", "(sandbox_host if app.sandbox else real_host) mturk = boto.mturk.connection.MTurkConnection( aws_access_key_id=app.aws_access_key_id, aws_secret_access_key=app.aws_secret_access_key, host=host, debug=1 #", "a task. else: task = item cur_time = time.time() # update the lock", "parser.add_argument('--port', dest='port', help='which port to serve content on', default=5050, type=int) parser.add_argument('--video_db', dest='video_db', help='SQLite3", "= json_file['annotation_type'] if not ((ant_type == 'name') or (ant_type == 'trim')): raise ValueError('unknown", "a trimming task try: # Decide if video we are updating is a", "try: mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed FROM hits WHERE hit_id=?''', (json_res['hitId'],)) except sqlite3.Error", "needs_verification_task = True elif ((ant_type == 'name' or ant_type == 'trim') and is_mturk):", "try: json_file = json.JSONDecoder().decode(request_data) print_log_info(\"Task returned: {:s}\".format(json_file)) if 'annotation_type' not in json_file: raise", "\"name\", it should have the following: - verb, a string representing the word", "= load_annotation_tasks(args.mturk_db) # Set global variables app.aws_access_key_id = args.aws_access_key_id app.aws_secret_access_key = args.aws_secret_access_key app.sandbox", "= \\ query_result[\"verifications_total\"], query_result[\"labels_total\"], \\ query_result[\"verifications_completed\"], query_result[\"labels_completed\"] return verifications_total - verifications_completed <= 0", "== 'trim') and is_mturk): needs_verification_task = \\ decide_if_needs_verification(json_file, app.mturk_db_connection) else: needs_verification_task = False", "= mturk_cur.fetchall() # We need to loop through every assignment/hit set pending approval", "locked item based on its time stamp \"\"\" ant_tasks = app.annotation_tasks db_cursor =", "database containing mturk-related data :type annotation_tasks: sqlite3.Connection :param json_res: JSON given by frontend's", "print_log_info(str(e)) return def load_annotation_tasks(video_db): \"\"\" Wrapper 
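
# --- Illustration (not called by the server) --------------------------------
# The trim check above accepts an attempt only when both endpoints land within
# TRIM_DIFFERENCE_MAX seconds of the verified clip. The same predicate as a
# standalone (hypothetical) helper, handy for unit tests:
def _trim_attempt_ok(attempt, verified, tolerance=TRIM_DIFFERENCE_MAX):
    return (abs(attempt['start_time'] - verified['start_time']) <= tolerance and
            abs(attempt['end_time'] - verified['end_time']) <= tolerance)

# e.g. _trim_attempt_ok({'start_time': 3.2, 'end_time': 9.9},
#                       {'start_time': 3.0, 'end_time': 10.5}) is True, while
# moving start_time to 4.5 (1.5 s off the verified 3.0) would fail.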

def expire_locked_items():
    """ Expires a locked item based on its time stamp """
    ant_tasks = app.annotation_tasks
    db_cursor = ant_tasks.cursor()
    # Task: name
    db_cursor.execute('''SELECT * FROM video_db WHERE name_locked=1 AND named=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['name_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Name)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=0, name_lock_time=? WHERE id=?''', (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    # Task: trim
    db_cursor.execute('''SELECT * FROM video_db WHERE trim_locked=1 AND trimmed=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['trim_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Trim)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=? WHERE id=?''', (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    return


def load_annotation_tasks(video_db):
    """ Wrapper for loading annotations """
    # id integer primary key,
    # url text,
    # named integer,
    # name_locked integer,
    # name_lock_time real,
    # named_by_user text,
    # occluded integer,
    # trimmed integer,
    # trim_locked integer,
    # trim_lock_time real,
    # trimmed_by_user text,
    # video_src text,
    # src_start_time integer,
    # src_end_time integer,
    # pad_start_frame integer,
    # pad_end_frame integer,
    # start_time real,
    # end_time real,
    # action_verb text,
    # action_noun text,
    # red_flag integer
    # Instantiate a connection to db
    annotation_tasks = sqlite3.connect(video_db)
    annotation_tasks.row_factory = dict_factory
    # returns the database
    return annotation_tasks
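
# --- Illustration (not part of the server) ----------------------------------
# The column comments above imply roughly the following table; this CREATE
# TABLE is a sketch reconstructed from those comments, not the authoritative
# statement used to build video_db.db:
#
#   CREATE TABLE video_db (
#       id integer primary key, url text,
#       named integer, name_locked integer, name_lock_time real,
#       named_by_user text, occluded integer,
#       trimmed integer, trim_locked integer, trim_lock_time real,
#       trimmed_by_user text, video_src text,
#       src_start_time integer, src_end_time integer,
#       pad_start_frame integer, pad_end_frame integer,
#       start_time real, end_time real,
#       action_verb text, action_noun text, red_flag integer
#   );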

def decide_if_needs_verification(json_res, mturk_db_connection):
    """ Decides whether this request should be answered with a verification
    video, keeping the ratio uniform across all requests. Called by get_task().
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :return boolean representing whether verification video will be returned
    """
    print json_res
    mturk_cur = mturk_db_connection.cursor()
    try:
        mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed
                             FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    except sqlite3.Error as e:
        print_log_info(str(e))
    query_result = mturk_cur.fetchone()
    print_log_info(json_res['hitId'])
    verifications_total, labels_total, verifications_completed, labels_completed = \
        query_result["verifications_total"], query_result["labels_total"], \
        query_result["verifications_completed"], query_result["labels_completed"]
    # Decide whether this request is going to be a verification video or not.
    # Let a = the verification videos left
    #     b = total number of videos left
    # The chance of getting a verification video is a/b.
    # This gives a uniform distribution of verification videos over the
    # remaining requests.
    chance_of_verification_video = (float(max(verifications_total - verifications_completed, 0)) /
                                    max(verifications_total + labels_total -
                                        verifications_completed - labels_completed, 1))
    return chance_of_verification_video > random.random()
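
# --- Illustration (not called by the server) --------------------------------
# Worked example of the a/b draw above: with 5 verification videos total, 20
# label videos total, 2 verifications done and 10 labels done, a = 5 - 2 = 3
# verification videos remain out of b = (5 + 20) - (2 + 10) = 13 remaining
# videos, so a verification clip is served with probability 3/13 ~= 0.23:
def _demo_verification_chance():
    verifications_total, labels_total = 5, 20
    verifications_completed, labels_completed = 2, 10
    chance = (float(max(verifications_total - verifications_completed, 0)) /
              max(verifications_total + labels_total -
                  verifications_completed - labels_completed, 1))
    assert abs(chance - 3.0 / 13.0) < 1e-9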

def get_verification_task(annotation_tasks, annotation_type):
    """ Wrapper for querying database for a verification task.
    :param annotation_tasks: connection to database containing annotation tasks
    :type annotation_tasks: sqlite3.Connection
    :param annotation_type: client-defined string for the type of the annotations we're doing
    :type annotation_type: string
    :return dict from querying database
    """
    db_cursor = annotation_tasks.cursor()
    if annotation_type == 'name' or annotation_type == 'name_preview':
        try:
            # from https://stackoverflow.com/questions/4114940/select-random-rows-in-sqlite
            db_cursor.execute('''SELECT * FROM video_db WHERE id IN
                                 (SELECT id FROM named_verification_videos ORDER BY RANDOM() LIMIT 1)''')
        except sqlite3.Error as e:
            print_log_info(str(e))
    else:
        db_cursor.execute('''SELECT * FROM video_db WHERE id IN
                             (SELECT id FROM trimmed_verification_videos ORDER BY RANDOM() LIMIT 1)''')
    return db_cursor.fetchone()


def task_completed(json_res, mturk_db_connection):
    """ Tells whether an mturk task has been completed.
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :return: boolean representing if task referred to in json_res' hitId has been completed
    """
    mturk_cur = mturk_db_connection.cursor()
    try:
        mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed
                             FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    except sqlite3.Error as e:
        print_log_info(str(e))
    query_result = mturk_cur.fetchone()
    verifications_total, labels_total, verifications_completed, labels_completed = \
        query_result["verifications_total"], query_result["labels_total"], \
        query_result["verifications_completed"], query_result["labels_completed"]
    return verifications_total - verifications_completed <= 0 and labels_total - labels_completed <= 0


def get_next_available_task(annotation_tasks, annotation_type):
    """ Wrapper for querying database for a new labelling task. Called by get_task().
    :param annotation_tasks: connection to database containing annotation tasks
    :type annotation_tasks: sqlite3.Connection
    :param annotation_type: client-defined string for the type of the annotations we're doing
    :type annotation_type: string
    :return dict from querying database
    """
    # get db cursor
    db_cursor = annotation_tasks.cursor()
    # Get the next task
    if annotation_type == 'name':
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=0 AND name_locked=0
                                 AND id not in (SELECT id from named_verification_videos) ''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    else:
        # So annotation_type == 'trim'
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=1 AND red_flag=0 AND trimmed=0 AND trim_locked=0
                                 AND id not in (SELECT id from trimmed_verification_videos) ''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    item = db_cursor.fetchone()
    # No task available
    if item is None:
        return None
    # Otherwise return a task.
    else:
        task = item
        cur_time = time.time()
        # update the lock
        if annotation_type == 'name':
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=1, name_lock_time=? WHERE id=?''',
                                  (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        else:
            # So annotation_type == 'trim'
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=1, trim_lock_time=? WHERE id=?''',
                                  (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        annotation_tasks.commit()
        return task
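
# --- Illustration (not part of the server) ----------------------------------
# On the "LIMIT 1 maybe?" TODOs above: fetchone() already consumes only the
# first row of the result set, but appending LIMIT 1 would let SQLite stop
# scanning as soon as one candidate is found. A hypothetical variant of the
# name query:
#
#   db_cursor.execute('''SELECT * FROM video_db
#                        WHERE named=0 AND name_locked=0
#                          AND id NOT IN (SELECT id FROM named_verification_videos)
#                        LIMIT 1''')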

def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk):
    """ Updates the data for a labelling task, plus relevant mturk variables if it's an mturk task.
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :param annotation_tasks: connection to database containing annotation tasks
    :type annotation_tasks: sqlite3.Connection
    :param json_res: JSON given by frontend's submit button
    :type json_res: dict
    :param is_mturk: indicates if the request came from an MTurk iFrame
    :return boolean indicating whether the update succeeded
    """
    # get db cursor
    db_cursor = annotation_tasks.cursor()
    mturk_cur = mturk_db_connection.cursor()
    # get annotation_type and video id
    ant_type = json_res['annotation_type']
    # Update naming task
    if ant_type == 'name':
        try:
            # Decide if video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM named_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply new label if it isn't a verification video
            if not is_verification:
                update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'],
                               json_res['user_name'], int(json_res['red_flag'])*1, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET named=1, name_locked=0, occluded=?, action_noun=?,
                                     action_verb=?, named_by_user=?, red_flag=? WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     verifications_completed = verifications_completed + 1
                                     WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO name_verification_attempts(
                                     hit_id, assignment_id, worker_id, id, action_noun, action_verb)
                                     VALUES (?,?,?,?,?,?)''',
                                  (json_res['hitId'], json_res['assignmentId'], json_res['workerId'],
                                   json_res['id'], json_res['nouns'], json_res['verb']))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     labels_completed = labels_completed + 1
                                     WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    else:
        # Update a trimming task
        try:
            # Decide if video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM trimmed_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply new label if it isn't a verification video
            if not is_verification:
                update_item = (float(json_res['start_time']), float(json_res['end_time']),
                               json_res['user_name'], int(json_res['red_flag'])*2, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?, end_time=?,
                                     trimmed_by_user=?, red_flag=? WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     verifications_completed = verifications_completed + 1
                                     WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO trim_verification_attempts(
                                     hit_id, assignment_id, worker_id, id, start_time, end_time)
                                     VALUES (?,?,?,?,?,?)''',
                                  (json_res['hitId'], json_res['assignmentId'], json_res['workerId'],
                                   json_res['id'], float(json_res['start_time']), float(json_res['end_time'])))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     labels_completed = labels_completed + 1
                                     WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            # TODO update mturk stuff
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    # color print the flag
    if json_res['red_flag']:
        print_log_info('\033[93m' + "Task ID ({:d}) Type ({:s}) has been RED_FLAGGED!".format(
            json_res['id'], ant_type) + '\033[0m')
    # return
    return True
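
# --- Illustration (not part of the server) ----------------------------------
# A hypothetical json_res for a completed naming task, showing the keys
# update_task reads (the MTurk keys are only present for iFrame submissions;
# all values here are made up):
#
#   {
#       "annotation_type": "name", "id": 42, "user_name": "alice",
#       "verb": "open", "nouns": "fridge door", "occluded": 0, "red_flag": 0,
#       "assignmentId": "3XAMPL3ASSIGNMENT", "workerId": "A1EXAMPLE",
#       "hitId": "3XAMPL3HIT"
#   }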

@app.errorhandler(404)
def not_found(error):
    """ Default error handler for 404 """
    return flask.make_response(json.dumps({'error': str(error)}), 404)


@app.route('/get_task', methods=['POST'])
def get_task():
    """ Get a task from the server.
    A request is a json file with the following fields:
    - "annotation_type", which can have the values...
      - name
      - name_preview
      - trim
      - trim_preview
    - "user_name"
    If it is a request from an MTurk iFrame, it also has the following:
    - "workerId"
    - "hitId"
    """
    # Dict holds the results to return to client
    ret = {}
    # Make sure the content type is json
    try:
        request_type = flask.request.headers.get('Content-Type')
        if request_type != 'application/json':
            raise ValueError('request type must be JSON')
        request_data = flask.request.get_data()
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    except:
        ret['code'] = -2
        ret['error_msg'] = 'unknown parameter error'
        return json.dumps(ret)
    # Decode json from request data into a dict, and make sure all required data is present
    try:
        json_file = json.JSONDecoder().decode(request_data)
        print_log_info("Task request: {:s}".format(json_file))
        if 'annotation_type' not in json_file:
            raise ValueError('annotation_type missing in request')
        else:
            # more sanity check
            ant_type = json_file['annotation_type']
            if not ((ant_type == 'name') or (ant_type == 'trim') or
                    (ant_type == 'name_preview') or (ant_type == 'trim_preview')):
                raise ValueError('unknown annotation_type')
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    is_mturk = "assignmentId" in json_file and "workerId" in json_file
    # Decide if we need a verification task
    if ant_type == 'name_preview' or ant_type == 'trim_preview':
        needs_verification_task = True
    elif ((ant_type == 'name' or ant_type == 'trim') and is_mturk):
        needs_verification_task = \
            decide_if_needs_verification(json_file, app.mturk_db_connection)
    else:
        needs_verification_task = False
    # Get a verification task or next available task, and return to user
    try:
        if needs_verification_task:
            task = get_verification_task(app.annotation_tasks, ant_type)
        else:
            task = get_next_available_task(app.annotation_tasks, ant_type)
        if not task:
            raise ValueError('can not get a valid task. please re-try.')
        else:
            ret = task
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    return json.dumps(ret)
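
# --- Illustration (not part of the server) ----------------------------------
# A hypothetical client call against a local instance (host, port and
# user_name are made up):
#
#   curl -X POST http://localhost:5050/get_task \
#        -H 'Content-Type: application/json' \
#        -d '{"annotation_type": "name", "user_name": "alice"}'
#
# On success the response body is the task row itself (id, url, etc.); on
# failure it is {"code": <negative int>, "error_msg": "..."}.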

@app.route('/return_task', methods=['POST'])
def return_task():
    """ Processes the JSON sent from the client to submit a finished task.
    JSON has the following fields:
    - id, which is the video ID
    - annotation_type, which can be "name" or "trim"
    - user_name
    If the request is coming from an mturk iFrame, it should have:
    - assignmentId
    - workerId
    - hitId
    If annotation_type is "name", it should have the following:
    - verb, a string representing the word selected from the dropdown menu
    - occluded, a boolean from the checkbox in the page
    - nouns, a string filled out by the user for the objects being handled
    TODO figure out the trim fields
    """
    # Dict holds the results to return to client
    ret = {}
    try:
        # make sure the content type is json
        request_type = flask.request.headers.get('Content-Type')
        if request_type != 'application/json':
            raise ValueError('request type must be JSON')
        request_data = flask.request.get_data()
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    except:
        ret['code'] = -2
        ret['error_msg'] = 'unknown parameter error'
        return json.dumps(ret)
    # decode json from request data into a dict, and make sure all required data is present
    try:
        json_file = json.JSONDecoder().decode(request_data)
        print_log_info("Task returned: {:s}".format(json_file))
        if 'annotation_type' not in json_file:
            raise ValueError('annotation_type missing in request')
        if 'id' not in json_file:
            raise ValueError('id missing in request')
        else:
            # more sanity check
            ant_type = json_file['annotation_type']
            if not ((ant_type == 'name') or (ant_type == 'trim')):
                raise ValueError('unknown annotation_type')
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    is_mturk = "assignmentId" in json_file and "workerId" in json_file
    # Apply the returned labels
    try:
        flag = update_task(app.mturk_db_connection, app.annotation_tasks, json_file, is_mturk)
        if not flag:
            raise ValueError('can not update the task. Please re-try.')
        else:
            ret['code'] = 0
            ret['error_msg'] = 'success'
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    more_to_complete = not is_mturk or not task_completed(json_file, app.mturk_db_connection)
    if not more_to_complete:
        try:
            mturk_db_connection = app.mturk_db_connection
            mturk_cur = mturk_db_connection.cursor()
            mturk_cur.execute('''UPDATE hits SET status='pending_approval' WHERE assignment_id=?''',
                              (json_file["assignmentId"],))
            mturk_db_connection.commit()
        except sqlite3.Error as err:
            ret['code'] = -3
            ret['error_msg'] = str(err)
            return json.dumps(ret)
    ret['more_to_complete'] = more_to_complete
    return json.dumps(ret)


@app.route('/hello')
def hello():
    return 'hello world'
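
# --- Illustration (not part of the server) ----------------------------------
# A hypothetical successful /return_task response for an MTurk worker who
# still has clips left in the HIT:
#
#   {"code": 0, "error_msg": "success", "more_to_complete": true}
#
# Once task_completed() reports the HIT done, more_to_complete flips to false
# and the assignment is queued for approve_assignments() via the
# 'pending_approval' status.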
)", "err: ret['code'] = -1 ret['error_msg'] = str(err) return json.dumps(ret) except: ret['code'] = -2", "decides whether assignments pending approval can be automatically approved and then marks them", "integer, # trimmed integer, # trim_locked integer, # trim_lock_time real, # trimmed_by_user text,", "video_db WHERE id=?\", (attempt_times_set['id'],)) verified_times_set = db_cursor.fetchone() if abs(attempt_times_set['start_time'] - verified_times_set['start_time']) > TRIM_DIFFERENCE_MAX:", "worker_id, id, action_noun, action_verb) VALUES (?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], json_res['nouns'], json_res['verb'])) mturk_db_connection.commit()", "end_time=?, trimmed_by_user=?, red_flag=? WHERE id=?''', update_item) # Update MTurk database to reflect this", "isn't a verification video if not is_verification: update_item = (float(json_res['start_time']), float(json_res['end_time']), json_res['user_name'], int(json_res['red_flag'])*2,", "abs(attempt_times_set['end_time'] - verified_times_set['end_time']) > TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed! Attempt had end time \"", "FROM hits WHERE status='pending_approval'\") except sqlite3.Error as e: print_log_info(str(e)) return query_result = mturk_cur.fetchall()", "({:d}) Type ({:s}) has been RED_FLAGGED!\".format( json_res['id'], ant_type) + '\\033[0m') # return return", "except sqlite3.Error as e: print_log_info(str(e)) else: try: mturk_cur.execute('''UPDATE hits SET status='pending_manual_approval' WHERE hit_id=?''',", "parser.add_argument('--mturk_db', dest='mturk_db', help='SQLite3 database with logs for mturk', default='mturk_db.db', type=str) parser.add_argument('--sandbox', dest='sandbox', help='If", "int(json_res['id'])) db_cursor.execute('''UPDATE video_db SET named=1, name_locked=0, occluded=?, action_noun=?, action_verb=?, named_by_user=?, red_flag=? WHERE id=?''',", "verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_cur.execute('''INSERT INTO name_verification_attempts( hit_id, assignment_id,", "1)''') return db_cursor.fetchone() def task_completed(json_res, mturk_db_connection): \"\"\" Tells whether an mturk task has", "(SELECT id FROM named_verification_videos ORDER BY RANDOM() LIMIT 1)''') except sqlite3.Error as e:", "Attempt failed! 
Attempt had noun \" + str(attempt_action_set['action_noun']) + \" but the verified", "d[col[0]] = row[idx] return d def print_log_info(str_info): \"\"\"Helper function for logging info\"\"\" prefix", "ret['error_msg'] = 'unknown parameter error' return json.dumps(ret) # decode json from request data", "if we need a verification task if ant_type == 'name_preview' or ant_type ==", "id=?''', (cur_time, task['id'])) except sqlite3.Error as e: print_log_info(str(e)) annotation_tasks.commit() return task def update_task(mturk_db_connection,", "hits SET assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'],", "db_cursor.execute(\"SELECT start_time, end_time FROM video_db WHERE id=?\", (attempt_times_set['id'],)) verified_times_set = db_cursor.fetchone() if abs(attempt_times_set['start_time']", "cross_origin import tornado.wsgi import tornado.httpserver # database import sqlite3 # redirect stdout and", ":type annotation_tasks: sqlite3.Connection :param annotation_type: client-defined string for the type of the annotations", "connection to database containing mturk-related data :type annotation_tasks: sqlite3.Connection :param json_res: JSON given", "print_log_info(str(e)) return query_result = mturk_cur.fetchall() # We need to loop through every assignment/hit", "db_cursor.fetchone()['count(*)'] print_log_info(\"All Stats: Named {:d}, Trimmed {:d}, flagged {:d}, Locked {:d}\".format( num_clips_named, num_clips_trimmed,", "def start_from_terminal(): \"\"\" entry of the main function \"\"\" # parse params args", "query_result[\"verifications_completed\"], query_result[\"labels_completed\"] return verifications_total - verifications_completed <= 0 and labels_total - labels_completed <=", "or (ant_type == 'trim') or (ant_type == 'name_preview') or (ant_type == 'trim_preview')): raise", "have the values... 
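
# A minimal, self-contained sketch (illustrative only, not part of the server
# logic) showing how dict_factory hooks into sqlite3's row_factory so fetched
# rows come back as dicts keyed by column name. The in-memory table 'demo'
# and its contents are made up.
def _dict_factory_demo():
    conn = sqlite3.connect(':memory:')
    conn.row_factory = dict_factory
    cur = conn.cursor()
    cur.execute('''CREATE TABLE demo (id integer primary key, url text)''')
    cur.execute('''INSERT INTO demo (url) VALUES (?)''', ('http://example.com/clip.mp4',))
    cur.execute('''SELECT * FROM demo''')
    # With dict_factory installed, fetchone() yields a dict instead of a tuple.
    assert cur.fetchone() == {'id': 1, 'url': 'http://example.com/clip.mp4'}
    conn.close()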

def collect_db_stats():
    """ Collect DB stats """
    ant_tasks = app.annotation_tasks
    db_cursor = ant_tasks.cursor()
    # show us some stats
    try:
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE named=1''')
        num_clips_named = db_cursor.fetchone()['count(*)']
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE trimmed=1''')
        num_clips_trimmed = db_cursor.fetchone()['count(*)']
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE trim_locked=1 OR name_locked=1''')
        num_clips_locked = db_cursor.fetchone()['count(*)']
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE red_flag>=1''')
        num_clips_flaged = db_cursor.fetchone()['count(*)']
        print_log_info("All Stats: Named {:d}, Trimmed {:d}, flagged {:d}, Locked {:d}".format(
            num_clips_named, num_clips_trimmed, num_clips_flaged, num_clips_locked))
    except sqlite3.Error as e:
        print_log_info(str(e))
    return


def approve_assignments():
    """ Periodic callback decides whether assignments pending approval can be
    automatically approved and then marks them accordingly
    """
    # TODO verify correct verification labels here
    # TODO make Mturk login details command line arguments
    sandbox_host = 'mechanicalturk.sandbox.amazonaws.com'
    real_host = 'mechanicalturk.amazonaws.com'
    host = (sandbox_host if app.sandbox else real_host)
    mturk = boto.mturk.connection.MTurkConnection(
        aws_access_key_id=app.aws_access_key_id,
        aws_secret_access_key=app.aws_secret_access_key,
        host=host,
        debug=1  # debug = 2 prints out all requests.
    )
    mturk_cur = app.mturk_db_connection.cursor()
    db_cursor = app.annotation_tasks.cursor()
    try:
        # TODO make pending approval a separate table if we think that would be time-efficient
        mturk_cur.execute("SELECT assignment_id, hit_id, task FROM hits WHERE status='pending_approval'")
    except sqlite3.Error as e:
        print_log_info(str(e))
        return
    query_result = mturk_cur.fetchall()
    # We need to loop through every assignment/hit set pending approval
    for result in query_result:
        assignment_id = str(result["assignment_id"])
        hit_id = str(result["hit_id"])
        task = str(result["task"])
        all_verifications_correct = True
        print(assignment_id)
        try:
            if task == "name":
                mturk_cur.execute("SELECT id, action_noun, action_verb FROM name_verification_attempts WHERE hit_id=?", (hit_id,))
                action_query_result = mturk_cur.fetchall()
                for attempt_action_set in action_query_result:
                    db_cursor.execute("SELECT action_noun, action_verb FROM video_db WHERE id=?", (attempt_action_set['id'],))
                    verified_action_set = db_cursor.fetchone()
                    if attempt_action_set['action_verb'] != verified_action_set['action_verb']:
                        print_log_info("Verification Attempt failed! Attempt had verb " + str(attempt_action_set['action_verb']) + " but the verified had verb " + str(verified_action_set['action_verb']))
                        all_verifications_correct = False
                        break
                    if attempt_action_set['action_noun'] != verified_action_set['action_noun']:
                        print_log_info("Verification Attempt failed! Attempt had noun " + str(attempt_action_set['action_noun']) + " but the verified had noun " + str(verified_action_set['action_noun']))
                        all_verifications_correct = False
                        break
            else:
                # ie. elif task == "trim":
                print("trim thing")
                mturk_cur.execute("SELECT id, start_time, end_time FROM trim_verification_attempts WHERE hit_id=?", (hit_id,))
                times_query_result = mturk_cur.fetchall()
                for attempt_times_set in times_query_result:
                    db_cursor.execute("SELECT start_time, end_time FROM video_db WHERE id=?", (attempt_times_set['id'],))
                    verified_times_set = db_cursor.fetchone()
                    if abs(attempt_times_set['start_time'] - verified_times_set['start_time']) > TRIM_DIFFERENCE_MAX:
                        print_log_info("Verification Attempt failed! Attempt had start time " + str(attempt_times_set['start_time']) + " but the verified had start time " + str(verified_times_set['start_time']))
                        all_verifications_correct = False
                        break
                    if abs(attempt_times_set['end_time'] - verified_times_set['end_time']) > TRIM_DIFFERENCE_MAX:
                        print_log_info("Verification Attempt failed! Attempt had end time " + str(attempt_times_set['end_time']) + " but the verified had end time " + str(verified_times_set['end_time']))
                        all_verifications_correct = False
                        break
        except sqlite3.Error as e:
            print_log_info(str(e))
            continue
        if all_verifications_correct:
            # TODO Find out if this needs to be a transaction
            print_log_info("Approving assignment " + assignment_id)
            try:
                response = mturk.approve_assignment(assignment_id)
            except boto.mturk.connection.MTurkRequestError as e:
                print_log_info("MTurk verification rejected. Typically, this means the client's completion " +
                               "has not propagated through Amazon's servers.")
                print_log_info(str(e))
                query_result = mturk_cur.fetchone()
                continue
            print_log_info(assignment_id + " approved. Amazon response: " + str(response))
            try:
                mturk_cur.execute('''UPDATE hits SET status='approved' WHERE hit_id=?''', (hit_id,))
                app.mturk_db_connection.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
        else:
            try:
                mturk_cur.execute('''UPDATE hits SET status='pending_manual_approval' WHERE hit_id=?''', (hit_id,))
                app.mturk_db_connection.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    return


def expire_locked_items():
    """ Expires a locked item based on its time stamp """
    ant_tasks = app.annotation_tasks
    db_cursor = ant_tasks.cursor()
    # Task: name
    db_cursor.execute('''SELECT * FROM video_db WHERE name_locked=1 AND named=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['name_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Name)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=0, name_lock_time=? WHERE id=?''', (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    # Task: trim
    db_cursor.execute('''SELECT * FROM video_db WHERE trim_locked=1 AND trimmed=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['trim_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Trim)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=? WHERE id=?''', (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    return


def load_annotation_tasks(video_db):
    """ Wrapper for loading annotations """
    # id integer primary key,
    # url text,
    # named integer,
    # name_locked integer,
    # name_lock_time real,
    # named_by_user text,
    # occluded integer,
    # trimmed integer,
    # trim_locked integer,
    # trim_lock_time real,
    # trimmed_by_user text,
    # video_src text
    # src_start_time integer,
    # src_end_time integer,
    # pad_start_frame integer,
    # pad_end_frame integer,
    # start_time real,
    # end_time real,
    # action_verb text,
    # action_noun text,
    # red_flag integer
    # Instantiate a connection to db
    annotation_tasks = sqlite3.connect(video_db)
    annotation_tasks.row_factory = dict_factory
    # returns the database
    return annotation_tasks
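
# The column comments above imply roughly the following video_db schema.
# This CREATE TABLE sketch is a reconstruction for reference only; the real
# database is expected to ship pre-built, and exact types/constraints may differ.
VIDEO_DB_SCHEMA_SKETCH = '''
CREATE TABLE IF NOT EXISTS video_db (
    id integer primary key,
    url text,
    named integer,
    name_locked integer,
    name_lock_time real,
    named_by_user text,
    occluded integer,
    trimmed integer,
    trim_locked integer,
    trim_lock_time real,
    trimmed_by_user text,
    video_src text,
    src_start_time integer,
    src_end_time integer,
    pad_start_frame integer,
    pad_end_frame integer,
    start_time real,
    end_time real,
    action_verb text,
    action_noun text,
    red_flag integer
)
'''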

def decide_if_needs_verification(json_res, mturk_db_connection):
    """ Makes the decision as to whether this request is to be a verification
    video or not.
    Let a = the verification videos left
        b = total number of videos left
    The chance of getting a verification video is then a / b, which gives an
    equal chance of getting a verification video across all requests.
    Called by get_task().
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :return boolean representing whether verification video will be returned
    """
    print(json_res)
    mturk_cur = mturk_db_connection.cursor()
    try:
        mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    except sqlite3.Error as e:
        print_log_info(str(e))
    query_result = mturk_cur.fetchone()
    print_log_info(json_res['hitId'])
    verifications_total, labels_total, verifications_completed, labels_completed = \
        query_result["verifications_total"], query_result["labels_total"], \
        query_result["verifications_completed"], query_result["labels_completed"]
    chance_of_verification_video = (float(max(verifications_total - verifications_completed, 0))
                                    / max(verifications_total + labels_total - verifications_completed - labels_completed, 1))
    return chance_of_verification_video > random.random()
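
# A worked example (numbers are made up) of the verification-chance formula
# above: with verifications_total=5, verifications_completed=2,
# labels_total=20 and labels_completed=10, there are 3 verifications and
# 10 labels left, so
#   chance = max(5 - 2, 0) / max(5 + 20 - 2 - 10, 1) = 3 / 13 ~= 0.23
# i.e. the remaining verification clips are spread evenly over the
# remaining requests for that HIT.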

def get_verification_task(annotation_tasks, annotation_type):
    """ Wrapper for querying database for a verification task.
    :param annotation_tasks: connection to database containing mturk-related data
    :type annotation_tasks: sqlite3.Connection
    :param annotation_type: client-defined string for the type of the annotations we're doing
    :type annotation_type: string
    :return dict from querying database
    """
    db_cursor = annotation_tasks.cursor()
    if annotation_type == 'name' or annotation_type == 'name_preview':
        try:
            # from https://stackoverflow.com/questions/4114940/select-random-rows-in-sqlite
            db_cursor.execute('''SELECT * FROM video_db WHERE id IN (SELECT id FROM named_verification_videos ORDER BY RANDOM() LIMIT 1)''')
        except sqlite3.Error as e:
            print_log_info(str(e))
    else:
        db_cursor.execute('''SELECT * FROM video_db WHERE id IN (SELECT id FROM trimmed_verification_videos ORDER BY RANDOM() LIMIT 1)''')
    return db_cursor.fetchone()


def task_completed(json_res, mturk_db_connection):
    """ Tells whether an mturk task has been completed
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :return: boolean representing if task referred to in json_res' hitId has been completed
    """
    mturk_cur = mturk_db_connection.cursor()
    try:
        mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    except sqlite3.Error as e:
        print_log_info(str(e))
    query_result = mturk_cur.fetchone()
    verifications_total, labels_total, verifications_completed, labels_completed = \
        query_result["verifications_total"], query_result["labels_total"], \
        query_result["verifications_completed"], query_result["labels_completed"]
    return verifications_total - verifications_completed <= 0 and labels_total - labels_completed <= 0


def get_next_available_task(annotation_tasks, annotation_type):
    """ Wrapper for querying database for a new labelling task.
    Called by get_task().
    :param annotation_tasks: connection to database containing mturk-related data
    :type annotation_tasks: sqlite3.Connection
    :param annotation_type: client-defined string for the type of the annotations we're doing
    :type annotation_type: string
    :return dict from querying database
    """
    db_cursor = annotation_tasks.cursor()
    # Get the next task
    if annotation_type == 'name':
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=0 AND name_locked=0 AND id not in (SELECT id from named_verification_videos) ''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    else:
        # So annotation_type == 'trim'
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=1 AND red_flag=0 AND trimmed=0 AND trim_locked=0 AND id not in (SELECT id from trimmed_verification_videos) ''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    item = db_cursor.fetchone()
    # No task available
    if item is None:
        return None
    # Otherwise return a task.
    else:
        task = item
        cur_time = time.time()
        # update the lock
        if annotation_type == 'name':
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=1, name_lock_time=? WHERE id=?''', (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        else:
            # So annotation_type == 'trim'
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=1, trim_lock_time=? WHERE id=?''', (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        annotation_tasks.commit()
        return task

def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk):
    """ Updates the data for a labelling task plus relevant mturk variables
    if it's an mturk task.
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :param annotation_tasks: connection to database containing mturk-related data
    :type annotation_tasks: sqlite3.Connection
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param is_mturk: indicates if the request came from an MTurk iFrame
    :return dict from querying database
    """
    # get db cursor
    db_cursor = annotation_tasks.cursor()
    mturk_cur = mturk_db_connection.cursor()
    # get annotation_type and video id
    ant_type = json_res['annotation_type']
    # Update naming task
    if ant_type == 'name':
        try:
            # check if the video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM named_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply new label if it isn't a verification video
            if not is_verification:
                update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'], json_res['user_name'], int(json_res['red_flag'])*1, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET named=1, name_locked=0, occluded=?, action_noun=?, action_verb=?, named_by_user=?, red_flag=? WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO name_verification_attempts( hit_id, assignment_id, worker_id, id, action_noun, action_verb) VALUES (?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], json_res['nouns'], json_res['verb']))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, labels_completed = labels_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            # TODO update mturk stuff
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    else:
        # ie. it's a trimming task
        try:
            # check if the video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM trimmed_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply new label if it isn't a verification video
            if not is_verification:
                update_item = (float(json_res['start_time']), float(json_res['end_time']), json_res['user_name'], int(json_res['red_flag'])*2, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?, end_time=?, trimmed_by_user=?, red_flag=? WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO trim_verification_attempts( hit_id, assignment_id, worker_id, id, start_time, end_time) VALUES (?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], float(json_res['start_time']), float(json_res['end_time'])))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, labels_completed = labels_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            # TODO update mturk stuff
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    # color print the red flag
    if json_res['red_flag']:
        print_log_info('\033[93m' + "Task ID ({:d}) Type ({:s}) has been RED_FLAGGED!".format(
            json_res['id'], ant_type) + '\033[0m')
    # return
    return True


@app.errorhandler(404)
def not_found(error):
    """ Default error handler for 404 """
    return flask.make_response(json.dumps({'error': str(error)}), 404)


@app.route('/get_task', methods=['POST'])
def get_task():
    """ Get a task from the server
    A request is a json file with the following fields:
    - "annotation_type" which can have the values...
        - name
        - name_preview
        - trim
        - trim_preview
    - "user_name"
    If it is a request from an MTurk iFrame, it should have:
    - "assignmentId"
    - "workerId"
    - "hitId"
    """
    # Dict holds the results to return to client
    ret = {}
    try:
        # make sure the content type is json
        request_type = flask.request.headers.get('Content-Type')
        if request_type != 'application/json':
            raise ValueError('request type must be JSON')
        request_data = flask.request.get_data()
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    except:
        ret['code'] = -2
        ret['error_msg'] = 'unknown parameter error'
        return json.dumps(ret)

    # decode json from request data into a dict, and make sure all required data is present
    try:
        json_file = json.JSONDecoder().decode(request_data)
        print_log_info("Task request: {}".format(json_file))
        is_mturk = "assignmentId" in json_file and "workerId" in json_file and \
            "hitId" in json_file
        if 'annotation_type' not in json_file:
            raise ValueError('annotation_type missing in request')
        else:
            # more sanity check
            ant_type = json_file['annotation_type']
            if not ((ant_type == 'name') or (ant_type == 'trim')
                    or (ant_type == 'name_preview') or (ant_type == 'trim_preview')):
                raise ValueError('unknown annotation_type')
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)

    # Decide if we need a verification task
    if ant_type == 'name_preview' or ant_type == 'trim_preview':
        needs_verification_task = True
    elif ((ant_type == 'name' or ant_type == 'trim') and is_mturk):
        needs_verification_task = \
            decide_if_needs_verification(json_file, app.mturk_db_connection)
    else:
        needs_verification_task = False

    # Get a verification task or next available task, and return to user
    try:
        if needs_verification_task:
            task = get_verification_task(app.annotation_tasks, ant_type)
        else:
            task = get_next_available_task(app.annotation_tasks, ant_type)
        if not task:
            raise ValueError('can not get a valid task. please re-try.')
        else:
            ret = task
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    return json.dumps(ret)
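
# An illustrative client call for /get_task (values are made up; the route
# contract is the docstring above). A browser client might send:
#   curl -X POST -H 'Content-Type: application/json' \
#        -d '{"annotation_type": "name", "user_name": "alice"}' \
#        http://localhost:5050/get_task
# An MTurk iFrame additionally sends assignmentId, workerId and hitId,
# which enables the verification-task logic above.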

@app.route('/return_task', methods=['POST'])
def return_task():
    """ Processes the JSON sent from the client to submit a label
    JSON has the following fields:
    - id, which is the video ID
    - annotation_type, which can be "name" or "trim"
    - user_name
    If the request is coming from an mturk iFrame, it should have:
    - assignmentId
    - workerId
    - hitId
    If annotation_type is "name", it should have the following:
    - verb, a string representing the word selected from the dropdown menu
    - occluded, a boolean from the checkbox in the page
    - nouns, a string filled out by the user for the objects being handled
    TODO figure out the trim stuff
    """
    # Dict holds the results to return to client
    ret = {}
    # Make sure the content type is json
    try:
        request_type = flask.request.headers.get('Content-Type')
        if request_type != 'application/json':
            raise ValueError('request type must be JSON')
        request_data = flask.request.get_data()
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    # Decode json from request data into a dict, and make sure all required data is present
    try:
        json_file = json.JSONDecoder().decode(request_data)
        print_log_info("Task returned: {}".format(json_file))
        if 'annotation_type' not in json_file:
            raise ValueError('annotation_type missing in request')
        if 'id' not in json_file:
            raise ValueError('id missing in request')
        else:
            # more sanity check
            ant_type = json_file['annotation_type']
            if not ((ant_type == 'name') or (ant_type == 'trim')):
                raise ValueError('unknown annotation_type')
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    is_mturk = "assignmentId" in json_file and "workerId" in json_file and \
        "hitId" in json_file
    # update the available task
    try:
        flag = update_task(app.mturk_db_connection, app.annotation_tasks, json_file, is_mturk)
        if not flag:
            raise ValueError('can not update the task. Please re-try.')
        else:
            ret['code'] = 0
            ret['error_msg'] = 'success'
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    more_to_complete = not is_mturk or \
        not task_completed(json_file, app.mturk_db_connection)
    if not more_to_complete:
        try:
            mturk_db_connection = app.mturk_db_connection
            mturk_cur = mturk_db_connection.cursor()
            mturk_cur.execute('''UPDATE hits SET status='pending_approval' WHERE assignment_id=?''', (json_file["assignmentId"],))
            mturk_db_connection.commit()
        except sqlite3.Error as err:
            ret['code'] = -3
            ret['error_msg'] = str(err)
            return json.dumps(ret)
    ret['more_to_complete'] = more_to_complete
    return json.dumps(ret)


@app.route('/hello')
def hello():
    return 'hello world'
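
# An illustrative /return_task payload for a naming task (field values are
# made up; see the docstring above for the contract):
#   {"id": 17, "annotation_type": "name", "user_name": "alice",
#    "verb": "open", "nouns": "fridge door", "occluded": 0, "red_flag": 0}
# On a successful non-final submission the server is expected to answer
# {"code": 0, "error_msg": "success", "more_to_complete": true}.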
else: task = item cur_time = time.time()", "had verb \" + str(attempt_action_set['action_verb']) + \" but the verified had verb \"", "one server server.start(1) print_log_info(\"Tornado server starting on port {}\".format(args.port)) # show stats every", "get_task(). :param json_res: JSON given by frontend's submit button; must have hitId key", "cors) app = flask.Flask(__name__) # pylint: disable=invalid-name CORS(app) # Maximum time allowed for", "args.aws_secret_access_key app.sandbox = args.sandbox # start server without cert if none provided if", "(hit_id,)) app.mturk_db_connection.commit() except sqlite3.Error as e: print_log_info(str(e)) return def expire_locked_items(): \"\"\" Expires a", "item['id'])) ant_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) return def load_annotation_tasks(video_db): \"\"\" Wrapper for", "item in locked_items: delay = time.time() - item['trim_lock_time'] if delay > MAX_DELAY: print_log_info(\"Expiring", "available task try: flag = update_task(app.mturk_db_connection, app.annotation_tasks, json_file, is_mturk) if not flag: raise", "time #import logging #import traceback # flask import flask from flask_cors import CORS,", "the service collect_db_stats() approve_assignments() tornado.ioloop.PeriodicCallback(expire_locked_items, 20*1000).start() tornado.ioloop.PeriodicCallback(collect_db_stats, 3600*1000).start() tornado.ioloop.PeriodicCallback(approve_assignments, 20*1000).start() tornado.ioloop.IOLoop.current().start() if __name__", "indicates if :return dict from querying database \"\"\" # get db cursor db_cursor", "task == \"trim\": print \"trim thing\" mturk_cur.execute(\"SELECT id, start_time, end_time FROM trim_verification_attempts WHERE", "sqlite3.Connection :return boolean representing whether verification video will be returned \"\"\" print json_res", "and then marks them accordingly \"\"\" # TODO verify correct verification labels here", "(attempt_times_set['id'],)) verified_times_set = db_cursor.fetchone() if abs(attempt_times_set['start_time'] - verified_times_set['start_time']) > TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed!", "\" + str(verified_action_set['action_noun'])) all_verifications_correct = False break else: # ie. 
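# The queries above assume a `hits` table in the mturk database. Its exact
# schema is not defined in this file, so the sketch below is an assumption
# inferred only from the columns the code reads and writes:
#
#   CREATE TABLE hits (
#       hit_id text primary key,
#       assignment_id text,
#       worker_id text,
#       task text,        -- 'name' or 'trim'
#       status text,      -- 'pending_approval', 'approved', 'pending_manual_approval'
#       verifications_total integer,
#       labels_total integer,
#       verifications_completed integer,
#       labels_completed integer
#   );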
def expire_locked_items():
    """ Expires a locked item based on its time stamp """
    ant_tasks = app.annotation_tasks
    db_cursor = ant_tasks.cursor()
    # Task: name
    db_cursor.execute('''SELECT * FROM video_db WHERE name_locked=1 AND named=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['name_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Name)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=0, name_lock_time=? WHERE id=?''',
                                  (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    # Task: trim
    db_cursor.execute('''SELECT * FROM video_db WHERE trim_locked=1 AND trimmed=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['trim_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Trim)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=? WHERE id=?''',
                                  (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    return


def load_annotation_tasks(video_db):
    """ Wrapper for loading annotations """
    # id integer primary key,
    # url text,
    # named integer,
    # name_locked integer,
    # name_lock_time real,
    # named_by_user text,
    # occluded integer,
    # trimmed integer,
    # trim_locked integer,
    # trim_lock_time real,
    # trimmed_by_user text,
    # video_src text,
    # src_start_time integer,
    # src_end_time integer,
    # pad_start_frame integer,
    # pad_end_frame integer,
    # start_time real,
    # end_time real,
    # action_verb text,
    # action_noun text,
    # red_flag integer
    # Instantiate a connection to db
    annotation_tasks = sqlite3.connect(video_db)
    annotation_tasks.row_factory = dict_factory
    # returns the database
    return annotation_tasks
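# For reference, the column comments above imply roughly the following
# video_db schema. This CREATE TABLE is a sketch reconstructed from those
# comments, not a statement taken from the original database setup code:
#
#   CREATE TABLE video_db (
#       id integer primary key,
#       url text,
#       named integer, name_locked integer, name_lock_time real, named_by_user text,
#       occluded integer,
#       trimmed integer, trim_locked integer, trim_lock_time real, trimmed_by_user text,
#       video_src text,
#       src_start_time integer, src_end_time integer,
#       pad_start_frame integer, pad_end_frame integer,
#       start_time real, end_time real,
#       action_verb text, action_noun text,
#       red_flag integer
#   );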
elif task == \"trim\": print \"trim", "cursor db_cursor = annotation_tasks.cursor() # Get the next task if annotation_type == 'name':", "ret['error_msg'] = str(err) return json.dumps(ret) ret['more_to_complete'] = more_to_complete return json.dumps(ret) @app.route('/hello') def hello():", "data :type mturk_db_connection: sqlite3.Connection :return boolean representing whether verification video will be returned", "# trimmed integer, # trim_locked integer, # trim_lock_time real, # trimmed_by_user text, #", "named=0 AND name_locked=0 AND id not in (SELECT id from named_verification_videos) ''') #", "containing mturk-related data :type annotation_tasks: sqlite3.Connection :param json_res: JSON given by frontend's submit", "'id' not in json_file: raise ValueError('id missing in request') else: # more sanity", "and is_verification: mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1 WHERE", "'name': try: db_cursor.execute('''SELECT * FROM video_db WHERE named=0 AND name_locked=0 AND id not", "video_db WHERE trimmed=1''') num_clips_trimmed = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db WHERE trim_locked=1 OR", "= -3 ret['error_msg'] = str(err) return json.dumps(ret) is_mturk = \"assignmentId\" in json_file and", "args.keyfile, }) server.bind(args.port) # setup exist function def save_db(): app.annotation_tasks.close() app.mturk_db_connection.close() import atexit", "db_cursor.fetchone() if abs(attempt_times_set['start_time'] - verified_times_set['start_time']) > TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed! Attempt had start", "lock if annotation_type == 'name': try: db_cursor.execute('''UPDATE video_db SET name_locked=1, name_lock_time=? WHERE id=?''',", "verification video if not is_verification: update_item = (float(json_res['start_time']), float(json_res['end_time']), json_res['user_name'], int(json_res['red_flag'])*2, int(json_res['id'])) db_cursor.execute('''UPDATE", "of getting a verification video across all requests. Called by get_task(). 
:param json_res:", "return annotation_tasks def decide_if_needs_verification(json_res, mturk_db_connection): \"\"\" Makes the decision as to whether this", "result in query_result: assignment_id = str(result[\"assignment_id\"]) hit_id = str(result[\"hit_id\"]) task = str(result[\"task\"]) all_verifications_correct", "database \"\"\" # get db cursor db_cursor = annotation_tasks.cursor() # Get the next", "show us some stats try: db_cursor.execute('''SELECT count(*) FROM video_db WHERE named=1''') num_clips_named =", "named_by_user text, # occluded integer, # trimmed integer, # trim_locked integer, # trim_lock_time", "tasks app.annotation_tasks = load_annotation_tasks(args.video_db) app.mturk_db_connection = load_annotation_tasks(args.mturk_db) # Set global variables app.aws_access_key_id =", "\"workerId\" in json_file and \\ \"hitId\" in json_file # Get next available task", "json_res: dict :param is_mturk: indicates if :return dict from querying database \"\"\" #", "+ 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_cur.execute('''INSERT INTO name_verification_attempts( hit_id, assignment_id, worker_id,", "json_file and \\ \"hitId\" in json_file # Get next available task try: flag", "return json.dumps(ret) more_to_complete = not is_mturk or \\ not task_completed(json_file, app.mturk_db_connection) if not", "args.certfile == '' and args.keyfile == '': server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app)) else: server =", "= mturk_db_connection.cursor() try: mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed FROM hits WHERE hit_id=?''', (json_res['hitId'],))", "from the dropdown menu - occluded, a boolean from the checkbox in the", "Get next available task try: flag = update_task(app.mturk_db_connection, app.annotation_tasks, json_file, is_mturk) if not", "\"certfile\": args.certfile, \"keyfile\": args.keyfile, }) server.bind(args.port) # setup exist function def save_db(): app.annotation_tasks.close()", "WHERE hit_id=?''', (json_res['hitId'],)) except sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone() print_log_info(json_res['hitId']) verifications_total,", "num_clips_flaged, num_clips_locked)) except sqlite3.Error as e: print_log_info(str(e)) return def approve_assignments(): \"\"\" Periodic callback", "trimmed=1, trim_locked=0, start_time=?, end_time=?, trimmed_by_user=?, red_flag=? 
WHERE id=?''', update_item) # Update MTurk database", "been RED_FLAGGED!\".format( json_res['id'], ant_type) + '\\033[0m') # return return True @app.errorhandler(404) def not_found(error):", "flag = update_task(app.mturk_db_connection, app.annotation_tasks, json_file, is_mturk) if not flag: raise ValueError('can not update", "global variables app.aws_access_key_id = args.aws_access_key_id app.aws_secret_access_key = args.aws_secret_access_key app.sandbox = args.sandbox # start", "up one server server.start(1) print_log_info(\"Tornado server starting on port {}\".format(args.port)) # show stats", "if :return dict from querying database \"\"\" # get db cursor db_cursor =", "else: task = get_next_available_task(app.annotation_tasks, ant_type) if not task: raise ValueError('can not get a", "query_result[\"verifications_total\"], query_result[\"labels_total\"], \\ query_result[\"verifications_completed\"], query_result[\"labels_completed\"] chance_of_verification_video = (float(max(verifications_total - verifications_completed, 0)) / max(verifications_total", "mturk_db_connection: sqlite3.Connection :param annotation_tasks: connection to database containing mturk-related data :type annotation_tasks: sqlite3.Connection", "verified had end time \" + str(verified_times_set['end_time'])) all_verifications_correct = False break except sqlite3.Error", "Access Key ID', default='', type=str) parser.add_argument('--aws_key', dest='aws_secret_access_key', help='AWS Secret Access Key', default='', type=str)", "# database import sqlite3 # redirect stdout and stderr for logging import sys", "dict from querying database \"\"\" db_cursor = annotation_tasks.cursor() if annotation_type == 'name' or", "FROM named_verification_videos where id=?''', (json_res['id'],)) # todo find out if is good query", "name_locked=0, name_lock_time=? WHERE id=?''', (0.0, item['id'])) ant_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) #", "e: print_log_info(str(e)) else: db_cursor.execute('''SELECT * FROM video_db WHERE id IN (SELECT id FROM", "# We need to loop through every assignment/hit set pending approval for result", "str(response)) try: mturk_cur.execute('''UPDATE hits SET status='approved' WHERE hit_id=?''', (hit_id,)) app.mturk_db_connection.commit() except sqlite3.Error as", "videos is a/b This gives a uniform distribution of chance of getting a", "- verified_times_set['end_time']) > TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed! Attempt had end time \" +", "key :type json_res: dict :param is_mturk: indicates if :return dict from querying database", "verified_action_set['action_verb']: print_log_info(\"Verification Attempt failed! 
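# Worked example of the verification-chance formula above, with illustrative
# numbers only (not values from a real database):
#   verifications_total=2, labels_total=8,
#   verifications_completed=1, labels_completed=4
#   a = max(2 - 1, 0) = 1 remaining verification
#   b = max(2 + 8 - 1 - 4, 1) = 5 remaining videos
#   chance_of_verification_video = 1.0 / 5 = 0.2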
def get_next_available_task(annotation_tasks, annotation_type):
    """ Wrapper for querying the database for the next available (unlocked,
    unlabelled) task.
    :param annotation_tasks: connection to the video annotation database
    :type annotation_tasks: sqlite3.Connection
    :param annotation_type: client-defined string for the type of the annotations we're doing
    :type annotation_type: string
    :return dict from querying database
    """
    # get db cursor
    db_cursor = annotation_tasks.cursor()
    # Get the next task
    if annotation_type == 'name':
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=0 AND name_locked=0
                                 AND id not in (SELECT id from named_verification_videos)''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    else:  # So annotation_type == 'trim'
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=1 AND red_flag=0 AND trimmed=0
                                 AND trim_locked=0 AND id not in (SELECT id from trimmed_verification_videos)''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    item = db_cursor.fetchone()
    # No task available
    if item is None:
        return None
    # Otherwise return a task.
    else:
        task = item
        cur_time = time.time()
        # update the lock
        if annotation_type == 'name':
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=1, name_lock_time=? WHERE id=?''',
                                  (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        else:  # So annotation_type == 'trim'
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=1, trim_lock_time=? WHERE id=?''',
                                  (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        annotation_tasks.commit()
        return task


def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk):
    """ Updates the data for a labelling task, plus the relevant mturk
    variables if it's an mturk task.
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :param annotation_tasks: connection to the video annotation database
    :type annotation_tasks: sqlite3.Connection
    :param json_res: JSON given by frontend's submit button
    :type json_res: dict
    :param is_mturk: indicates if the request came from an MTurk iFrame
    :type is_mturk: bool
    :return boolean indicating whether the update succeeded
    """
    db_cursor = annotation_tasks.cursor()
    mturk_cur = mturk_db_connection.cursor()
    ant_type = json_res['annotation_type']
    # Update naming task
    if ant_type == 'name':
        try:
            # Decide if the video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM named_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply the new label if it isn't a verification video
            if not is_verification:
                update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'],
                               json_res['user_name'], int(json_res['red_flag']) * 1, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET named=1, name_locked=0, occluded=?, action_noun=?,
                                     action_verb=?, named_by_user=?, red_flag=? WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     verifications_completed = verifications_completed + 1 WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO name_verification_attempts(
                                     hit_id, assignment_id, worker_id, id, action_noun, action_verb)
                                     VALUES (?,?,?,?,?,?)''',
                                  (json_res['hitId'], json_res['assignmentId'], json_res['workerId'],
                                   json_res['id'], json_res['nouns'], json_res['verb']))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     labels_completed = labels_completed + 1 WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    else:  # ie. it's a trimming task
        try:
            # Decide if the video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM trimmed_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply the new label if it isn't a verification video
            if not is_verification:
                update_item = (float(json_res['start_time']), float(json_res['end_time']),
                               json_res['user_name'], int(json_res['red_flag']) * 2, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?, end_time=?,
                                     trimmed_by_user=?, red_flag=? WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     verifications_completed = verifications_completed + 1 WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO trim_verification_attempts(
                                     hit_id, assignment_id, worker_id, id, start_time, end_time)
                                     VALUES (?,?,?,?,?,?)''',
                                  (json_res['hitId'], json_res['assignmentId'], json_res['workerId'],
                                   json_res['id'], float(json_res['start_time']), float(json_res['end_time'])))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     labels_completed = labels_completed + 1 WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            # TODO update mturk stuff
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    # color print the red flag
    if json_res['red_flag']:
        print_log_info('\033[93m' + "Task ID ({:d}) Type ({:s}) has been RED_FLAGGED!".format(
            json_res['id'], ant_type) + '\033[0m')
    # return
    return True
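# Note on the red_flag values written by update_task above: a flag raised
# during a naming task is stored as red_flag * 1 and one raised during a
# trimming task as red_flag * 2, which would let the two task types be told
# apart later (collect_db_stats simply counts red_flag >= 1). This reading of
# the *1 / *2 multipliers is an inference from the code, not documented
# behaviour.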
@app.errorhandler(404)
def not_found(error):
    """ Default error handler for 404 """
    return flask.make_response(json.dumps({'error': str(error)}), 404)


@app.route('/get_task', methods=['POST'])
def get_task():
    """ Get a task from the server.
    A request is a json file with the following fields:
    - "annotation_type", which can have the values...
      - name
      - name_preview
      - trim
      - trim_preview
    - "user_name"
    If it is a request from an MTurk iFrame, it also has the following:
    - "workerId"
    - "hitId"
    """
    # Dict holds the results to return to the client
    ret = {}
    # Make sure the content type is json
    try:
        request_type = flask.request.headers.get('Content-Type')
        if request_type != 'application/json':
            raise ValueError('request type must be JSON')
        request_data = flask.request.get_data()
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    except:
        ret['code'] = -2
        ret['error_msg'] = 'unknown parameter error'
        return json.dumps(ret)
    # Decode json from request data into a dict, and make sure all required data is present
    try:
        json_file = json.JSONDecoder().decode(request_data)
        print_log_info("Task request: {:s}".format(json_file))
        is_mturk = "assignmentId" in json_file and "workerId" in json_file and \
                   "hitId" in json_file
        if 'annotation_type' not in json_file:
            raise ValueError('annotation_type missing in request')
        else:
            # more sanity checks
            ant_type = json_file['annotation_type']
            if not ((ant_type == 'name') or (ant_type == 'trim') or
                    (ant_type == 'name_preview') or (ant_type == 'trim_preview')):
                raise ValueError('unknown annotation_type')
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    # Decide if we need a verification task
    if ant_type == 'name_preview' or ant_type == 'trim_preview':
        needs_verification_task = True
    elif (ant_type == 'name' or ant_type == 'trim') and is_mturk:
        needs_verification_task = \
            decide_if_needs_verification(json_file, app.mturk_db_connection)
    else:
        needs_verification_task = False
    # Get a verification task or the next available task, and return it to the user
    try:
        if needs_verification_task:
            task = get_verification_task(app.annotation_tasks, ant_type)
        else:
            task = get_next_available_task(app.annotation_tasks, ant_type)
        if not task:
            raise ValueError('can not get a valid task. please re-try.')
        else:
            ret = task
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    return json.dumps(ret)
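# A minimal example request against /get_task. The port is whatever --port
# was set to; 5000 below is only a placeholder, and "alice" is an invented
# user name for illustration:
#
#   curl -X POST http://localhost:5000/get_task \
#        -H 'Content-Type: application/json' \
#        -d '{"annotation_type": "name", "user_name": "alice"}'
#
# MTurk iFrame requests would additionally carry assignmentId, workerId and
# hitId, which switches on the verification-video logic above.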
@app.route('/return_task', methods=['POST'])
def return_task():
    """ Processes the JSON sent from the client to submit a label.
    The JSON has the following fields:
    - id, which is the video ID
    - annotation_type, which can be "name" or "trim"
    - user_name
    If the request is coming from an mturk iFrame, it should have:
    - assignmentId
    - workerId
    - hitId
    If annotation_type is "name", it should also have the following:
    - verb, a string representing the word selected from the dropdown menu
    - occluded, a boolean from the checkbox in the page
    - nouns, a string filled out by the user for the objects being handled
    TODO figure out the trim stuff
    """
    # Dict holds the results to return to the client
    ret = {}
    try:
        # make sure the content type is json
        request_type = flask.request.headers.get('Content-Type')
        if request_type != 'application/json':
            raise ValueError('request type must be JSON')
        request_data = flask.request.get_data()
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    except:
        ret['code'] = -2
        ret['error_msg'] = 'unknown parameter error'
        return json.dumps(ret)
    # decode json from request data into a dict
    try:
        json_file = json.JSONDecoder().decode(request_data)
        print_log_info("Task returned: {:s}".format(json_file))
        if 'annotation_type' not in json_file:
            raise ValueError('annotation_type missing in request')
        if 'id' not in json_file:
            raise ValueError('id missing in request')
        else:
            # more sanity checks
            ant_type = json_file['annotation_type']
            if not ((ant_type == 'name') or (ant_type == 'trim')):
                raise ValueError('unknown annotation_type')
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    is_mturk = "assignmentId" in json_file and "workerId" in json_file and \
               "hitId" in json_file
    # Update the task with the submitted label
    try:
        flag = update_task(app.mturk_db_connection, app.annotation_tasks, json_file, is_mturk)
        if not flag:
            raise ValueError('can not update the task. Please re-try.')
        else:
            ret['code'] = 0
            ret['error_msg'] = 'success'
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    more_to_complete = not is_mturk or \
        not task_completed(json_file, app.mturk_db_connection)
    if not more_to_complete:
        try:
            mturk_db_connection = app.mturk_db_connection
            mturk_cur = mturk_db_connection.cursor()
            mturk_cur.execute('''UPDATE hits SET status='pending_approval' WHERE assignment_id=?''',
                              (json_file["assignmentId"],))
            mturk_db_connection.commit()
        except sqlite3.Error as err:
            ret['code'] = -3
            ret['error_msg'] = str(err)
            return json.dumps(ret)
    ret['more_to_complete'] = more_to_complete
    return json.dumps(ret)


@app.route('/hello')
def hello():
    return 'hello world'
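# A sketch of a /return_task payload for a naming task, using the fields the
# docstring above lists (all values here are invented for illustration):
#
#   {
#     "id": 42,
#     "annotation_type": "name",
#     "user_name": "alice",
#     "verb": "open",
#     "nouns": "fridge door",
#     "occluded": 0,
#     "red_flag": 0
#   }
#
# update_task additionally reads start_time and end_time for "trim" submissions.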
:param annotation_tasks: connection to", "collect_db_stats(): \"\"\" Collect DB stats \"\"\" ant_tasks = app.annotation_tasks db_cursor = ant_tasks.cursor() #", "help='SSL keyfile location', default='', type=str) args = parser.parse_args() return args def start_from_terminal(): \"\"\"", "name_locked integer, # name_lock_time real, # named_by_user text, # occluded integer, # trimmed", "WHERE name_locked=1 AND named=0''') locked_items = db_cursor.fetchall() for item in locked_items: delay =", "trimming task try: # Decide if video we are updating is a verification", "# more sanity check ant_type = json_file['annotation_type'] if not ((ant_type == 'name') or", ":param mturk_db_connection: connection to database containing mturk-related data :type mturk_db_connection: sqlite3.Connection :return boolean", "of the annotations we're doing :type annotation_type: string :return dict from querying database", "update the task. Please re-try.') else: ret['code'] = 0 ret['error_msg'] = 'success' except", "type=int) parser.add_argument('--video_db', dest='video_db', help='SQLite3 database with normal videos', default='video_db.db', type=str) parser.add_argument('--mturk_db', dest='mturk_db', help='SQLite3", "Apply new label if it isn't a verification video if not is_verification: update_item", "e: print_log_info(str(e)) continue if all_verifications_correct: # TODO Find out if this needs to", "= 0 ret['error_msg'] = 'success' except ValueError as err: ret['code'] = -3 ret['error_msg']", "flask.make_response(json.dumps({'error': str(error)}), 404) @app.route('/get_task', methods=['POST']) def get_task(): \"\"\" Get a task from the", "default='', type=str) parser.add_argument('--certfile', dest='certfile', help='SSL certfile location', default='', type=str) parser.add_argument('--keyfile', dest='keyfile', help='SSL keyfile", "def save_db(): app.annotation_tasks.close() app.mturk_db_connection.close() import atexit atexit.register(save_db) # set up one server server.start(1)", "#import traceback # flask import flask from flask_cors import CORS, cross_origin import tornado.wsgi", "being handled TODO figure out the trim stuff \"\"\" # Dict holds the", "= tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app)) else: server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app), ssl_options={ \"certfile\": args.certfile, \"keyfile\": args.keyfile, }) server.bind(args.port)", "needs_verification_task = \\ decide_if_needs_verification(json_file, app.mturk_db_connection) else: needs_verification_task = False # Get a verification", "worker_id=?, labels_completed = labels_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() #", "So annotation_type == 'trim' try: db_cursor.execute('''UPDATE video_db SET trim_locked=1, trim_lock_time=? 
def collect_db_stats():
    """ Collect DB stats """
    ant_tasks = app.annotation_tasks
    db_cursor = ant_tasks.cursor()
    # collect some stats
    try:
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE named=1''')
        num_clips_named = db_cursor.fetchone()['count(*)']
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE trimmed=1''')
        num_clips_trimmed = db_cursor.fetchone()['count(*)']
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE trim_locked=1 OR name_locked=1''')
        num_clips_locked = db_cursor.fetchone()['count(*)']
        db_cursor.execute('''SELECT count(*) FROM video_db WHERE red_flag>=1''')
        num_clips_flaged = db_cursor.fetchone()['count(*)']
        print_log_info("All Stats: Named {:d}, Trimmed {:d}, flagged {:d}, Locked {:d}".format(
            num_clips_named, num_clips_trimmed, num_clips_flaged, num_clips_locked))
    except sqlite3.Error as e:
        print_log_info(str(e))
    return


def approve_assignments():
    """ Periodic callback decides whether assignments pending approval can be
    automatically approved and then marks them accordingly """
    # TODO verify correct verification labels
    # TODO make Mturk login details command line arguments
    sandbox_host = 'mechanicalturk.sandbox.amazonaws.com'
    real_host = 'mechanicalturk.amazonaws.com'
    host = (sandbox_host if app.sandbox else real_host)
    mturk = boto.mturk.connection.MTurkConnection(
        aws_access_key_id=app.aws_access_key_id,
        aws_secret_access_key=app.aws_secret_access_key,
        host=host,
        debug=1  # debug = 2 prints out all requests.
    )
    mturk_cur = app.mturk_db_connection.cursor()
    db_cursor = app.annotation_tasks.cursor()
    try:
        # TODO make pending approval a separate table if we think that would be time-efficient
        mturk_cur.execute("SELECT assignment_id, hit_id, task FROM hits WHERE status='pending_approval'")
    except sqlite3.Error as e:
        print_log_info(str(e))
        return
    query_result = mturk_cur.fetchall()
    # We need to loop through every assignment/hit set pending approval
    for result in query_result:
        assignment_id = str(result["assignment_id"])
        hit_id = str(result["hit_id"])
        task = str(result["task"])
        all_verifications_correct = True
        print assignment_id
        try:
            if task == "name":
                mturk_cur.execute("SELECT id, action_noun, action_verb FROM name_verification_attempts WHERE hit_id=?", (hit_id,))
                action_query_result = mturk_cur.fetchall()
                for attempt_action_set in action_query_result:
                    db_cursor.execute("SELECT action_noun, action_verb FROM video_db WHERE id=?", (attempt_action_set['id'],))
                    verified_action_set = db_cursor.fetchone()
                    if attempt_action_set['action_noun'] != verified_action_set['action_noun']:
                        print_log_info("Verification Attempt failed! Attempt had noun " + str(attempt_action_set['action_noun'])
                                       + " but the verified had noun " + str(verified_action_set['action_noun']))
                        all_verifications_correct = False
                        break
                    if attempt_action_set['action_verb'] != verified_action_set['action_verb']:
                        print_log_info("Verification Attempt failed! Attempt had verb " + str(attempt_action_set['action_verb'])
                                       + " but the verified had verb " + str(verified_action_set['action_verb']))
                        all_verifications_correct = False
                        break
            else:  # ie. elif task == "trim":
                print "trim thing"
                mturk_cur.execute("SELECT id, start_time, end_time FROM trim_verification_attempts WHERE hit_id=?", (hit_id,))
                times_query_result = mturk_cur.fetchall()
                for attempt_times_set in times_query_result:
                    db_cursor.execute("SELECT start_time, end_time FROM video_db WHERE id=?", (attempt_times_set['id'],))
                    verified_times_set = db_cursor.fetchone()
                    if abs(attempt_times_set['start_time'] - verified_times_set['start_time']) > TRIM_DIFFERENCE_MAX:
                        print_log_info("Verification Attempt failed! Attempt had start time " + str(attempt_times_set['start_time'])
                                       + " but the verified had start time " + str(verified_times_set['start_time']))
                        all_verifications_correct = False
                        break
                    if abs(attempt_times_set['end_time'] - verified_times_set['end_time']) > TRIM_DIFFERENCE_MAX:
                        print_log_info("Verification Attempt failed! Attempt had end time " + str(attempt_times_set['end_time'])
                                       + " but the verified had end time " + str(verified_times_set['end_time']))
                        all_verifications_correct = False
                        break
        except sqlite3.Error as e:
            print_log_info(str(e))
            continue
        if all_verifications_correct:
            # TODO Find out if this needs to be a transaction
            print_log_info("Approving assignment " + assignment_id)
            try:
                response = mturk.approve_assignment(assignment_id)
            except boto.mturk.connection.MTurkRequestError as e:
                print_log_info("MTurk verification rejected. Typically, this means the client's completion "
                               + "has not propagated through Amazon's servers.")
                print_log_info(str(e))
                continue
            print_log_info(assignment_id + " approved. Amazon response: " + str(response))
            try:
                mturk_cur.execute('''UPDATE hits SET status='approved' WHERE hit_id=?''', (hit_id,))
                app.mturk_db_connection.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
        else:
            try:
                mturk_cur.execute('''UPDATE hits SET status='pending_manual_approval' WHERE hit_id=?''', (hit_id,))
                app.mturk_db_connection.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    return
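# Worked example of the trim tolerance above (illustrative numbers): with
# TRIM_DIFFERENCE_MAX = 1.0, an attempt whose start_time is 12.3s against a
# verified start_time of 13.0s differs by 0.7s and passes, while an attempt
# ending at 45.2s against a verified 43.9s differs by 1.3s, fails, and sends
# the HIT to pending_manual_approval.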
def expire_locked_items():
    """ Expires a locked item based on its time stamp """
    ant_tasks = app.annotation_tasks
    db_cursor = ant_tasks.cursor()
    # Task: name
    db_cursor.execute('''SELECT * FROM video_db WHERE name_locked=1 AND named=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['name_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Name)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=0, name_lock_time=?
                                     WHERE id=?''', (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    # Task: trim
    db_cursor.execute('''SELECT * FROM video_db WHERE trim_locked=1 AND trimmed=0''')
    locked_items = db_cursor.fetchall()
    for item in locked_items:
        delay = time.time() - item['trim_lock_time']
        if delay > MAX_DELAY:
            print_log_info("Expiring task {:d} (Trim)".format(item['id']))
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=?
                                     WHERE id=?''', (0.0, item['id']))
                ant_tasks.commit()
            except sqlite3.Error as e:
                print_log_info(str(e))
    return


def load_annotation_tasks(video_db):
    """ Wrapper for loading annotations """
    # id integer primary key,
    # url text,
    # named integer,
    # name_locked integer,
    # name_lock_time real,
    # named_by_user text,
    # occluded integer,
    # trimmed integer,
    # trim_locked integer,
    # trim_lock_time real,
    # trimmed_by_user text,
    # video_src text
    # src_start_time integer,
    # src_end_time integer,
    # pad_start_frame integer,
    # pad_end_frame integer,
    # start_time real,
    # end_time real,
    # action_verb text,
    # action_noun text,
    # red_flag integer
    # Instantiate a connection to db
    annotation_tasks = sqlite3.connect(video_db)
    annotation_tasks.row_factory = dict_factory
    # returns the database
    return annotation_tasks
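# Illustrative DDL sketch (an assumption, not the original schema): the
# column list above corresponds to something like
#   CREATE TABLE video_db (
#       id integer primary key, url text,
#       named integer, name_locked integer, name_lock_time real,
#       named_by_user text, occluded integer,
#       trimmed integer, trim_locked integer, trim_lock_time real,
#       trimmed_by_user text, video_src text,
#       src_start_time integer, src_end_time integer,
#       pad_start_frame integer, pad_end_frame integer,
#       start_time real, end_time real,
#       action_verb text, action_noun text, red_flag integer);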
def decide_if_needs_verification(json_res, mturk_db_connection):
    """ Makes the decision as to whether this request is going to be a
    verification video based on the number of videos left.
    The chance of getting a verification video is a/b, where a is the number
    of verifications left and b is the number of all tasks left.
    This gives a uniform distribution of chance of getting a verification
    video across all requests. Called by get_task().
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :return boolean representing whether verification video will be returned
    """
    print json_res
    mturk_cur = mturk_db_connection.cursor()
    try:
        mturk_cur.execute('''SELECT verifications_total, labels_total,
                             verifications_completed, labels_completed
                             FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    except sqlite3.Error as e:
        print_log_info(str(e))
    query_result = mturk_cur.fetchone()
    print_log_info(json_res['hitId'])
    verifications_total, labels_total, verifications_completed, labels_completed = \
        query_result["verifications_total"], query_result["labels_total"], \
        query_result["verifications_completed"], query_result["labels_completed"]
    chance_of_verification_video = (float(max(verifications_total - verifications_completed, 0))
                                    / max(verifications_total + labels_total
                                          - verifications_completed - labels_completed, 1))
    return chance_of_verification_video > random.random()
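# Worked example of the a/b chance above (illustrative numbers): with
# verifications_total=3, labels_total=10, verifications_completed=1 and
# labels_completed=4, the chance of serving a verification clip is
# (3 - 1) / (3 + 10 - 1 - 4) = 2/8 = 0.25, and it rises towards 1.0 as the
# remaining work becomes verification-only.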
def get_verification_task(annotation_tasks, annotation_type):
    """ Wrapper for querying database for a verification task.
    :param annotation_tasks: connection to database containing mturk-related data
    :type annotation_tasks: sqlite3.Connection
    :param annotation_type: client-defined string for the type of the annotations we're doing
    :type annotation_type: string
    :return dict from querying database
    """
    db_cursor = annotation_tasks.cursor()
    if annotation_type == 'name' or annotation_type == 'name_preview':
        try:
            # from https://stackoverflow.com/questions/4114940/select-random-rows-in-sqlite
            db_cursor.execute('''SELECT * FROM video_db WHERE id IN
                                 (SELECT id FROM named_verification_videos ORDER BY RANDOM() LIMIT 1)''')
        except sqlite3.Error as e:
            print_log_info(str(e))
    else:
        db_cursor.execute('''SELECT * FROM video_db WHERE id IN
                             (SELECT id FROM trimmed_verification_videos ORDER BY RANDOM() LIMIT 1)''')
    return db_cursor.fetchone()


def task_completed(json_res, mturk_db_connection):
    """ Tells whether an mturk task has been completed
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :return: boolean representing if task referred to in json_res' hitId has been completed
    """
    mturk_cur = mturk_db_connection.cursor()
    try:
        mturk_cur.execute('''SELECT verifications_total, labels_total,
                             verifications_completed, labels_completed
                             FROM hits WHERE hit_id=?''', (json_res['hitId'],))
    except sqlite3.Error as e:
        print_log_info(str(e))
    query_result = mturk_cur.fetchone()
    verifications_total, labels_total, verifications_completed, labels_completed = \
        query_result["verifications_total"], query_result["labels_total"], \
        query_result["verifications_completed"], query_result["labels_completed"]
    return verifications_total - verifications_completed <= 0 \
        and labels_total - labels_completed <= 0
def get_next_available_task(annotation_tasks, annotation_type):
    """ Wrapper for querying database for a new labelling task. Called by get_task().
    :param annotation_tasks: connection to database containing mturk-related data
    :type annotation_tasks: sqlite3.Connection
    :param annotation_type: client-defined string for the type of the annotations we're doing
    :type annotation_type: string
    :return dict from querying database
    """
    db_cursor = annotation_tasks.cursor()
    # Get the next task
    if annotation_type == 'name':
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=0 AND name_locked=0
                                 AND id not in (SELECT id from named_verification_videos)
                                 ''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    else:
        # So annotation_type == 'trim'
        try:
            db_cursor.execute('''SELECT * FROM video_db WHERE named=1 AND red_flag=0
                                 AND trimmed=0 AND trim_locked=0
                                 AND id not in (SELECT id from trimmed_verification_videos)
                                 ''')  # LIMIT 1 maybe?
        except sqlite3.Error as e:
            print_log_info(str(e))
    item = db_cursor.fetchone()
    # No task available
    if item is None:
        return None
    # Otherwise return a task.
    else:
        task = item
        cur_time = time.time()
        # update the lock
        if annotation_type == 'name':
            try:
                db_cursor.execute('''UPDATE video_db SET name_locked=1, name_lock_time=?
                                     WHERE id=?''', (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        else:
            # So annotation_type == 'trim'
            try:
                db_cursor.execute('''UPDATE video_db SET trim_locked=1, trim_lock_time=?
                                     WHERE id=?''', (cur_time, task['id']))
            except sqlite3.Error as e:
                print_log_info(str(e))
        annotation_tasks.commit()
        return task
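# Note on the locking flow implemented above: serving a task sets
# name_locked/trim_locked together with a lock timestamp, and the periodic
# expire_locked_items() callback clears any lock older than MAX_DELAY (120s),
# so clips abandoned by annotators return to the pool automatically.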
def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk):
    """ Updates the data for a labelling task plus relevant mturk variables if it's an mturk task.
    :param mturk_db_connection: connection to database containing mturk-related data
    :type mturk_db_connection: sqlite3.Connection
    :param annotation_tasks: connection to database containing mturk-related data
    :type annotation_tasks: sqlite3.Connection
    :param json_res: JSON given by frontend's submit button; must have hitId key
    :type json_res: dict
    :param is_mturk: indicates if the request comes from an mturk iFrame
    :return dict from querying database
    """
    # get db cursor
    db_cursor = annotation_tasks.cursor()
    mturk_cur = mturk_db_connection.cursor()
    # get annotation_type and video id
    ant_type = json_res['annotation_type']
    # Update naming task
    if ant_type == 'name':
        try:
            # Decide if video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM named_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply new label if it isn't a verification video
            if not is_verification:
                update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'],
                               json_res['user_name'], int(json_res['red_flag'])*1, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET named=1, name_locked=0, occluded=?,
                                     action_noun=?, action_verb=?, named_by_user=?, red_flag=?
                                     WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     verifications_completed = verifications_completed + 1
                                     WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO name_verification_attempts(
                                     hit_id, assignment_id, worker_id, id, action_noun, action_verb)
                                     VALUES (?,?,?,?,?,?)''',
                                  (json_res['hitId'], json_res['assignmentId'], json_res['workerId'],
                                   json_res['id'], json_res['nouns'], json_res['verb']))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     labels_completed = labels_completed + 1 WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    else:
        # ie. it's a trimming task
        try:
            # Decide if video we are updating is a verification video
            db_cursor.execute('''SELECT * FROM trimmed_verification_videos where id=?''', (json_res['id'],))
            # todo find out if is good query
            is_verification = not (db_cursor.fetchone() is None)
            # Apply new label if it isn't a verification video
            if not is_verification:
                update_item = (float(json_res['start_time']), float(json_res['end_time']),
                               json_res['user_name'], int(json_res['red_flag'])*2, int(json_res['id']))
                db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?,
                                     end_time=?, trimmed_by_user=?, red_flag=?
                                     WHERE id=?''', update_item)
            # Update MTurk database to reflect this change
            if is_mturk and is_verification:
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     verifications_completed = verifications_completed + 1
                                     WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_cur.execute('''INSERT INTO trim_verification_attempts(
                                     hit_id, assignment_id, worker_id, id, start_time, end_time)
                                     VALUES (?,?,?,?,?,?)''',
                                  (json_res['hitId'], json_res['assignmentId'], json_res['workerId'],
                                   json_res['id'], float(json_res['start_time']), float(json_res['end_time'])))
                mturk_db_connection.commit()
            elif is_mturk and not is_verification:
                print(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])
                mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?,
                                     labels_completed = labels_completed + 1 WHERE hit_id=?''',
                                  (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))
                mturk_db_connection.commit()
            # TODO update mturk stuff
            annotation_tasks.commit()
        except sqlite3.Error as e:
            print_log_info(str(e))
            return False
    # color print the red flag
    if json_res['red_flag']:
        print_log_info('\033[93m' + "Task ID ({:d}) Type ({:s}) has been RED_FLAGGED!".format(
            json_res['id'], ant_type) + '\033[0m')
    # return
    return True
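# Illustrative payload consumed by update_task for a naming task (made-up
# values; keys match those read above):
#   {"id": 42, "annotation_type": "name", "user_name": "annotator_1",
#    "verb": "open", "nouns": "fridge door", "occluded": 0, "red_flag": 0,
#    "assignmentId": "A1...", "workerId": "W1...", "hitId": "H1..."}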
elif task == \"trim\": print \"trim thing\"", "hits WHERE hit_id=?''', (json_res['hitId'],)) except sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone() print_log_info(json_res['hitId'])", "ret['error_msg'] = 'unknown parameter error' return json.dumps(ret) # Decode json from request data", "'annotation_type' not in json_file: raise ValueError('annotation_type missing in request') else: # more sanity", "request is a json file with the following fields: - \"annotation_type\" which can", "* FROM video_db WHERE trim_locked=1 AND trimmed=0''') locked_items = db_cursor.fetchall() for item in", "# maximum difference between correct start_time/end_time and verification attempt's start_time/end_time in seconds TRIM_DIFFERENCE_MAX", "json.dumps(ret) return json.dumps(ret) @app.route('/return_task', methods=['POST']) def return_task(): \"\"\" Processes the JSON sent from", "named_verification_videos ORDER BY RANDOM() LIMIT 1)''') except sqlite3.Error as e: print_log_info(str(e)) else: db_cursor.execute('''SELECT", "server server.start(1) print_log_info(\"Tornado server starting on port {}\".format(args.port)) # show stats every time", "db_cursor.fetchone() def task_completed(json_res, mturk_db_connection): \"\"\" Tells whether an mturk task has been completed", "= mturk_db_connection.cursor() # get annotation_type and video id ant_type = json_res['annotation_type'] # Update", "load annotation tasks app.annotation_tasks = load_annotation_tasks(args.video_db) app.mturk_db_connection = load_annotation_tasks(args.mturk_db) # Set global variables", "url text, # named integer, # name_locked integer, # name_lock_time real, # named_by_user", "except sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone() verifications_total, labels_total, verifications_completed, labels_completed =", "assignment_id try: if task == \"name\": mturk_cur.execute(\"SELECT id, action_noun, action_verb FROM name_verification_attempts WHERE", "Stats: Named {:d}, Trimmed {:d}, flagged {:d}, Locked {:d}\".format( num_clips_named, num_clips_trimmed, num_clips_flaged, num_clips_locked))", "request_data = flask.request.get_data() except ValueError as err: ret['code'] = -1 ret['error_msg'] = str(err)", "'hello world' def parse_args(): \"\"\" Parse input arguments \"\"\" parser = argparse.ArgumentParser(description='Setup a", "mturk_db_connection.cursor() # get annotation_type and video id ant_type = json_res['annotation_type'] # Update naming", "print_log_info(\"All Stats: Named {:d}, Trimmed {:d}, flagged {:d}, Locked {:d}\".format( num_clips_named, num_clips_trimmed, num_clips_flaged,", "# Get a verification task or next available task, and return to user", "# red_flag integer # Instantiate a connection to db annotation_tasks = sqlite3.connect(video_db) annotation_tasks.row_factory", "getting a verification video across all requests. Called by get_task(). :param json_res: JSON", "name_locked=0 AND id not in (SELECT id from named_verification_videos) ''') # LIMIT 1", "is coming from an mturk iFrame, it should have: - assignmentId - workerId", "print_log_info(str(e)) return False else: # ie. it's a trimming task try: # Decide", "for result in query_result: assignment_id = str(result[\"assignment_id\"]) hit_id = str(result[\"hit_id\"]) task = str(result[\"task\"])", "maybe? 
except sqlite3.Error as e: print_log_info(str(e)) else: # So annotation_type == 'trim' try:", "# get annotation_type and video id ant_type = json_res['annotation_type'] # Update naming task", "the user for the objects being handled TODO figure out the trim stuff", "= app.annotation_tasks.cursor() try: # TODO make pending approval a separate table if we", "ie. elif task == \"trim\": print \"trim thing\" mturk_cur.execute(\"SELECT id, start_time, end_time FROM", "update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'], json_res['user_name'], int(json_res['red_flag'])*1, int(json_res['id'])) db_cursor.execute('''UPDATE video_db SET named=1, name_locked=0,", "had noun \" + str(verified_action_set['action_noun'])) all_verifications_correct = False break else: # ie. elif", "button; must have hitId key :type json_res: dict :param is_mturk: indicates if :return", "end_time FROM video_db WHERE id=?\", (attempt_times_set['id'],)) verified_times_set = db_cursor.fetchone() if abs(attempt_times_set['start_time'] - verified_times_set['start_time'])", "is_mturk: indicates if :return dict from querying database \"\"\" # get db cursor", "gives a uniform distribution of chance of getting a verification video across all", "= dict_factory # returns the database return annotation_tasks def decide_if_needs_verification(json_res, mturk_db_connection): \"\"\" Makes", "type is json request_type = flask.request.headers.get('Content-Type') if request_type != 'application/json': raise ValueError('request type", "TODO update mturk stuff annotation_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) return False #", "dict try: json_file = json.JSONDecoder().decode(request_data) print_log_info(\"Task returned: {:s}\".format(json_file)) if 'annotation_type' not in json_file:", "annotation_tasks.cursor() # Get the next task if annotation_type == 'name': try: db_cursor.execute('''SELECT *", "= db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db WHERE red_flag>=1''') num_clips_flaged = db_cursor.fetchone()['count(*)'] print_log_info(\"All Stats:", "print_log_info(assignment_id + \" approved. 
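# Illustrative /get_task request body (made-up identifiers):
#   {"annotation_type": "trim", "user_name": "annotator_1",
#    "assignmentId": "A1...", "workerId": "W1...", "hitId": "H1..."}
# On success the response is the selected video_db row serialised as JSON;
# on failure it is {"code": <negative int>, "error_msg": "..."}.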
@app.route('/return_task', methods=['POST'])
def return_task():
    """ Processes the JSON sent from the client to submit a label
    JSON has the following fields:
    - id, which is the video ID
    - annotation_type, which can be "name" or "trim"
    - user_name
    If the request is coming from an mturk iFrame, it should have:
    - assignmentId
    - workerId
    - hitId
    If annotation_type is "name", it should have the following:
    - verb, a verb selected from the dropdown menu
    - occluded, a boolean from the checkbox in the page
    - nouns, a string filled out by the user for the objects being handled
    TODO figure out the trim stuff
    """
    # Dict holds the results to return to client
    ret = {}
    try:
        # make sure the content type is json
        request_type = flask.request.headers.get('Content-Type')
        if request_type != 'application/json':
            raise ValueError('request type must be JSON')
        request_data = flask.request.get_data()
    except ValueError as err:
        ret['code'] = -1
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    except:
        ret['code'] = -2
        ret['error_msg'] = 'unknown parameter error'
        return json.dumps(ret)
    # decode json from request data into a dict
    try:
        json_file = json.JSONDecoder().decode(request_data)
        print_log_info("Task returned: {:s}".format(json_file))
        if 'annotation_type' not in json_file:
            raise ValueError('annotation_type missing in request')
        if 'id' not in json_file:
            raise ValueError('id missing in request')
        else:
            # more sanity check
            ant_type = json_file['annotation_type']
            if not ((ant_type == 'name') or (ant_type == 'trim')):
                raise ValueError('unknown annotation_type')
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    is_mturk = "assignmentId" in json_file and "workerId" in json_file and \
        "hitId" in json_file
    # Update the task
    try:
        flag = update_task(app.mturk_db_connection, app.annotation_tasks, json_file, is_mturk)
        if not flag:
            raise ValueError('can not update the task. Please re-try.')
        else:
            ret['code'] = 0
            ret['error_msg'] = 'success'
    except ValueError as err:
        ret['code'] = -3
        ret['error_msg'] = str(err)
        return json.dumps(ret)
    more_to_complete = not is_mturk or \
        not task_completed(json_file, app.mturk_db_connection)
    if not more_to_complete:
        try:
            mturk_db_connection = app.mturk_db_connection
            mturk_cur = mturk_db_connection.cursor()
            mturk_cur.execute('''UPDATE hits SET status='pending_approval'
                                 WHERE assignment_id=?''', (json_file["assignmentId"],))
            mturk_db_connection.commit()
        except sqlite3.Error as err:
            ret['code'] = -3
            ret['error_msg'] = str(err)
            return json.dumps(ret)
    ret['more_to_complete'] = more_to_complete
    return json.dumps(ret)
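# Illustrative /return_task payload for a trimming task (made-up values;
# keys match those read by update_task above):
#   {"id": 42, "annotation_type": "trim", "user_name": "annotator_1",
#    "start_time": 12.3, "end_time": 45.2, "red_flag": 0,
#    "assignmentId": "A1...", "workerId": "W1...", "hitId": "H1..."}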
@app.route('/hello')
def hello():
    return 'hello world'


def parse_args():
    """ Parse input arguments """
    parser = argparse.ArgumentParser(description='Setup a web server for video annotation')
    parser.add_argument('--port', dest='port',
                        help='which port to serve content on', default=5050, type=int)
    parser.add_argument('--video_db', dest='video_db',
                        help='SQLite3 database with normal videos', default='video_db.db', type=str)
    parser.add_argument('--mturk_db', dest='mturk_db',
                        help='SQLite3 database with logs for mturk', default='mturk_db.db', type=str)
    parser.add_argument('--sandbox', dest='sandbox',
                        help='If this is a sandbox HIT (otherwise is a real one)',
                        default=False, action='store_true')
    parser.add_argument('--aws_key_id', dest='aws_access_key_id',
                        help='AWS Access Key ID', default='', type=str)
    parser.add_argument('--aws_key', dest='aws_secret_access_key',
                        help='AWS Secret Access Key', default='', type=str)
    parser.add_argument('--certfile', dest='certfile',
                        help='SSL certfile location', default='', type=str)
    parser.add_argument('--keyfile', dest='keyfile',
                        help='SSL keyfile location', default='', type=str)
    args = parser.parse_args()
    return args
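# Illustrative invocation (script name and key values assumed, not from the
# original file):
#   python annotation_server.py --port 5050 \
#       --video_db video_db.db --mturk_db mturk_db.db \
#       --sandbox --aws_key_id AKIA... --aws_key <secret>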
app.mturk_db_connection.commit() except sqlite3.Error as e: print_log_info(str(e)) return def expire_locked_items(): \"\"\"", "is_mturk and is_verification: mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1", "ant_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) return def load_annotation_tasks(video_db): \"\"\" Wrapper for loading", "correct start_time/end_time and verification attempt's start_time/end_time in seconds TRIM_DIFFERENCE_MAX = 1.0 def dict_factory(cursor,", "Update naming task if ant_type == 'name': try: # Decide if video we", "# named integer, # name_locked integer, # name_lock_time real, # named_by_user text, #", "= -2 ret['error_msg'] = 'unknown parameter error' return json.dumps(ret) # decode json from", "to be a transaction print_log_info(\"Approving assignment \" + assignment_id) try: response = mturk.approve_assignment(assignment_id)", "sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone() print_log_info(json_res['hitId']) verifications_total, labels_total, verifications_completed, labels_completed =", "action_verb=?, named_by_user=?, red_flag=? WHERE id=?''', update_item) # Update MTurk database to reflect this", "'' and args.keyfile == '': server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app)) else: server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app), ssl_options={", "parse_args(): \"\"\" Parse input arguments \"\"\" parser = argparse.ArgumentParser(description='Setup a web server for", "save_db(): app.annotation_tasks.close() app.mturk_db_connection.close() import atexit atexit.register(save_db) # set up one server server.start(1) print_log_info(\"Tornado", "a verification videos is a/b This gives a uniform distribution of chance of", "db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=? 
WHERE id=?''', (0.0, item['id'])) ant_tasks.commit() except sqlite3.Error as", "= app.mturk_db_connection.cursor() db_cursor = app.annotation_tasks.cursor() try: # TODO make pending approval a separate", "number of videos left The chance of getting a verification videos is a/b", "iFrame, it also has the following: - \"workerId\" - \"hitId\" \"\"\" # Dict", "\"\"\" Get a task from the server A request is a json file", "= -2 ret['error_msg'] = 'unknown parameter error' return json.dumps(ret) # Decode json from", "task = get_verification_task(app.annotation_tasks, ant_type) else: task = get_next_available_task(app.annotation_tasks, ant_type) if not task: raise", "return json.dumps(ret) # Decode json from request data into a dict, and make", "trimmed integer, # trim_locked integer, # trim_lock_time real, # trimmed_by_user text, # video_src", "FROM video_db WHERE trim_locked=1 OR name_locked=1''') num_clips_locked = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db", "mturk_db_connection: connection to database containing mturk-related data :type mturk_db_connection: sqlite3.Connection :return: boolean representing", "logs for mturk', default='mturk_db.db', type=str) parser.add_argument('--sandbox', dest='sandbox', help='If this is a sandbox HIT", "request_type = flask.request.headers.get('Content-Type') if request_type != 'application/json': raise ValueError('request type must be JSON')", "none provided if args.certfile == '' and args.keyfile == '': server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app))", "try: # Decide if video we are updating is a verification video db_cursor.execute('''SELECT", "in enumerate(cursor.description): d[col[0]] = row[idx] return d def print_log_info(str_info): \"\"\"Helper function for logging", "is_verification: update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'], json_res['user_name'], int(json_res['red_flag'])*1, int(json_res['id'])) db_cursor.execute('''UPDATE video_db SET named=1,", "json_file: raise ValueError('annotation_type missing in request') else: # more sanity check ant_type =", "- item['name_lock_time'] if delay > MAX_DELAY: print_log_info(\"Expiring task {:d} (Name)\".format(item['id'])) try: db_cursor.execute('''UPDATE video_db", "trimmed=0''') locked_items = db_cursor.fetchall() for item in locked_items: delay = time.time() - item['trim_lock_time']", "task def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk): \"\"\" Updates the data for a labelling", "FROM hits WHERE hit_id=?''', (json_res['hitId'],)) except sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone()", "as e: print_log_info(str(e)) else: # So annotation_type == 'trim' try: db_cursor.execute('''UPDATE video_db SET", "text, # action_noun text, # red_flag integer # Instantiate a connection to db", "print_log_info(str(e)) query_result = mturk_cur.fetchone() verifications_total, labels_total, verifications_completed, labels_completed = \\ query_result[\"verifications_total\"], query_result[\"labels_total\"], \\", "all required data is present try: json_file = json.JSONDecoder().decode(request_data) print_log_info(\"Task request: {:s}\".format(json_file)) is_mturk", "in times_query_result: db_cursor.execute(\"SELECT start_time, end_time FROM video_db WHERE id=?\", (attempt_times_set['id'],)) verified_times_set = db_cursor.fetchone()", "args.certfile, \"keyfile\": args.keyfile, }) server.bind(args.port) # setup exist function def save_db(): app.annotation_tasks.close() 
app.mturk_db_connection.close()", "= verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_cur.execute('''INSERT INTO trim_verification_attempts( hit_id,", "is_mturk): needs_verification_task = \\ decide_if_needs_verification(json_file, app.mturk_db_connection) else: needs_verification_task = False # Get a", "parameter error' return json.dumps(ret) # decode json from request data into a dict", "(hit_id,)) app.mturk_db_connection.commit() except sqlite3.Error as e: print_log_info(str(e)) else: try: mturk_cur.execute('''UPDATE hits SET status='pending_manual_approval'", "False break else: # ie. elif task == \"trim\": print \"trim thing\" mturk_cur.execute(\"SELECT", "max(verifications_total + labels_total - verifications_completed - labels_completed, 1)) return chance_of_verification_video > random.random() def", "flask import flask from flask_cors import CORS, cross_origin import tornado.wsgi import tornado.httpserver #", "type is json try: request_type = flask.request.headers.get('Content-Type') if request_type != 'application/json': raise ValueError('request", "stderr for logging import sys # sys.stdout = open('./web_app.log', 'a', 1) # sys.stderr", "import tornado.httpserver # database import sqlite3 # redirect stdout and stderr for logging", "ant_tasks.cursor() # show us some stats try: db_cursor.execute('''SELECT count(*) FROM video_db WHERE named=1''')", "\"\"\" Wrapper for loading annotations \"\"\" # id integer primary key, # url", "out if is good query is_verification = not (db_cursor.fetchone() is None) # Apply", "try: db_cursor.execute('''SELECT count(*) FROM video_db WHERE named=1''') num_clips_named = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM", "dest='port', help='which port to serve content on', default=5050, type=int) parser.add_argument('--video_db', dest='video_db', help='SQLite3 database", "parser.add_argument('--aws_key', dest='aws_secret_access_key', help='AWS Secret Access Key', default='', type=str) parser.add_argument('--certfile', dest='certfile', help='SSL certfile location',", "think that would be time-efficient mturk_cur.execute(\"SELECT assignment_id, hit_id, task FROM hits WHERE status='pending_approval'\")", "Wrapper for querying database for a new labelling task. Called by get_task(). 
:param", "= json.JSONDecoder().decode(request_data) print_log_info(\"Task request: {:s}\".format(json_file)) is_mturk = \"assignmentId\" in json_file and \"workerId\" in", "video will be returned \"\"\" print json_res mturk_cur = mturk_db_connection.cursor() try: mturk_cur.execute('''SELECT verifications_total,", "trim db_cursor.execute('''SELECT * FROM video_db WHERE trim_locked=1 AND trimmed=0''') locked_items = db_cursor.fetchall() for", "= time.time() - item['trim_lock_time'] if delay > MAX_DELAY: print_log_info(\"Expiring task {:d} (Trim)\".format(item['id'])) try:", "\\ query_result[\"verifications_completed\"], query_result[\"labels_completed\"] return verifications_total - verifications_completed <= 0 and labels_total - labels_completed", "or (ant_type == 'trim')): raise ValueError('unknown annotation_type') except ValueError as err: ret['code'] =", "\"assignmentId\" in json_file and \"workerId\" in json_file and \\ \"hitId\" in json_file #", "filled out by the user for the objects being handled TODO figure out", "+ str(attempt_action_set['action_verb']) + \" but the verified had verb \" + str(verified_action_set['action_verb'])) all_verifications_correct", "json_res: dict :param mturk_db_connection: connection to database containing mturk-related data :type mturk_db_connection: sqlite3.Connection", "\"\"\" Wrapper for querying database for a new labelling task. Called by get_task().", "Get a verification task or next available task, and return to user try:", "app.mturk_db_connection = load_annotation_tasks(args.mturk_db) # Set global variables app.aws_access_key_id = args.aws_access_key_id app.aws_secret_access_key = args.aws_secret_access_key", "(json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() annotation_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) return False else:", "sqlite3.Error as e: print_log_info(str(e)) return False # color print the red flag if", "can be \"name\" or \"trim\" - user_name If the request is coming from", "real, # action_verb text, # action_noun text, # red_flag integer # Instantiate a", "to convert sql item into a dict\"\"\" d = {} for idx, col", "is a/b This gives a uniform distribution of chance of getting a verification", "a verification video across all requests. Called by get_task(). 
:param json_res: JSON given", "a task from the server A request is a json file with the", "annotation_tasks.cursor() if annotation_type == 'name' or annotation_type == 'name_preview': try: # from https://stackoverflow.com/questions/4114940/select-random-rows-in-sqlite", "assignment_id, worker_id, id, start_time, end_time) VALUES (?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], float(json_res['start_time']), float(json_res['end_time'])))", "ValueError as err: ret['code'] = -3 ret['error_msg'] = str(err) return json.dumps(ret) is_mturk =", "tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app)) else: server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app), ssl_options={ \"certfile\": args.certfile, \"keyfile\": args.keyfile, }) server.bind(args.port) #", "count(*) FROM video_db WHERE trimmed=1''') num_clips_trimmed = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db WHERE", "command line arguments sandbox_host = 'mechanicalturk.sandbox.amazonaws.com' real_host = 'mechanicalturk.amazonaws.com' host = (sandbox_host if", "mturk_db_connection: sqlite3.Connection :return boolean representing whether verification video will be returned \"\"\" print", "(?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], float(json_res['start_time']), float(json_res['end_time']))) mturk_db_connection.commit() elif is_mturk and not is_verification:", "check ant_type = json_file['annotation_type'] if not ((ant_type == 'name') or (ant_type == 'trim')):", "str(err) return json.dumps(ret) except: ret['code'] = -2 ret['error_msg'] = 'unknown parameter error' return", "# id integer primary key, # url text, # named integer, # name_locked", "mturk_cur.fetchone() continue print_log_info(assignment_id + \" approved. 
Amazon response: \" + str(response)) try: mturk_cur.execute('''UPDATE", "VALUES (?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], json_res['nouns'], json_res['verb'])) mturk_db_connection.commit() elif is_mturk and not", "start time \" + str(verified_times_set['start_time'])) all_verifications_correct = False break if abs(attempt_times_set['end_time'] - verified_times_set['end_time'])", "json.JSONDecoder().decode(request_data) print_log_info(\"Task request: {:s}\".format(json_file)) is_mturk = \"assignmentId\" in json_file and \"workerId\" in json_file", "app.annotation_tasks = load_annotation_tasks(args.video_db) app.mturk_db_connection = load_annotation_tasks(args.mturk_db) # Set global variables app.aws_access_key_id = args.aws_access_key_id", "= labels_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() annotation_tasks.commit() except sqlite3.Error", "= str(err) return json.dumps(ret) return json.dumps(ret) @app.route('/return_task', methods=['POST']) def return_task(): \"\"\" Processes the", "# decode json from request data into a dict try: json_file = json.JSONDecoder().decode(request_data)", "mturk_db_connection = app.mturk_db_connection mturk_cur = mturk_db_connection.cursor() mturk_cur.execute('''UPDATE hits SET status='pending_approval' WHERE assignment_id=?''', (json_file[\"assignmentId\"],))", "BY RANDOM() LIMIT 1)''') return db_cursor.fetchone() def task_completed(json_res, mturk_db_connection): \"\"\" Tells whether an", "verification video if not is_verification: update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'], json_res['user_name'], int(json_res['red_flag'])*1, int(json_res['id']))", "db_cursor.execute(\"SELECT action_noun, action_verb FROM video_db WHERE id=?\", (attempt_action_set['id'],)) verified_action_set = db_cursor.fetchone() if attempt_action_set['action_verb']", "mturk variables if it's an mturk task. 
:param mturk_db_connection: connection to database containing", "sure the content type is json try: request_type = flask.request.headers.get('Content-Type') if request_type !=", "MTurk database to reflect this change if is_mturk and is_verification: mturk_cur.execute('''UPDATE hits SET", "# Decide if video we are updating is a verification video db_cursor.execute('''SELECT *", "0 and labels_total - labels_completed <= 0 def get_next_available_task(annotation_tasks, annotation_type): \"\"\" Wrapper for", "+ str(attempt_action_set['action_noun']) + \" but the verified had noun \" + str(verified_action_set['action_noun'])) all_verifications_correct", "the server A request is a json file with the following fields: -", "hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() # TODO update mturk stuff annotation_tasks.commit() except sqlite3.Error", "ssl_options={ \"certfile\": args.certfile, \"keyfile\": args.keyfile, }) server.bind(args.port) # setup exist function def save_db():", "= app.annotation_tasks db_cursor = ant_tasks.cursor() # show us some stats try: db_cursor.execute('''SELECT count(*)", "response: \" + str(response)) try: mturk_cur.execute('''UPDATE hits SET status='approved' WHERE hit_id=?''', (hit_id,)) app.mturk_db_connection.commit()", "CORS, cross_origin import tornado.wsgi import tornado.httpserver # database import sqlite3 # redirect stdout", "annotation_type == 'name': try: db_cursor.execute('''SELECT * FROM video_db WHERE named=0 AND name_locked=0 AND", "in json_file and \\ \"hitId\" in json_file # Get next available task try:", "get db cursor db_cursor = annotation_tasks.cursor() # Get the next task if annotation_type", "except: ret['code'] = -2 ret['error_msg'] = 'unknown parameter error' return json.dumps(ret) # Decode", "1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() annotation_tasks.commit() except sqlite3.Error as e: print_log_info(str(e))", "following fields: - id, which is the video ID - annotation_type, which can", "cur_time = time.time() # update the lock if annotation_type == 'name': try: db_cursor.execute('''UPDATE", "ret['code'] = -2 ret['error_msg'] = 'unknown parameter error' return json.dumps(ret) # Decode json", "True print assignment_id try: if task == \"name\": mturk_cur.execute(\"SELECT id, action_noun, action_verb FROM", "flask_cors import CORS, cross_origin import tornado.wsgi import tornado.httpserver # database import sqlite3 #", "not in (SELECT id from named_verification_videos) ''') # LIMIT 1 maybe? except sqlite3.Error", "dropdown menu - occluded, a boolean from the checkbox in the page -", "return False else: # ie. 
it's a trimming task try: # Decide if", "check ant_type = json_file['annotation_type'] if not ((ant_type == 'name') or (ant_type == 'trim')", "'a', 1) # sys.stderr = open('./web_app.err', 'a', 1) import random import boto.mturk.connection #", "real one)', default=False, action='store_true') parser.add_argument('--aws_key_id', dest='aws_access_key_id', help='AWS Access Key ID', default='', type=str) parser.add_argument('--aws_key',", "start_time/end_time in seconds TRIM_DIFFERENCE_MAX = 1.0 def dict_factory(cursor, row): \"\"\"Helper function to convert", "json_res, is_mturk): \"\"\" Updates the data for a labelling task plus relevant mturk", "WHERE id=?''', (0.0, item['id'])) ant_tasks.commit() except sqlite3.Error as e: print_log_info(str(e)) # Task: trim", "sys # sys.stdout = open('./web_app.log', 'a', 1) # sys.stderr = open('./web_app.err', 'a', 1)", "def get_next_available_task(annotation_tasks, annotation_type): \"\"\" Wrapper for querying database for a new labelling task.", "ret = {} try: # make sure the content type is json request_type", "\"workerId\" - \"hitId\" \"\"\" # Dict holds the results to return to client", "labels_completed FROM hits WHERE hit_id=?''', (json_res['hitId'],)) except sqlite3.Error as e: print_log_info(str(e)) query_result =", "line arguments sandbox_host = 'mechanicalturk.sandbox.amazonaws.com' real_host = 'mechanicalturk.amazonaws.com' host = (sandbox_host if app.sandbox", "db_cursor.execute('''SELECT count(*) FROM video_db WHERE trimmed=1''') num_clips_trimmed = db_cursor.fetchone()['count(*)'] db_cursor.execute('''SELECT count(*) FROM video_db", "\"\"\"A simple web server for video annotation\"\"\" # parsing args import argparse #", "WHERE named=1 AND red_flag=0 AND trimmed=0 AND trim_locked=0 AND id not in (SELECT", "If it is a request from an MTurk iFrame, it also has the", "annotation_tasks: sqlite3.Connection :param annotation_type: client-defined string for the type of the annotations we're", "for video annotation\"\"\" # parsing args import argparse # encoding / decoding import", "else: db_cursor.execute('''SELECT * FROM video_db WHERE id IN (SELECT id FROM trimmed_verification_videos ORDER", "= {} for idx, col in enumerate(cursor.description): d[col[0]] = row[idx] return d def", "AND trim_locked=0 AND id not in (SELECT id from trimmed_verification_videos) ''') # LIMIT", "default='video_db.db', type=str) parser.add_argument('--mturk_db', dest='mturk_db', help='SQLite3 database with logs for mturk', default='mturk_db.db', type=str) parser.add_argument('--sandbox',", "sys.stdout = open('./web_app.log', 'a', 1) # sys.stderr = open('./web_app.err', 'a', 1) import random", "hit_id=?''', (json_res['hitId'],)) except sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone() print_log_info(json_res['hitId']) verifications_total, labels_total,", "Attempt had verb \" + str(attempt_action_set['action_verb']) + \" but the verified had verb", "next available task, and return to user try: if needs_verification_task: task = get_verification_task(app.annotation_tasks,", "print_log_info(\"Approving assignment \" + assignment_id) try: response = mturk.approve_assignment(assignment_id) except boto.mturk.connection.MTurkRequestError as e:", "assignment_id=?''', (json_file[\"assignmentId\"],)) mturk_db_connection.commit() except sqlite3.Error as err: ret['code'] = -3 ret['error_msg'] = str(err)", "content on', default=5050, type=int) parser.add_argument('--video_db', dest='video_db', help='SQLite3 database with normal videos', 
default='video_db.db', type=str)", "video or not. Let a = the verification videos left b = total", "task available if item is None: return None # Otherwise return a task.", "json.dumps(ret) # Decide if we need a verification task if ant_type == 'name_preview'", "ant_type == 'name': try: # Decide if video we are updating is a", "get annotation_type and video id ant_type = json_res['annotation_type'] # Update naming task if", "this is a sandbox HIT (otherwise is a real one)', default=False, action='store_true') parser.add_argument('--aws_key_id',", "that would be time-efficient mturk_cur.execute(\"SELECT assignment_id, hit_id, task FROM hits WHERE status='pending_approval'\") except", "action_verb FROM video_db WHERE id=?\", (attempt_action_set['id'],)) verified_action_set = db_cursor.fetchone() if attempt_action_set['action_verb'] != verified_action_set['action_verb']:", "(?,?,?,?,?,?)''', (json_res['hitId'], json_res['assignmentId'], json_res['workerId'], json_res['id'], json_res['nouns'], json_res['verb'])) mturk_db_connection.commit() elif is_mturk and not is_verification:", "response = mturk.approve_assignment(assignment_id) except boto.mturk.connection.MTurkRequestError as e: print_log_info(\"MTurk verification rejected. Typically, this means", "has been completed \"\"\" mturk_cur = mturk_db_connection.cursor() try: mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed", "the type of the annotations we're doing :type annotation_type: string :return dict from", "= False break if attempt_action_set['action_noun'] != verified_action_set['action_noun']: print_log_info(\"Verification Attempt failed! Attempt had noun", "= False break if abs(attempt_times_set['end_time'] - verified_times_set['end_time']) > TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed! 
Attempt", "as e: print_log_info(str(e)) # Task: trim db_cursor.execute('''SELECT * FROM video_db WHERE trim_locked=1 AND", "video annotation\"\"\" # parsing args import argparse # encoding / decoding import json", "\" + str(attempt_times_set['start_time']) + \" but the verified had start time \" +", "as e: print_log_info(str(e)) annotation_tasks.commit() return task def update_task(mturk_db_connection, annotation_tasks, json_res, is_mturk): \"\"\" Updates", "dest='mturk_db', help='SQLite3 database with logs for mturk', default='mturk_db.db', type=str) parser.add_argument('--sandbox', dest='sandbox', help='If this", "= mturk_cur.fetchall() for attempt_action_set in action_query_result: db_cursor.execute(\"SELECT action_noun, action_verb FROM video_db WHERE id=?\",", "Attempt had end time \" + str(attempt_times_set['end_time']) + \" but the verified had", "ret['error_msg'] = str(err) return json.dumps(ret) more_to_complete = not is_mturk or \\ not task_completed(json_file,", "the database return annotation_tasks def decide_if_needs_verification(json_res, mturk_db_connection): \"\"\" Makes the decision as to", "is_verification: mturk_cur.execute('''UPDATE hits SET assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1 WHERE hit_id=?''',", "(db_cursor.fetchone() is None) # Apply new label if it isn't a verification video", "FROM named_verification_videos ORDER BY RANDOM() LIMIT 1)''') except sqlite3.Error as e: print_log_info(str(e)) else:", "integer, # pad_start_frame integer, # pad_end_frame integer, # start_time real, # end_time real,", "= ant_tasks.cursor() # show us some stats try: db_cursor.execute('''SELECT count(*) FROM video_db WHERE", "query_result[\"labels_total\"], \\ query_result[\"verifications_completed\"], query_result[\"labels_completed\"] chance_of_verification_video = (float(max(verifications_total - verifications_completed, 0)) / max(verifications_total +", "if annotation_type == 'name': try: db_cursor.execute('''UPDATE video_db SET name_locked=1, name_lock_time=? 
WHERE id=?''', (cur_time,", "+ \"has not propagated through Amazon's servers.\") print_log_info(str(e)) query_result = mturk_cur.fetchone() continue print_log_info(assignment_id", "Instantiate a connection to db annotation_tasks = sqlite3.connect(video_db) annotation_tasks.row_factory = dict_factory # returns", "err: ret['code'] = -3 ret['error_msg'] = str(err) return json.dumps(ret) ret['more_to_complete'] = more_to_complete return", "parser.add_argument('--aws_key_id', dest='aws_access_key_id', help='AWS Access Key ID', default='', type=str) parser.add_argument('--aws_key', dest='aws_secret_access_key', help='AWS Secret Access", "= (float(max(verifications_total - verifications_completed, 0)) / max(verifications_total + labels_total - verifications_completed - labels_completed,", "= time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime()) print \"{:s} {:s}\".format(prefix, str_info) def collect_db_stats(): \"\"\" Collect DB", "except sqlite3.Error as e: print_log_info(str(e)) # Task: trim db_cursor.execute('''SELECT * FROM video_db WHERE", "traceback # flask import flask from flask_cors import CORS, cross_origin import tornado.wsgi import", "+ 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_db_connection.commit() annotation_tasks.commit() except sqlite3.Error as e:", "and args.keyfile == '': server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app)) else: server = tornado.httpserver.HTTPServer(tornado.wsgi.WSGIContainer(app), ssl_options={ \"certfile\":", "# LIMIT 1 maybe? except sqlite3.Error as e: print_log_info(str(e)) item = db_cursor.fetchone() #", "'success' except ValueError as err: ret['code'] = -3 ret['error_msg'] = str(err) return json.dumps(ret)", "one)', default=False, action='store_true') parser.add_argument('--aws_key_id', dest='aws_access_key_id', help='AWS Access Key ID', default='', type=str) parser.add_argument('--aws_key', dest='aws_secret_access_key',", "hits SET status='approved' WHERE hit_id=?''', (hit_id,)) app.mturk_db_connection.commit() except sqlite3.Error as e: print_log_info(str(e)) else:", "+ \"Task ID ({:d}) Type ({:s}) has been RED_FLAGGED!\".format( json_res['id'], ant_type) + '\\033[0m')", "hello(): return 'hello world' def parse_args(): \"\"\" Parse input arguments \"\"\" parser =", "task {:d} (Trim)\".format(item['id'])) try: db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=? WHERE id=?''', (0.0, item['id']))", "task, and return to user try: if needs_verification_task: task = get_verification_task(app.annotation_tasks, ant_type) else:", "if it isn't a verification video if not is_verification: update_item = (int(json_res['occluded']), json_res['nouns'],", "Secret Access Key', default='', type=str) parser.add_argument('--certfile', dest='certfile', help='SSL certfile location', default='', type=str) parser.add_argument('--keyfile',", "!= verified_action_set['action_verb']: print_log_info(\"Verification Attempt failed! Attempt had verb \" + str(attempt_action_set['action_verb']) + \"", "the following fields: - id, which is the video ID - annotation_type, which", "function \"\"\" # parse params args = parse_args() # load annotation tasks app.annotation_tasks", "* FROM video_db WHERE named=0 AND name_locked=0 AND id not in (SELECT id", "print_log_info(\"Verification Attempt failed! 
Attempt had verb \" + str(attempt_action_set['action_verb']) + \" but the", "WHERE id IN (SELECT id FROM named_verification_videos ORDER BY RANDOM() LIMIT 1)''') except", "(Trim)\".format(item['id'])) try: db_cursor.execute('''UPDATE video_db SET trim_locked=0, trim_lock_time=? WHERE id=?''', (0.0, item['id'])) ant_tasks.commit() except", "assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId'])) mturk_cur.execute('''INSERT", "try: response = mturk.approve_assignment(assignment_id) except boto.mturk.connection.MTurkRequestError as e: print_log_info(\"MTurk verification rejected. Typically, this", "except sqlite3.Error as e: print_log_info(str(e)) else: db_cursor.execute('''SELECT * FROM video_db WHERE id IN", "= task except ValueError as err: ret['code'] = -1 ret['error_msg'] = str(err) return", "server.bind(args.port) # setup exist function def save_db(): app.annotation_tasks.close() app.mturk_db_connection.close() import atexit atexit.register(save_db) #", "1)''') except sqlite3.Error as e: print_log_info(str(e)) else: db_cursor.execute('''SELECT * FROM video_db WHERE id", "parser.add_argument('--keyfile', dest='keyfile', help='SSL keyfile location', default='', type=str) args = parser.parse_args() return args def", "task: raise ValueError('can not get a valid task. please re-try.') else: ret =", "json.dumps(ret) # decode json from request data into a dict try: json_file =", "TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt failed! Attempt had end time \" + str(attempt_times_set['end_time']) + \"", "= time.time() - item['name_lock_time'] if delay > MAX_DELAY: print_log_info(\"Expiring task {:d} (Name)\".format(item['id'])) try:", "not flag: raise ValueError('can not update the task. 
Please re-try.') else: ret['code'] =", "it isn't a verification video if not is_verification: update_item = (float(json_res['start_time']), float(json_res['end_time']), json_res['user_name'],", "get_verification_task(app.annotation_tasks, ant_type) else: task = get_next_available_task(app.annotation_tasks, ant_type) if not task: raise ValueError('can not", "sql item into a dict\"\"\" d = {} for idx, col in enumerate(cursor.description):", "json_file['annotation_type'] if not ((ant_type == 'name') or (ant_type == 'trim') or (ant_type ==", "id=?\", (attempt_times_set['id'],)) verified_times_set = db_cursor.fetchone() if abs(attempt_times_set['start_time'] - verified_times_set['start_time']) > TRIM_DIFFERENCE_MAX: print_log_info(\"Verification Attempt", "time-efficient mturk_cur.execute(\"SELECT assignment_id, hit_id, task FROM hits WHERE status='pending_approval'\") except sqlite3.Error as e:", "A request is a json file with the following fields: - \"annotation_type\" which", "sqlite3.Error as err: ret['code'] = -3 ret['error_msg'] = str(err) return json.dumps(ret) ret['more_to_complete'] =", "\\ not task_completed(json_file, app.mturk_db_connection) if not more_to_complete: try: mturk_db_connection = app.mturk_db_connection mturk_cur =", "from the server A request is a json file with the following fields:", "mturk_db_connection.cursor() try: mturk_cur.execute('''SELECT verifications_total, labels_total, verifications_completed, labels_completed FROM hits WHERE hit_id=?''', (json_res['hitId'],)) except", "dict :param mturk_db_connection: connection to database containing mturk-related data :type mturk_db_connection: sqlite3.Connection :return:", "isn't a verification video if not is_verification: update_item = (int(json_res['occluded']), json_res['nouns'], json_res['verb'], json_res['user_name'],", "and video id ant_type = json_res['annotation_type'] # Update naming task if ant_type ==", ":type annotation_type: string :return dict from querying database \"\"\" # get db cursor", "times_query_result: db_cursor.execute(\"SELECT start_time, end_time FROM video_db WHERE id=?\", (attempt_times_set['id'],)) verified_times_set = db_cursor.fetchone() if", "+ str(verified_times_set['end_time'])) all_verifications_correct = False break except sqlite3.Error as e: print_log_info(str(e)) continue if", "a new labelling task. Called by get_task(). :param annotation_tasks: connection to database containing", "the results to return to client ret = {} try: # make sure", "a verification video db_cursor.execute('''SELECT * FROM named_verification_videos where id=?''', (json_res['id'],)) # todo find", "except sqlite3.Error as e: print_log_info(str(e)) return def expire_locked_items(): \"\"\" Expires a locked item", "mturk_cur.fetchall() for attempt_times_set in times_query_result: db_cursor.execute(\"SELECT start_time, end_time FROM video_db WHERE id=?\", (attempt_times_set['id'],))", "db_cursor.execute('''UPDATE video_db SET trimmed=1, trim_locked=0, start_time=?, end_time=?, trimmed_by_user=?, red_flag=? 
WHERE id=?''', update_item) #", "iFrame, it should have: - assignmentId - workerId - hitId If annotation_type is", "# TODO Find out if this needs to be a transaction print_log_info(\"Approving assignment", "(json_res['hitId'],)) except sqlite3.Error as e: print_log_info(str(e)) query_result = mturk_cur.fetchone() verifications_total, labels_total, verifications_completed, labels_completed", "json_file and \"workerId\" in json_file and \\ \"hitId\" in json_file if 'annotation_type' not", "json_res['assignmentId'], json_res['workerId'], json_res['id'], float(json_res['start_time']), float(json_res['end_time']))) mturk_db_connection.commit() elif is_mturk and not is_verification: print(json_res['assignmentId'], json_res['workerId'],", "approval a separate table if we think that would be time-efficient mturk_cur.execute(\"SELECT assignment_id,", "app.mturk_db_connection.close() import atexit atexit.register(save_db) # set up one server server.start(1) print_log_info(\"Tornado server starting", "SET assignment_id=?, worker_id=?, verifications_completed = verifications_completed + 1 WHERE hit_id=?''', (json_res['assignmentId'], json_res['workerId'], json_res['hitId']))", "approval can be automatically approved and then marks them accordingly \"\"\" # TODO", "= open('./web_app.err', 'a', 1) import random import boto.mturk.connection # Obtain the flask app", "from named_verification_videos) ''') # LIMIT 1 maybe? except sqlite3.Error as e: print_log_info(str(e)) else:", "sent from the client to submit a label JSON has the following fields:", "app.annotation_tasks db_cursor = ant_tasks.cursor() # show us some stats try: db_cursor.execute('''SELECT count(*) FROM", "= mturk.approve_assignment(assignment_id) except boto.mturk.connection.MTurkRequestError as e: print_log_info(\"MTurk verification rejected. Typically, this means the", "ORDER BY RANDOM() LIMIT 1)''') return db_cursor.fetchone() def task_completed(json_res, mturk_db_connection): \"\"\" Tells whether", "Get the next task if annotation_type == 'name': try: db_cursor.execute('''SELECT * FROM video_db", "boolean from the checkbox in the page - nouns, a string filled out", ":type annotation_type: string :return dict from querying database \"\"\" db_cursor = annotation_tasks.cursor() if", "str(err) return json.dumps(ret) # Decide if we need a verification task if ant_type", "# Obtain the flask app object (and make it cors) app = flask.Flask(__name__)", "== \"trim\": print \"trim thing\" mturk_cur.execute(\"SELECT id, start_time, end_time FROM trim_verification_attempts WHERE hit_id=?\",", "data is present try: json_file = json.JSONDecoder().decode(request_data) print_log_info(\"Task request: {:s}\".format(json_file)) is_mturk = \"assignmentId\"", "\" but the verified had verb \" + str(verified_action_set['action_verb'])) all_verifications_correct = False break", "is_mturk): \"\"\" Updates the data for a labelling task plus relevant mturk variables", "id not in (SELECT id from named_verification_videos) ''') # LIMIT 1 maybe? 
except", "except sqlite3.Error as e: print_log_info(str(e)) return def approve_assignments(): \"\"\" Periodic callback decides whether", "name - name_preview - trim - trim_preview - \"user_name\" If it is a", "str(result[\"task\"]) all_verifications_correct = True print assignment_id try: if task == \"name\": mturk_cur.execute(\"SELECT id,", "print assignment_id try: if task == \"name\": mturk_cur.execute(\"SELECT id, action_noun, action_verb FROM name_verification_attempts", "decode json from request data into a dict try: json_file = json.JSONDecoder().decode(request_data) print_log_info(\"Task", "it's a trimming task try: # Decide if video we are updating is", "sqlite3.Error as e: print_log_info(str(e)) return def approve_assignments(): \"\"\" Periodic callback decides whether assignments", "with logs for mturk', default='mturk_db.db', type=str) parser.add_argument('--sandbox', dest='sandbox', help='If this is a sandbox", "time we launch the service collect_db_stats() approve_assignments() tornado.ioloop.PeriodicCallback(expire_locked_items, 20*1000).start() tornado.ioloop.PeriodicCallback(collect_db_stats, 3600*1000).start() tornado.ioloop.PeriodicCallback(approve_assignments, 20*1000).start()", "e: print_log_info(str(e)) item = db_cursor.fetchone() # No task available if item is None:", "trimmed_verification_videos) ''') # LIMIT 1 maybe? except sqlite3.Error as e: print_log_info(str(e)) item =", "+ labels_total - verifications_completed - labels_completed, 1)) return chance_of_verification_video > random.random() def get_verification_task(annotation_tasks,", "= 'unknown parameter error' return json.dumps(ret) # decode json from request data into", "labels_total, verifications_completed, labels_completed = \\ query_result[\"verifications_total\"], query_result[\"labels_total\"], \\ query_result[\"verifications_completed\"], query_result[\"labels_completed\"] return verifications_total -", "WHERE named=0 AND name_locked=0 AND id not in (SELECT id from named_verification_videos) ''')", "the trim stuff \"\"\" # Dict holds the results to return to client", "database with logs for mturk', default='mturk_db.db', type=str) parser.add_argument('--sandbox', dest='sandbox', help='If this is a", "it cors) app = flask.Flask(__name__) # pylint: disable=invalid-name CORS(app) # Maximum time allowed", "sandbox HIT (otherwise is a real one)', default=False, action='store_true') parser.add_argument('--aws_key_id', dest='aws_access_key_id', help='AWS Access", "\"\"\" entry of the main function \"\"\" # parse params args = parse_args()" ]
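The sampling rule inside decide_if_needs_verification() above rewards a second look: a verification task is drawn with probability a/b, where a is the number of verifications still owed for the HIT and b is the total number of tasks (verifications plus labels) still owed, so every remaining request is equally likely to be a verification regardless of when it arrives. Below is a minimal, self-contained sketch of just that rule; the function name and the counter values are illustrative stand-ins for the hits table, not part of the server code.

import random


def chance_of_verification(verifications_total, labels_total,
                           verifications_completed, labels_completed):
    # a = verification tasks still owed for this HIT.
    a = max(verifications_total - verifications_completed, 0)
    # b = all tasks still owed; the floor of 1 avoids division by zero
    # once the HIT is fully completed.
    b = max(verifications_total + labels_total
            - verifications_completed - labels_completed, 1)
    return float(a) / b


# Hypothetical HIT owing 3 verifications and 7 labels, none completed yet:
# each of the 10 remaining requests has a 3/10 chance of being a verification.
draw_verification = chance_of_verification(3, 7, 0, 0) > random.random()
print(draw_verification)

The max(..., 1) floor in the denominator mirrors the server's guard against dividing by zero once a HIT has no tasks left.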
import numpy as np

from quantnn.normalizer import Normalizer, MinMaxNormalizer


def test_normalizer_2d():
    """
    Checks that all feature indices that are not excluded have zero mean and
    unit std. dev.
    """
    x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1)
    normalizer = Normalizer(x, exclude_indices=range(1, 10, 2))
    x_normed = normalizer(x)

    # Included indices should have zero mean and std. dev. 1.0.
    assert np.all(np.isclose(x_normed[:, ::2].mean(axis=0), 0.0, atol=1e-1))
    assert np.all(np.isclose(x_normed[:, ::2].std(axis=0), 1.0, 1e-1))

    # Excluded indices
    assert np.all(np.isclose(x_normed[:, 1::2].mean(axis=0), np.arange(10)[1::2].reshape(1, -1), 1e-2))
    assert np.all(np.isclose(x_normed[:, 1::2].std(axis=0), 1.0, 1e-2))

    # Channels without variation should be set to -1.0
    x = np.zeros((100, 10))
    normalizer = Normalizer(x)
    x_normed = normalizer(x)
    assert np.all(np.isclose(x_normed, -1.0))


def test_min_max_normalizer_2d():
    """
    Checks that all feature indices that are not excluded are scaled to the
    range [-1.0, 1.0].
    """
    x = np.random.normal(size=(100000, 11)) + np.arange(11).reshape(1, -1)
    normalizer = MinMaxNormalizer(x, exclude_indices=range(1, 10, 2))
    x[:, 10] = np.nan
    x_normed = normalizer(x)

    # Included indices should have minimum value -1.0 and
    # maximum value 1.0.
    assert np.all(np.isclose(x_normed[:, :10:2].min(axis=0), -1.0))
    assert np.all(np.isclose(x_normed[:, :10:2].max(axis=0), 1.0))

    # nan values should be set to -1.5.
    assert np.all(np.isclose(x_normed[:, -1], -1.5))

    # Channels without variation should be set to -1.0
    x = np.zeros((100, 10))
    normalizer = MinMaxNormalizer(x)
    x_normed = normalizer(x)
    assert np.all(np.isclose(x_normed, -1.0))


def test_invert():
    """
    Ensure that the inverse function of the Normalizer works as expected.
    """
    x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1)
    normalizer = Normalizer(x, exclude_indices=[0, 1, 2])
    x_normed = normalizer(x)
    x = normalizer.invert(x_normed)
    assert np.all(np.isclose(np.mean(x, axis=0), np.arange(10, dtype=np.float32), atol=1e-2))


def test_save_and_load(tmp_path):
    """
    Ensure that saved and loaded normalizer yields same results as original.
    """
    x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1)
    normalizer = Normalizer(x, exclude_indices=range(1, 10, 2))
    normalizer.save(tmp_path / "normalizer.pckl")
    loaded = Normalizer.load(tmp_path / "normalizer.pckl")
    x_normed = normalizer(x)
    x_normed_loaded = loaded(x)
    assert np.all(np.isclose(x_normed, x_normed_loaded))


def test_load_sftp(tmp_path):
    """
    Ensure that saved and loaded normalizer yields same results as original.
    """
    x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1)
    # ...
assert np.all(np.isclose(x_normed[:, -1], -1.5)) # Channels without variation should be set to", "\"\"\" x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1) normalizer = Normalizer(x, exclude_indices=range(1, 10,", "indices assert np.all(np.isclose(x_normed[:, 1::2].mean(axis=0), np.arange(10)[1::2].reshape(1, -1), 1e-2)) assert np.all(np.isclose(x_normed[:, 1::2].std(axis=0), 1.0, 1e-2)) #", "of the Normalizer works as expected. \"\"\" x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1,", "as original. \"\"\" x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1) normalizer = Normalizer(x,", "MinMaxNormalizer def test_normalizer_2d(): \"\"\" Checks that all feature indices that are not excluded", "x[:, 10] = np.nan x_normed = normalizer(x) # Included indices should have minimum", "as np from quantnn.normalizer import Normalizer, MinMaxNormalizer def test_normalizer_2d(): \"\"\" Checks that all", "1.0, 1e-2)) # Channels without variation should be set to -1.0 x =", "to -1.0 x = np.zeros((100, 10)) normalizer = MinMaxNormalizer(x) x_normed = normalizer(x) assert", "the inverse function of the Normalizer works as expected. \"\"\" x = np.random.normal(size=(100000,", "the Normalizer works as expected. \"\"\" x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1)", "assert np.all(np.isclose(np.mean(x, axis=0), np.arange(10, dtype=np.float32), atol=1e-2)) def test_save_and_load(tmp_path): \"\"\" Ensure that saved and", "not excluded have zero mean and unit std. dev. \"\"\" x = np.random.normal(size=(100000,", "= np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1) normalizer = Normalizer(x, exclude_indices=range(1, 10, 2)) normalizer.save(tmp_path", "np.zeros((100, 10)) normalizer = MinMaxNormalizer(x) x_normed = normalizer(x) assert np.all(np.isclose(x_normed, -1.0)) def test_invert():", "# Included indices should have minimum value -0.9 and # maximum value 1.0.", "assert np.all(np.isclose(x_normed[:, :10:2].min(axis=0), -1.0)) assert np.all(np.isclose(x_normed[:, :10:2].max(axis=0), 1.0)) # nan values should be", "::2].std(axis=0), 1.0, 1e-1)) # Excluded indices assert np.all(np.isclose(x_normed[:, 1::2].mean(axis=0), np.arange(10)[1::2].reshape(1, -1), 1e-2)) assert", "x = normalizer.invert(x_normed) assert np.all(np.isclose(np.mean(x, axis=0), np.arange(10, dtype=np.float32), atol=1e-2)) def test_save_and_load(tmp_path): \"\"\" Ensure", "x_normed_loaded)) def test_load_sftp(tmp_path): \"\"\" Ensure that saved and loaded normalizer yields same results", "feature indices that are not excluded have zero mean and unit std. dev.", "should be set to -1.0 x = np.zeros((100, 10)) normalizer = MinMaxNormalizer(x) x_normed", "that are not excluded have zero mean and unit std. dev. 
\"\"\" x", "x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1) normalizer = Normalizer(x, exclude_indices=[0, 1, 2])", "exclude_indices=range(1, 10, 2)) normalizer.save(tmp_path / \"normalizer.pckl\") loaded = Normalizer.load(tmp_path / \"normalizer.pckl\") x_normed =", "exclude_indices=range(1, 10, 2)) x_normed = normalizer(x) # Included indices should have zero mean", "x = np.zeros((100, 10)) normalizer = MinMaxNormalizer(x) x_normed = normalizer(x) assert np.all(np.isclose(x_normed, -1.0))", "\"\"\" x = np.random.normal(size=(100000, 10)) + np.arange(10).reshape(1, -1) normalizer = Normalizer(x, exclude_indices=[0, 1,", "= Normalizer.load(tmp_path / \"normalizer.pckl\") x_normed = normalizer(x) x_normed_loaded = loaded(x) assert np.all(np.isclose(x_normed, x_normed_loaded))", "np.arange(11).reshape(1, -1) normalizer = MinMaxNormalizer(x, exclude_indices=range(1, 10, 2)) x[:, 10] = np.nan x_normed", "atol=1e-2)) def test_save_and_load(tmp_path): \"\"\" Ensure that saved and loaded normalizer yields same results", "quantnn.normalizer import Normalizer, MinMaxNormalizer def test_normalizer_2d(): \"\"\" Checks that all feature indices that", "2)) x_normed = normalizer(x) # Included indices should have zero mean and std.", "= np.zeros((100, 10)) normalizer = Normalizer(x) x_normed = normalizer(x) assert np.all(np.isclose(x_normed, -1.0)) def", "def test_invert(): \"\"\" Ensure that the inverse function of the Normalizer works as", "Ensure that the inverse function of the Normalizer works as expected. \"\"\" x", "np.all(np.isclose(x_normed[:, ::2].mean(axis=0), 0.0, atol=1e-1)) assert np.all(np.isclose(x_normed[:, ::2].std(axis=0), 1.0, 1e-1)) # Excluded indices assert", "exclude_indices=[0, 1, 2]) x_normed = normalizer(x) x = normalizer.invert(x_normed) assert np.all(np.isclose(np.mean(x, axis=0), np.arange(10,", "be set to -1.0 x = np.zeros((100, 10)) normalizer = Normalizer(x) x_normed =", "np from quantnn.normalizer import Normalizer, MinMaxNormalizer def test_normalizer_2d(): \"\"\" Checks that all feature", "+ np.arange(10).reshape(1, -1) normalizer = Normalizer(x, exclude_indices=range(1, 10, 2)) x_normed = normalizer(x) #", "assert np.all(np.isclose(x_normed[:, 1::2].mean(axis=0), np.arange(10)[1::2].reshape(1, -1), 1e-2)) assert np.all(np.isclose(x_normed[:, 1::2].std(axis=0), 1.0, 1e-2)) # Channels", "Normalizer.load(tmp_path / \"normalizer.pckl\") x_normed = normalizer(x) x_normed_loaded = loaded(x) assert np.all(np.isclose(x_normed, x_normed_loaded, rtol=1e-3))", "x_normed_loaded = loaded(x) assert np.all(np.isclose(x_normed, x_normed_loaded)) def test_load_sftp(tmp_path): \"\"\" Ensure that saved and", "10, 2)) x_normed = normalizer(x) # Included indices should have zero mean and", "to -1.0. 
assert np.all(np.isclose(x_normed[:, -1], -1.5)) # Channels without variation should be set", "np.arange(10)[1::2].reshape(1, -1), 1e-2)) assert np.all(np.isclose(x_normed[:, 1::2].std(axis=0), 1.0, 1e-2)) # Channels without variation should", "= Normalizer(x, exclude_indices=[0, 1, 2]) x_normed = normalizer(x) x = normalizer.invert(x_normed) assert np.all(np.isclose(np.mean(x,", "numpy as np from quantnn.normalizer import Normalizer, MinMaxNormalizer def test_normalizer_2d(): \"\"\" Checks that", "x_normed = normalizer(x) assert np.all(np.isclose(x_normed, -1.0)) def test_invert(): \"\"\" Ensure that the inverse", "# Excluded indices assert np.all(np.isclose(x_normed[:, 1::2].mean(axis=0), np.arange(10)[1::2].reshape(1, -1), 1e-2)) assert np.all(np.isclose(x_normed[:, 1::2].std(axis=0), 1.0,", "<gh_stars>0 import numpy as np from quantnn.normalizer import Normalizer, MinMaxNormalizer def test_normalizer_2d(): \"\"\"", "10)) normalizer = Normalizer(x) x_normed = normalizer(x) assert np.all(np.isclose(x_normed, -1.0)) def test_min_max_normalizer_2d(): \"\"\"", "np.random.normal(size=(100000, 11)) + np.arange(11).reshape(1, -1) normalizer = MinMaxNormalizer(x, exclude_indices=range(1, 10, 2)) x[:, 10]", "assert np.all(np.isclose(x_normed[:, 1::2].std(axis=0), 1.0, 1e-2)) # Channels without variation should be set to", "np.arange(10).reshape(1, -1) normalizer = Normalizer(x, exclude_indices=[0, 1, 2]) x_normed = normalizer(x) x =", "def test_save_and_load(tmp_path): \"\"\" Ensure that saved and loaded normalizer yields same results as", "+ np.arange(10).reshape(1, -1) normalizer = Normalizer(x, exclude_indices=[0, 1, 2]) x_normed = normalizer(x) x", "np.all(np.isclose(x_normed, -1.0)) def test_invert(): \"\"\" Ensure that the inverse function of the Normalizer", "::2].mean(axis=0), 0.0, atol=1e-1)) assert np.all(np.isclose(x_normed[:, ::2].std(axis=0), 1.0, 1e-1)) # Excluded indices assert np.all(np.isclose(x_normed[:,", "np.all(np.isclose(x_normed[:, :10:2].min(axis=0), -1.0)) assert np.all(np.isclose(x_normed[:, :10:2].max(axis=0), 1.0)) # nan values should be set" ]
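# -----------------------------------------------------------------------------
# Minimal usage sketch of the interface the tests above exercise: construct a
# normalizer from sample data, apply it, invert it, and round-trip it through
# save/load. Illustrative only, not part of the test suite; the data shape,
# excluded column, and file name below are arbitrary choices.
if __name__ == "__main__":
    data = np.random.normal(size=(1000, 4)) * 5.0 + 2.0
    norm = Normalizer(data, exclude_indices=[3])   # leave column 3 untouched
    normed = norm(data)                            # normalize included columns
    restored = norm.invert(normed)                 # undo the transformation
    print(np.allclose(restored, data, atol=1e-3))  # equal up to float error
    norm.save("normalizer.pckl")                   # persist the statistics
    norm = Normalizer.load("normalizer.pckl")      # reload them elsewhere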
from pathlib import Path


def curr_file_path() -> Path:
    """Get current file path."""
    return Path(__file__).absolute()


def out_folder_path() -> Path:
    """Get output folder path."""
    return curr_file_path().parents[3].joinpath("out").absolute()


def out_geom_path() -> Path:
    """Get output geometry folder path."""
    # .absolute() already yields an absolute Path, so the former round-trip
    # through os.path.abspath() (which returned a str despite the annotation)
    # is not needed.
    return out_folder_path().joinpath("geometry").absolute()
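# Usage sketch, assuming this module sits three directory levels below the
# project root (so that parents[3] is the root): out_folder_path() then
# resolves to <root>/out and out_geom_path() to <root>/out/geometry. The
# mkdir call is an illustrative addition, not something the module performs.
if __name__ == "__main__":
    print(curr_file_path())    # e.g. <root>/src/pkg/util/this_file.py (say)
    print(out_folder_path())   # e.g. <root>/out
    out_geom_path().mkdir(parents=True, exist_ok=True)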
i = 1
while i < 6:
    print(i)
    if i == 3:
        break
    i += 1
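# The loop above prints 1, 2, 3 and then stops: break fires on i == 3 before
# the increment runs. An equivalent form with for/range, shown for contrast:
for i in range(1, 6):
    print(i)
    if i == 3:
        break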
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import json

import requests


def post_cli(auth, command):
    """POST a CLI command to the device's REST API and return its output."""
    url_cli = "http://" + auth.ipaddr + "/rest/" + auth.version + "/cli"
    command_dict = {"cmd": command}
    try:
        post_command = requests.post(url_cli, headers=auth.cookie,
                                     data=json.dumps(command_dict))
        # The CLI output comes back base64-encoded in the JSON body.
        cli_response = post_command.json()['result_base64_encoded']
        decoded_response = base64.b64decode(cli_response).decode('utf-8')
        return decoded_response
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " post_cli: An Error has occurred"
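# Usage sketch. post_cli() expects an auth object carrying the .ipaddr,
# .version, and .cookie attributes used above; the stand-in class and its
# values here are placeholders for whatever a prior REST login returns,
# not a real device or session.
class _FakeAuth:
    ipaddr = "192.0.2.1"                            # documentation-range IP
    version = "v1"
    cookie = {"cookie": "sessionId=placeholder"}


if __name__ == "__main__":
    print(post_cli(_FakeAuth(), "show version"))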
[ "alpha=0.4, label=Kolasinac[0]) # radar.plot(Mendy[1:], '-', lw=5, color= 'c', alpha=0.4, label=Mendy[0]) # # radar.ax.legend()", "# radar.plot(Eriksen[1:], '-', lw=3, color='#000080', alpha=0.4, label=Eriksen[0]) # radar.plot(Sane[1:], '-', lw=3, color='m', alpha=0.4,", "fontsize=22) # fig.savefig('Prem_LB.png') #------------------------------------------------------------------------------- # Bale = chart_prep.loc[1496].values # Benzema = chart_prep.loc[1500].values #", "young_center_backs_chart.loc[1922].values # Konate = young_center_backs_chart.loc[1806].values # Muki = young_center_backs_chart.loc[2267].values # Bastoni = young_center_backs_chart.loc[779].values", "np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,5,6),2), # np.around(np.linspace(0,1,6),2)]", "Player of the Year follow up', fontsize=16) # fig.savefig('Fifa_POY.png') #------------------------------------------------------------------------------- #Serie A STRIKERS", "alpha=0.4, label=Alonso[0]) # radar.plot(Rose[1:], '-', lw=5, color='m', alpha=0.4, label=Rose[0]) # radar.plot(Kolasinac[1:], '-', lw=5,", "= chart_prep.loc[419].values # Shaw = chart_prep.loc[186].values # Alonso = chart_prep.loc[291].values # Rose =", "color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend() # fig.suptitle('Bundesliga Playmakers', fontsize=16) # fig.savefig('Bund_Playmakers.png') #------------------------------------------------------------------------------- #Premier", "# fig.savefig('Prem_Playmakers.png') #------------------------------------------------------------------------------- #FIFA PLAYER OF THE YEAR COMPARISON FOLLOW UP #Messi vs", "label=Hazard[0]) # radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend() # fig.suptitle('Bundesliga Playmakers',", "# radar.plot(Benzema[1:], '-', lw=5, color='b', alpha=0.4, label=Benzema[0]) # radar.plot(Asensio[1:], '-', lw=5, color='g', alpha=0.4,", "= plt.figure(figsize=(10, 10)) #Name to appear on each axis for offensive categories #", "serie_a_chart.loc[729].values # Quag = serie_a_chart.loc[1004].values # Icardi = serie_a_chart.loc[664].values # PiatekG = serie_a_chart.loc[608].values", "# #Numerical labels to be displayed along each axis # labels = [np.around(np.linspace(0,10,6),2),", "# Bale = chart_prep.loc[1496].values # Benzema = chart_prep.loc[1500].values # Asensio = chart_prep.loc[1506].values #", "Rose = chart_prep.loc[391].values # Kolasinac = chart_prep.loc[514].values # Mendy = chart_prep.loc[128].values # #", "FOLLOW UP #Messi vs Ronaldo # Modric = df_chart_df.loc[1504].values # Ronaldo= df_chart_df.loc[729].values #", "# np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)] #", "Playmakers # Hazard = Prem_chart_df.loc[285].values # Eriksen = Prem_chart_df.loc[390].values # Sane = Prem_chart_df.loc[140].values", "world Varane # Ndicka = young_center_backs_chart.loc[1676].values # Zag = young_center_backs_chart.loc[1922].values # Konate =", "= Radar1(fig, titles, labels) # radar.plot(Modric[1:], '-', lw=3, color='#000000', alpha=0.4, label=Modric[0]) # radar.plot(Ronaldo[1:],", 
"to appear # titles =['Rating', 'Assists', 'Drb','PS%', 'Crosses', 'mis_cont', 'Tackles', 'Inter', 'Fouls', 'Clear',", "titles, labels) # radar.plot(PiatekG[1:], '-', lw=3, color='#0000FF', alpha=0.4, label='<NAME>') # radar.plot(PiatekM[1:], '-', lw=3,", "'KeyP','PS%', # 'Crosses', 'Fouled', 'mis_cont'] # #Numerical labels to be displayed along each", "# np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)] #", "np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)] # radar = Radar1(fig, titles, labels)", "storeyline # Ronaldo= serie_a_chart.loc[729].values # Quag = serie_a_chart.loc[1004].values # Icardi = serie_a_chart.loc[664].values #", "# radar.plot(Bale[1:], '-', lw=5, color='r', alpha=0.4, label=Bale[0]) # radar.plot(Benzema[1:], '-', lw=5, color='b', alpha=0.4,", "np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2),", "After the Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young defenders to scout further.", "CB in the world Varane # Ndicka = young_center_backs_chart.loc[1676].values # Zag = young_center_backs_chart.loc[1922].values", "# #Name to appear # titles =['Rating', 'Assists', 'Drb','PS%', 'Crosses', 'mis_cont', 'Tackles', 'Inter',", "Radar1(fig, titles, labels) # radar.plot(Bale[1:], '-', lw=5, color='r', alpha=0.4, label=Bale[0]) # radar.plot(Benzema[1:], '-',", "color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF', alpha=0.4, label=Mbappe[0]) # radar.plot(Messi[1:], '-',", "label=Quag[0]) # radar.plot(Icardi[1:], '-', lw=3, color='k', alpha=0.4, label=Icardi[0]) # radar.plot(PiatekG[1:], '-', lw=3, color='#00FFFF',", "bundesliga_chart_df.loc[1959].values # Kostic = bundesliga_chart_df.loc[1677].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2),", "radar.plot(PiatekG[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=PiatekG[0]) # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label=PiatekM[0])", "# # radar.ax.legend() # fig.suptitle('Young Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- # Robertson =", "np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,7.4,6),2), np.around(np.linspace(0,1.6,6),2)] # # radar = Radar1(fig, titles, labels) # radar.plot(Varane[1:],", "# radar.plot(Muki[1:], '-', lw=3, color='b', alpha=0.4, label=Muki[0]) # radar.plot(Bastoni[1:], '-', lw=3, color='g', alpha=0.4,", "fig.clear() # # fig = plt.figure(figsize=(10, 10)) # #Name to appear # titles", "# radar.ax.legend() # fig.suptitle('Premier League LB', fontsize=22) # fig.savefig('Prem_LB.png') #------------------------------------------------------------------------------- # Bale =", "young_center_backs_chart.loc[2267].values # 
Bastoni = young_center_backs_chart.loc[779].values # Varane = young_center_backs_chart.loc[1510].values # titles = ['Rating',", "label=Rose[0]) # radar.plot(Kolasinac[1:], '-', lw=5, color= 'g', alpha=0.4, label=Kolasinac[0]) # radar.plot(Mendy[1:], '-', lw=5,", "label=Muller[0]) # radar.plot(Hazard[1:], '-', lw=3, color= '#0000FF', alpha=0.4, label=Hazard[0]) # radar.plot(Kostic[1:], '-', lw=3,", "fig.suptitle('Piatek Before and After the Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young defenders", "np.around(np.linspace(0,2.9,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,6.3,6),2), # np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)] # radar", "# np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,6.3,6),2), # np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) #", "radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend() # fig.suptitle('Bundesliga Playmakers', fontsize=16) #", "label=PiatekM[0]) #------------------------------------------------------------------------------- #Piatek on Genoa vs Piatek on Milan # PiatekG = serie_a_chart.loc[608].values", "np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,11,6),2), np.around(np.linspace(0,3.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2),", "= Radar1(fig, titles, labels) # radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) # radar.plot(Ndicka[1:],", "Hazard = Prem_chart_df.loc[285].values # Eriksen = Prem_chart_df.loc[390].values # Sane = Prem_chart_df.loc[140].values # Sterling", "labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2),", "np.around(np.linspace(0,4.3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)]", "alpha=0.4, label=Ronaldo[0]) # radar.plot(Quag[1:], '-', lw=3, color='b', alpha=0.4, label=Quag[0]) # radar.plot(Icardi[1:], '-', lw=3,", "LB', fontsize=22) # fig.savefig('Prem_LB.png') #------------------------------------------------------------------------------- # Bale = chart_prep.loc[1496].values # Benzema = chart_prep.loc[1500].values", "'-', lw=3, color='r', alpha=0.4, label=Gnabry[0]) # radar.plot(Brandt[1:], '-', lw=3, color='k', alpha=0.4, label=Brandt[0]) #", "lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) # radar.plot(Ndicka[1:], '-', lw=3, color='r', alpha=0.4, label=Ndicka[0]) # radar.plot(Zag[1:],", "df_chart_df.loc[2343].values # Messi = df_chart_df.loc[1241].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2),", 
"#------------------------------------------------------------------------------- #Premier League Playmakers # Hazard = Prem_chart_df.loc[285].values # Eriksen = Prem_chart_df.loc[390].values #", "np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,6.3,6),2), # np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles,", "= bundesliga_chart_df.loc[1677].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2),", "and After the Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young defenders to scout", "radar.ax.legend() # fig.suptitle('Premier League Playmakers', fontsize=16) # fig.savefig('Prem_Playmakers.png') #------------------------------------------------------------------------------- #FIFA PLAYER OF THE", "'-', lw=3, color='#000000', alpha=0.4, label=Modric[0]) # radar.plot(Ronaldo[1:], '-', lw=3, color='#800000', alpha=0.4, label=Ronaldo[0]) #", "# Rose = chart_prep.loc[391].values # Kolasinac = chart_prep.loc[514].values # Mendy = chart_prep.loc[128].values #", "# np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)] # radar = Radar1(fig, titles, labels) #", "# radar.plot(Asensio[1:], '-', lw=5, color='g', alpha=0.4, label=Asensio[0]) # # radar.ax.legend() # fig.suptitle('Ronaldos Replacement',", "# np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)] # radar = Radar1(fig,", "old, compared to possibly best young CB in the world Varane # Ndicka", "# #Name to appear # titles =['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', #", "# # # radar = Radar1(fig, titles, labels) # radar.plot(Bale[1:], '-', lw=5, color='r',", "# # # radar = Radar1(fig, titles, labels) # radar.plot(Robertson[1:], '-', lw=5, color='r',", "young_center_backs_chart.loc[1806].values # Muki = young_center_backs_chart.loc[2267].values # Bastoni = young_center_backs_chart.loc[779].values # Varane = young_center_backs_chart.loc[1510].values", "# fig = plt.figure(figsize=(10, 10)) #Name to appear on each axis for offensive", "# radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) # radar.plot(Ndicka[1:], '-', lw=3, color='r', alpha=0.4,", "'-', lw=5, color='r', alpha=0.4, label=Bale[0]) # radar.plot(Benzema[1:], '-', lw=5, color='b', alpha=0.4, label=Benzema[0]) #", "# radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF', alpha=0.4,", "titles, labels) # radar.plot(Bale[1:], '-', lw=5, color='r', alpha=0.4, label=Bale[0]) # radar.plot(Benzema[1:], '-', lw=5,", "serie_a_chart.loc[608].values # PiatekM = serie_a_chart.loc[961].values # #labels/linspace from serie_a above # radar =", "np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,7,6),2)] # # # radar = Radar1(fig, titles, labels)", "# radar.plot(Gnabry[1:], '-', lw=3, color='r', alpha=0.4, label=Gnabry[0]) # radar.plot(Brandt[1:], '-', lw=3, color='k', 
alpha=0.4,", "alpha=0.4, label='<NAME>') # # radar.ax.legend() # fig.suptitle('Piatek Before and After the Transfer', fontsize=16)", "'mis_cont'] # #Numerical labels to be displayed along each axis # labels =", "np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Hazard[1:],", "# fig.suptitle('Premier League Playmakers', fontsize=16) # fig.savefig('Prem_Playmakers.png') #------------------------------------------------------------------------------- #FIFA PLAYER OF THE YEAR", "radar = Radar1(fig, titles, labels) # radar.plot(Ronaldo[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Ronaldo[0]) #", "np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Modric[1:], '-', lw=3, color='#000000',", "np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Hazard[1:], '-', lw=3, color='#FFFF00',", "np.around(np.linspace(0,3,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4,", "10)) # #Name to appear # titles =['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%',", "df_chart_df.loc[1504].values # Ronaldo= df_chart_df.loc[729].values # Salah = df_chart_df.loc[429].values # Mbappe = df_chart_df.loc[2343].values #", "radar = Radar1(fig, titles, labels) # radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0]) #", "'-', lw=3, color='m', alpha=0.4, label=Muller[0]) # radar.plot(Hazard[1:], '-', lw=3, color= '#0000FF', alpha=0.4, label=Hazard[0])", "'-', lw=5, color='r', alpha=0.4, label=Robertson[0]) # radar.plot(Shaw[1:], '-', lw=5, color='k', alpha=0.4, label=Shaw[0]) #", "label=Sane[0]) # radar.plot(Sterling[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Sterling[0]) # radar.plot(Salah[1:], '-', lw=3, color=", "# radar.plot(Shaw[1:], '-', lw=5, color='k', alpha=0.4, label=Shaw[0]) # radar.plot(Alonso[1:], '-', lw=5, color='b', alpha=0.4,", "Ronaldo= serie_a_chart.loc[729].values # Quag = serie_a_chart.loc[1004].values # Icardi = serie_a_chart.loc[664].values # PiatekG =", "'-', lw=3, color='b', alpha=0.4, label=Quag[0]) # radar.plot(Icardi[1:], '-', lw=3, color='k', alpha=0.4, label=Icardi[0]) #", "color='b', alpha=0.4, label=Benzema[0]) # radar.plot(Asensio[1:], '-', lw=5, color='g', alpha=0.4, label=Asensio[0]) # # radar.ax.legend()", "np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,5,6),2),", "= df_chart_df.loc[1504].values # Ronaldo= df_chart_df.loc[729].values # Salah = df_chart_df.loc[429].values # Mbappe = df_chart_df.loc[2343].values", "= chart_prep.loc[128].values # # #Figure # fig.clear() # # fig = plt.figure(figsize=(10, 10))", "labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2),", "fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- # Robertson = chart_prep.loc[419].values # Shaw 
= chart_prep.loc[186].values # Alonso =", "radar = Radar1(fig, titles, labels) # radar.plot(Modric[1:], '-', lw=3, color='#000000', alpha=0.4, label=Modric[0]) #", "np.around(np.linspace(0,3.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)]", "to appear # titles =['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', # 'Crosses', 'Fouled',", "# np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,7.8,6),2), np.around(np.linspace(0,4.4,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,7.4,6),2), np.around(np.linspace(0,1.6,6),2)] #", "# np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)] #", "= young_center_backs_chart.loc[1922].values # Konate = young_center_backs_chart.loc[1806].values # Muki = young_center_backs_chart.loc[2267].values # Bastoni =", "# radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label=PiatekM[0]) #------------------------------------------------------------------------------- #Piatek on Genoa vs Piatek", "alpha=0.4, label=Bastoni[0]) # # radar.ax.legend() # fig.suptitle('Young Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- #", "be displayed along each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2),", "radar.plot(Kolasinac[1:], '-', lw=5, color= 'g', alpha=0.4, label=Kolasinac[0]) # radar.plot(Mendy[1:], '-', lw=5, color= 'c',", "titles = ['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', # 'Crosses', 'Fouled', 'mis_cont','Tackles', 'Inter']", "# np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)] # radar = Radar1(fig,", "label='<NAME>') # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label='<NAME>') # # radar.ax.legend() # fig.suptitle('Piatek", "labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,12,6),2), np.around(np.linspace(0,5.2,6),2), # np.around(np.linspace(0,4.1,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2),", "np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Modric[1:], '-',", "# PiatekG = serie_a_chart.loc[608].values # PiatekM = serie_a_chart.loc[961].values # #labels/linspace from serie_a above", "alpha=0.4, label=Mbappe[0]) # radar.plot(Messi[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Messi[0]) # # radar.ax.legend() #", "= [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,21,6),2), # np.around(np.linspace(0,9,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,2.9,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), 
np.around(np.linspace(0,2.9,6),2), #", "color='#FFFF00', alpha=0.4, label=Zag[0]) # radar.plot(Konate[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Konate[0]) # radar.plot(Muki[1:], '-',", "categories # titles = ['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', # 'Crosses', 'Fouled',", "fig.suptitle('FIFA Player of the Year follow up', fontsize=16) # fig.savefig('Fifa_POY.png') #------------------------------------------------------------------------------- #Serie A", "# # fig = plt.figure(figsize=(10, 10)) # #Name to appear # titles =['Rating',", "labels) # radar.plot(Hazard[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Hazard[0]) # radar.plot(Eriksen[1:], '-', lw=3, color='#000080',", "[np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,12,6),2), np.around(np.linspace(0,5.2,6),2), # np.around(np.linspace(0,4.1,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2),", "#Numerical labels to be displayed along each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2),", "label=Bastoni[0]) # # radar.ax.legend() # fig.suptitle('Young Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- # Robertson", "= chart_prep.loc[1496].values # Benzema = chart_prep.loc[1500].values # Asensio = chart_prep.loc[1506].values # # #Figure", "plots: #------------------------------------------------------------------------------- # #Figure # fig.clear() # fig = plt.figure(figsize=(10, 10)) #Name to", "# np.around(np.linspace(0,1,6),2)] # # # radar = Radar1(fig, titles, labels) # radar.plot(Robertson[1:], '-',", "= chart_prep.loc[1506].values # # #Figure # fig.clear() # # fig = plt.figure(figsize=(10, 10))", "lw=3, color='b', alpha=0.4, label=Quag[0]) # radar.plot(Icardi[1:], '-', lw=3, color='k', alpha=0.4, label=Icardi[0]) # radar.plot(PiatekG[1:],", "radar.plot(Benzema[1:], '-', lw=5, color='b', alpha=0.4, label=Benzema[0]) # radar.plot(Asensio[1:], '-', lw=5, color='g', alpha=0.4, label=Asensio[0])", "Kolasinac = chart_prep.loc[514].values # Mendy = chart_prep.loc[128].values # # #Figure # fig.clear() #", "np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Hazard[1:], '-',", "# radar = Radar1(fig, titles, labels) # radar.plot(Ronaldo[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Ronaldo[0])", "color='g', alpha=0.4, label=Bastoni[0]) # # radar.ax.legend() # fig.suptitle('Young Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #-------------------------------------------------------------------------------", "# radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label='<NAME>') # # radar.ax.legend() # fig.suptitle('Piatek Before", "offensive categories # titles = ['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', # 'Crosses',", "# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,11,6),2), np.around(np.linspace(0,3.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2),", "np.around(np.linspace(0,11,6),2), np.around(np.linspace(0,3.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2), # 
np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2),", "lw=3, color= 'r', alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4, label=Pogba[0]) #", "bundesliga_chart_df.loc[1654].values # Muller = bundesliga_chart_df.loc[1719].values # Hazard = bundesliga_chart_df.loc[1959].values # Kostic = bundesliga_chart_df.loc[1677].values", "radar = Radar1(fig, titles, labels) # radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) #", "Radar1(fig, titles, labels) # radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) # radar.plot(Ndicka[1:], '-',", "lw=3, color='#FF00FF', alpha=0.4, label=Ronaldo[0]) # radar.plot(Quag[1:], '-', lw=3, color='b', alpha=0.4, label=Quag[0]) # radar.plot(Icardi[1:],", "# fig.suptitle('Young Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- # Robertson = chart_prep.loc[419].values # Shaw", "# np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,5,6),2), #", "# 'Crosses', 'Fouled', 'mis_cont'] # #Numerical labels to be displayed along each axis", "#------------------------------------------------------------------------------- #Identifying young defenders to scout further. #Under 20 years old, compared to", "# np.around(np.linspace(0,7.8,6),2), np.around(np.linspace(0,4.4,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,7.4,6),2), np.around(np.linspace(0,1.6,6),2)] # # radar =", "Playmakers', fontsize=16) # fig.savefig('Prem_Playmakers.png') #------------------------------------------------------------------------------- #FIFA PLAYER OF THE YEAR COMPARISON FOLLOW UP", "axis for offensive categories # titles = ['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%',", "alpha=0.4, label=Hazard[0]) # radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend() # fig.suptitle('Bundesliga", "# radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0]) # radar.plot(Gnabry[1:], '-', lw=3, color='r', alpha=0.4,", "label=PiatekG[0]) # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label=PiatekM[0]) #------------------------------------------------------------------------------- #Piatek on Genoa vs", "Piatek on Milan # PiatekG = serie_a_chart.loc[608].values # PiatekM = serie_a_chart.loc[961].values # #labels/linspace", "titles, labels) # radar.plot(Robertson[1:], '-', lw=5, color='r', alpha=0.4, label=Robertson[0]) # radar.plot(Shaw[1:], '-', lw=5,", "radar = Radar1(fig, titles, labels) # radar.plot(Bale[1:], '-', lw=5, color='r', alpha=0.4, label=Bale[0]) #", "A STRIKERS storeyline # Ronaldo= serie_a_chart.loc[729].values # Quag = serie_a_chart.loc[1004].values # Icardi =", "'Drb','PS%', 'Crosses', 'mis_cont', 'Tackles', 'Inter', 'Fouls', 'Clear', 'Blocks'] # #Numerical labels to be", "bundesliga_chart_df.loc[1719].values # Hazard = bundesliga_chart_df.loc[1959].values # Kostic = bundesliga_chart_df.loc[1677].values # labels = [np.around(np.linspace(0,10,6),2),", "# radar.plot(Zag[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Zag[0]) # radar.plot(Konate[1:], '-', lw=3, color='#FF00FF', alpha=0.4,", "lw=3, 
color='#00FFFF', alpha=0.4, label=Sterling[0]) # radar.plot(Salah[1:], '-', lw=3, color= 'r', alpha=0.4, label=Salah[0]) #", "np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Sancho[1:], '-',", "radar.plot(Salah[1:], '-', lw=3, color= 'r', alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4,", "# Asensio = chart_prep.loc[1506].values # # #Figure # fig.clear() # # fig =", "axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,12,6),2), np.around(np.linspace(0,5.2,6),2), # np.around(np.linspace(0,4.1,6),2), np.around(np.linspace(0,2.9,6),2), #", "alpha=0.4, label=Icardi[0]) # radar.plot(PiatekG[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=PiatekG[0]) # radar.plot(PiatekM[1:], '-', lw=3,", "titles =['Rating', 'Assists', 'Drb','PS%', 'Crosses', 'mis_cont', 'Tackles', 'Inter', 'Fouls', 'Clear', 'Blocks'] # #Numerical", "# radar.plot(Mendy[1:], '-', lw=5, color= 'c', alpha=0.4, label=Mendy[0]) # # radar.ax.legend() # fig.suptitle('Premier", "alpha=0.4, label=Mendy[0]) # # radar.ax.legend() # fig.suptitle('Premier League LB', fontsize=22) # fig.savefig('Prem_LB.png') #-------------------------------------------------------------------------------", "label=Sancho[0]) # radar.plot(Gnabry[1:], '-', lw=3, color='r', alpha=0.4, label=Gnabry[0]) # radar.plot(Brandt[1:], '-', lw=3, color='k',", "color='r', alpha=0.4, label='<NAME>') # # radar.ax.legend() # fig.suptitle('Piatek Before and After the Transfer',", "# Salah = df_chart_df.loc[429].values # Mbappe = df_chart_df.loc[2343].values # Messi = df_chart_df.loc[1241].values #", "label=Icardi[0]) # radar.plot(PiatekG[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=PiatekG[0]) # radar.plot(PiatekM[1:], '-', lw=3, color='r',", "years old, compared to possibly best young CB in the world Varane #", "color='#0000FF', alpha=0.4, label='<NAME>') # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label='<NAME>') # # radar.ax.legend()", "Muller = bundesliga_chart_df.loc[1719].values # Hazard = bundesliga_chart_df.loc[1959].values # Kostic = bundesliga_chart_df.loc[1677].values # labels", "# radar = Radar1(fig, titles, labels) # radar.plot(Modric[1:], '-', lw=3, color='#000000', alpha=0.4, label=Modric[0])", "displayed along each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,12,6),2), np.around(np.linspace(0,5.2,6),2), #", "# # radar.ax.legend() # fig.suptitle('FIFA Player of the Year follow up', fontsize=16) #", "= df_chart_df.loc[429].values # Mbappe = df_chart_df.loc[2343].values # Messi = df_chart_df.loc[1241].values # labels =", "# np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), #", "# radar.plot(Rose[1:], '-', lw=5, color='m', alpha=0.4, label=Rose[0]) # radar.plot(Kolasinac[1:], '-', lw=5, color= 'g',", "chart_prep.loc[291].values # Rose = chart_prep.loc[391].values # Kolasinac = chart_prep.loc[514].values # Mendy = chart_prep.loc[128].values", "Genoa vs Piatek on Milan # PiatekG = serie_a_chart.loc[608].values # PiatekM = serie_a_chart.loc[961].values", "np.around(np.linspace(0,7.8,6),2), np.around(np.linspace(0,4.4,6),2), # 
#information for generating plots:
#-------------------------------------------------------------------------------
#
#Figure
# fig.clear()
# fig = plt.figure(figsize=(10, 10))
#
#Name to appear on each axis for offensive categories
# titles = ['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%',
#           'Crosses', 'Fouled', 'mis_cont','Tackles', 'Inter']
#-------------------------------------------------------------------------------
#Bund Playmakers:
# Sancho = bundesliga_chart_df.loc[1924].values
# Gnabry = bundesliga_chart_df.loc[1716].values
# Brandt = bundesliga_chart_df.loc[1654].values
# Muller = bundesliga_chart_df.loc[1719].values
# Hazard = bundesliga_chart_df.loc[1959].values
# Kostic = bundesliga_chart_df.loc[1677].values
# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2),
#           np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2),
#           np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2),
#           np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2),
#           np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2),
#           np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)]
# radar = Radar1(fig, titles, labels)
# radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0])
# radar.plot(Gnabry[1:], '-', lw=3, color='r', alpha=0.4, label=Gnabry[0])
# radar.plot(Brandt[1:], '-', lw=3, color='k', alpha=0.4, label=Brandt[0])
# radar.plot(Muller[1:], '-', lw=3, color='m', alpha=0.4, label=Muller[0])
# radar.plot(Hazard[1:], '-', lw=3, color='#0000FF', alpha=0.4, label=Hazard[0])
# radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4, label=Kostic[0])
# radar.ax.legend()
# fig.suptitle('Bundesliga Playmakers', fontsize=16)
# fig.savefig('Bund_Playmakers.png')
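#-------------------------------------------------------------------------------
#Every block in this file builds a Radar1(fig, titles, labels) object and then
#calls radar.plot(...) / radar.ax.legend(). Radar1 itself is defined elsewhere
#in the project; the sketch below (kept commented out like the blocks around it)
#is only a plausible minimal implementation, not the project's actual class: the
#spoke layout, the per-axis tick labels, and the 0-5 radial rescaling are all
#assumptions.
#
# class Radar1:
#     def __init__(self, fig, titles, labels, rect=(0.05, 0.05, 0.9, 0.9)):
#         self.n = len(titles)
#         self.angles = np.arange(0, 360, 360.0 / self.n)   # one spoke per stat
#         self.maxima = [float(l[-1]) for l in labels]      # top of each scale
#         # one polar axes per spoke so each stat carries its own tick labels
#         self.axes = [fig.add_axes(rect, projection='polar', label='axes%d' % i)
#                      for i in range(self.n)]
#         self.ax = self.axes[0]
#         self.ax.set_thetagrids(self.angles, labels=titles, fontsize=12)
#         for ax in self.axes[1:]:
#             ax.patch.set_visible(False)
#             ax.grid(False)
#             ax.xaxis.set_visible(False)
#         for ax, angle, label in zip(self.axes, self.angles, labels):
#             ax.set_rgrids(range(1, 6), angle=angle, labels=label[1:])
#             ax.spines['polar'].set_visible(False)
#             ax.set_ylim(0, 5)
#
#     def plot(self, values, *args, **kw):
#         # rescale each raw stat onto the shared 0-5 grid, close the polygon
#         scaled = [float(v) / m * 5 for v, m in zip(values, self.maxima)]
#         angle = np.deg2rad(np.r_[self.angles, self.angles[0]])
#         scaled = np.r_[scaled, scaled[0]]
#         self.ax.plot(angle, scaled, *args, **kw)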
#-------------------------------------------------------------------------------
#Premier League Playmakers
# Hazard = Prem_chart_df.loc[285].values
# Eriksen = Prem_chart_df.loc[390].values
# Sane = Prem_chart_df.loc[140].values
# Sterling = Prem_chart_df.loc[144].values
# Salah = Prem_chart_df.loc[429].values
# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2),
#           np.around(np.linspace(0,11,6),2), np.around(np.linspace(0,3.9,6),2),
#           np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2),
#           np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2),
#           np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2),
#           np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)]
# radar = Radar1(fig, titles, labels)
# radar.plot(Hazard[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Hazard[0])
# radar.plot(Eriksen[1:], '-', lw=3, color='#000080', alpha=0.4, label=Eriksen[0])
# radar.plot(Sane[1:], '-', lw=3, color='m', alpha=0.4, label=Sane[0])
# radar.plot(Sterling[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Sterling[0])
# radar.plot(Salah[1:], '-', lw=3, color='r', alpha=0.4, label=Salah[0])
# radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4, label=Pogba[0])
# radar.ax.legend()
# fig.suptitle('Premier League Playmakers', fontsize=16)
# fig.savefig('Prem_Playmakers.png')
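#-------------------------------------------------------------------------------
#Note on the label arrays: each np.around(np.linspace(0, max, 6), 2) call makes
#six evenly spaced tick labels from 0 up to a hand-picked per-stat maximum
#(presumably the league-leading value), rounded to two decimals, e.g.:
#
# np.around(np.linspace(0, 8.8, 6), 2)   # -> [0.   1.76 3.52 5.28 7.04 8.8 ]
#
#so every player's stat is read against the same 0-to-best scale on that spoke.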
#-------------------------------------------------------------------------------
#FIFA PLAYER OF THE YEAR COMPARISON FOLLOW UP
#Messi vs Ronaldo
# Modric = df_chart_df.loc[1504].values
# Ronaldo = df_chart_df.loc[729].values
# Salah = df_chart_df.loc[429].values
# Mbappe = df_chart_df.loc[2343].values
# Messi = df_chart_df.loc[1241].values
# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2),
#           np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2),
#           np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2),
#           np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2),
#           np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,8.8,6),2),
#           np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)]
# radar = Radar1(fig, titles, labels)
# radar.plot(Modric[1:], '-', lw=3, color='#000000', alpha=0.4, label=Modric[0])
# radar.plot(Ronaldo[1:], '-', lw=3, color='#800000', alpha=0.4, label=Ronaldo[0])
# radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0])
# radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF', alpha=0.4, label=Mbappe[0])
# radar.plot(Messi[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Messi[0])
#
# radar.ax.legend()
# fig.suptitle('FIFA Player of the Year follow up', fontsize=16)
# fig.savefig('Fifa_POY.png')
#-------------------------------------------------------------------------------
#Serie A STRIKERS storyline
# Ronaldo = serie_a_chart.loc[729].values
# Quag = serie_a_chart.loc[1004].values
# Icardi = serie_a_chart.loc[664].values
# PiatekG = serie_a_chart.loc[608].values
# PiatekM = serie_a_chart.loc[961].values
# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,21,6),2),
#           np.around(np.linspace(0,9,6),2), np.around(np.linspace(0,6.1,6),2),
#           np.around(np.linspace(0,2.9,6),2), np.around(np.linspace(0,3.2,6),2),
#           np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.9,6),2),
#           np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,6.3,6),2),
#           np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)]
# radar = Radar1(fig, titles, labels)
# radar.plot(Ronaldo[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Ronaldo[0])
# radar.plot(Quag[1:], '-', lw=3, color='b', alpha=0.4, label=Quag[0])
# radar.plot(Icardi[1:], '-', lw=3, color='k', alpha=0.4, label=Icardi[0])
# radar.plot(PiatekG[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=PiatekG[0])
# radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label=PiatekM[0])
#-------------------------------------------------------------------------------
#Piatek on Genoa vs Piatek on Milan
# PiatekG = serie_a_chart.loc[608].values
# PiatekM = serie_a_chart.loc[961].values
#
#labels/linspace from serie_a above
# radar = Radar1(fig, titles, labels)
# radar.plot(PiatekG[1:], '-', lw=3, color='#0000FF', alpha=0.4, label='<NAME>')
# radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label='<NAME>')
#
# radar.ax.legend()
# fig.suptitle('Piatek Before and After the Transfer', fontsize=16)
# fig.savefig('Piatek.png')
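#-------------------------------------------------------------------------------
#The Piatek block above repeats the same radar.plot call per player; if it were
#re-enabled, the repeated calls could be folded into a loop. A sketch under the
#same assumptions as the originals (serie_a_chart rows are [name, stats...] and
#titles/labels are still the Serie A ones):
#
# fig = plt.figure(figsize=(10, 10))
# radar = Radar1(fig, titles, labels)
# for idx, colour in [(608, '#0000FF'), (961, 'r')]:
#     row = serie_a_chart.loc[idx].values
#     radar.plot(row[1:], '-', lw=3, color=colour, alpha=0.4, label=row[0])
# radar.ax.legend()
# fig.suptitle('Piatek Before and After the Transfer', fontsize=16)
# fig.savefig('Piatek.png')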
#-------------------------------------------------------------------------------
#Identifying young defenders to scout further.
#Under 20 years old, compared to possibly the best young CB in the world, Varane
# Ndicka = young_center_backs_chart.loc[1676].values
# Zag = young_center_backs_chart.loc[1922].values
# Konate = young_center_backs_chart.loc[1806].values
# Muki = young_center_backs_chart.loc[2267].values
# Bastoni = young_center_backs_chart.loc[779].values
# Varane = young_center_backs_chart.loc[1510].values
# titles = ['Rating', 'AvgP','PS%', 'mis_cont','AerialsWon', 'Tackles', 'Inter',
#           'Fouls', 'Clear', 'Blocks']
#
# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,91,6),2),
#           np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,8.8,6),2),
#           np.around(np.linspace(0,7.8,6),2), np.around(np.linspace(0,4.4,6),2),
#           np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,2.7,6),2),
#           np.around(np.linspace(0,7.4,6),2), np.around(np.linspace(0,1.6,6),2)]
#
# radar = Radar1(fig, titles, labels)
# radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0])
# radar.plot(Ndicka[1:], '-', lw=3, color='r', alpha=0.4, label=Ndicka[0])
# radar.plot(Zag[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Zag[0])
# radar.plot(Konate[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Konate[0])
# radar.plot(Muki[1:], '-', lw=3, color='b', alpha=0.4, label=Muki[0])
# radar.plot(Bastoni[1:], '-', lw=3, color='g', alpha=0.4, label=Bastoni[0])
#
# radar.ax.legend()
# fig.suptitle('Young Defenders', fontsize=16)
# fig.savefig('Young_Defenders.png')
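#-------------------------------------------------------------------------------
#The .loc[...].values pattern above pulls one row per player as a plain array;
#row[0] (the name) feeds label= and row[1:] feeds radar.plot. A hypothetical
#frame with made-up numbers, just to show the shape the code expects:
#
# young_center_backs_chart = pd.DataFrame(
#     [['Varane', 7.1, 52.3, 88.0, 0.6, 2.1, 1.8, 0.7, 0.8, 4.2, 0.5]],
#     index=[1510],
#     columns=['Name', 'Rating', 'AvgP', 'PS%', 'mis_cont', 'AerialsWon',
#              'Tackles', 'Inter', 'Fouls', 'Clear', 'Blocks'])
# row = young_center_backs_chart.loc[1510].values
# name, stats = row[0], row[1:]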
#-------------------------------------------------------------------------------
# Robertson = chart_prep.loc[419].values
# Shaw = chart_prep.loc[186].values
# Alonso = chart_prep.loc[291].values
# Rose = chart_prep.loc[391].values
# Kolasinac = chart_prep.loc[514].values
# Mendy = chart_prep.loc[128].values
#
# #Figure
# fig.clear()
#
# fig = plt.figure(figsize=(10, 10))
#
# #Name to appear
# titles =['Rating', 'Assists', 'Drb','PS%', 'Crosses', 'mis_cont', 'Tackles',
#          'Inter', 'Fouls', 'Clear', 'Blocks']
#
# #Numerical labels to be displayed along each axis
# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2),
#           np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2),
#           np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2),
#           np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2),
#           np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,5,6),2),
#           np.around(np.linspace(0,1,6),2)]
#
# radar = Radar1(fig, titles, labels)
# radar.plot(Robertson[1:], '-', lw=5, color='r', alpha=0.4, label=Robertson[0])
# radar.plot(Shaw[1:], '-', lw=5, color='k', alpha=0.4, label=Shaw[0])
# radar.plot(Alonso[1:], '-', lw=5, color='b', alpha=0.4, label=Alonso[0])
# radar.plot(Rose[1:], '-', lw=5, color='m', alpha=0.4, label=Rose[0])
# radar.plot(Kolasinac[1:], '-', lw=5, color='g', alpha=0.4, label=Kolasinac[0])
# radar.plot(Mendy[1:], '-', lw=5, color='c', alpha=0.4, label=Mendy[0])
#
# radar.ax.legend()
# fig.suptitle('Premier League LB', fontsize=22)
# fig.savefig('Prem_LB.png')
#Under 20 years old, compared to possibly best", "# titles = ['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', # 'Crosses', 'Fouled', 'mis_cont','Tackles',", "plt.figure(figsize=(10, 10)) # #Name to appear # titles =['Rating', 'Goals', 'Assists', 'SpG', 'Drb',", "np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)]", "# Messi = df_chart_df.loc[1241].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), #", "radar.plot(Sterling[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Sterling[0]) # radar.plot(Salah[1:], '-', lw=3, color= 'r', alpha=0.4,", "League Playmakers # Hazard = Prem_chart_df.loc[285].values # Eriksen = Prem_chart_df.loc[390].values # Sane =", "# 'Crosses', 'Fouled', 'mis_cont','Tackles', 'Inter'] #------------------------------------------------------------------------------- #Bund Playmakers: # Sancho = bundesliga_chart_df.loc[1924].values #", "'SpG', 'Drb', 'KeyP','PS%', # 'Crosses', 'Fouled', 'mis_cont','Tackles', 'Inter'] #------------------------------------------------------------------------------- #Bund Playmakers: # Sancho", "#Premier League Playmakers # Hazard = Prem_chart_df.loc[285].values # Eriksen = Prem_chart_df.loc[390].values # Sane", "Zag = young_center_backs_chart.loc[1922].values # Konate = young_center_backs_chart.loc[1806].values # Muki = young_center_backs_chart.loc[2267].values # Bastoni", "np.around(np.linspace(0,12,6),2), np.around(np.linspace(0,5.2,6),2), # np.around(np.linspace(0,4.1,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,7,6),2)] # #", "titles, labels) # radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) # radar.plot(Ndicka[1:], '-', lw=3,", "= plt.figure(figsize=(10, 10)) # #Name to appear # titles =['Rating', 'Goals', 'Assists', 'SpG',", "'-', lw=5, color= 'c', alpha=0.4, label=Mendy[0]) # # radar.ax.legend() # fig.suptitle('Premier League LB',", "# radar = Radar1(fig, titles, labels) # radar.plot(Robertson[1:], '-', lw=5, color='r', alpha=0.4, label=Robertson[0])", "# Ndicka = young_center_backs_chart.loc[1676].values # Zag = young_center_backs_chart.loc[1922].values # Konate = young_center_backs_chart.loc[1806].values #", "label=Alonso[0]) # radar.plot(Rose[1:], '-', lw=5, color='m', alpha=0.4, label=Rose[0]) # radar.plot(Kolasinac[1:], '-', lw=5, color=", "'Crosses', 'Fouled', 'mis_cont'] # #Numerical labels to be displayed along each axis #", "color='k', alpha=0.4, label=Brandt[0]) # radar.plot(Muller[1:], '-', lw=3, color='m', alpha=0.4, label=Muller[0]) # radar.plot(Hazard[1:], '-',", "radar.plot(Konate[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Konate[0]) # radar.plot(Muki[1:], '-', lw=3, color='b', alpha=0.4, label=Muki[0])", "radar.ax.legend() # fig.suptitle('Piatek Before and After the Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying", "Mbappe = df_chart_df.loc[2343].values # Messi = df_chart_df.loc[1241].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), #", 
"radar.ax.legend() # fig.suptitle('Young Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- # Robertson = chart_prep.loc[419].values #", "'Inter', 'Fouls', 'Clear', 'Blocks'] # #Numerical labels to be displayed along each axis", "# Mbappe = df_chart_df.loc[2343].values # Messi = df_chart_df.loc[1241].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2),", "# np.around(np.linspace(0,11,6),2), np.around(np.linspace(0,3.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), #", "fig.savefig('Prem_LB.png') #------------------------------------------------------------------------------- # Bale = chart_prep.loc[1496].values # Benzema = chart_prep.loc[1500].values # Asensio =", "lw=3, color='#FFFF00', alpha=0.4, label=Zag[0]) # radar.plot(Konate[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Konate[0]) # radar.plot(Muki[1:],", "color='#800000', alpha=0.4, label=Ronaldo[0]) # radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:], '-',", "color='r', alpha=0.4, label=PiatekM[0]) #------------------------------------------------------------------------------- #Piatek on Genoa vs Piatek on Milan # PiatekG", "Varane # Ndicka = young_center_backs_chart.loc[1676].values # Zag = young_center_backs_chart.loc[1922].values # Konate = young_center_backs_chart.loc[1806].values", "# np.around(np.linspace(0,2.9,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,6.3,6),2), # np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)] #", "alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4, label=Pogba[0]) # radar.ax.legend() # fig.suptitle('Premier", "# radar.plot(Sterling[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Sterling[0]) # radar.plot(Salah[1:], '-', lw=3, color= 'r',", "#FIFA PLAYER OF THE YEAR COMPARISON FOLLOW UP #Messi vs Ronaldo # Modric", "np.around(np.linspace(0,9,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,2.9,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,6.3,6),2), # np.around(np.linspace(0,4.2,6),2),", "# #labels/linspace from serie_a above # radar = Radar1(fig, titles, labels) # radar.plot(PiatekG[1:],", "= [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), #", "titles, labels) # radar.plot(Modric[1:], '-', lw=3, color='#000000', alpha=0.4, label=Modric[0]) # radar.plot(Ronaldo[1:], '-', lw=3,", "# radar.ax.legend() # fig.suptitle('FIFA Player of the Year follow up', fontsize=16) # fig.savefig('Fifa_POY.png')", "titles, labels) # radar.plot(Hazard[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Hazard[0]) # radar.plot(Eriksen[1:], '-', lw=3,", "in the world Varane # Ndicka = young_center_backs_chart.loc[1676].values # Zag = young_center_backs_chart.loc[1922].values #", "'Clear', 'Blocks'] # # labels = 
[np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,91,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,7.8,6),2),", "axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2), #", "Robertson = chart_prep.loc[419].values # Shaw = chart_prep.loc[186].values # Alonso = chart_prep.loc[291].values # Rose", "radar.plot(Hazard[1:], '-', lw=3, color= '#0000FF', alpha=0.4, label=Hazard[0]) # radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4,", "# Hazard = bundesliga_chart_df.loc[1959].values # Kostic = bundesliga_chart_df.loc[1677].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2),", "radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) # radar.plot(Ndicka[1:], '-', lw=3, color='r', alpha=0.4, label=Ndicka[0])", "# np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) #", "radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0]) # radar.plot(Gnabry[1:], '-', lw=3, color='r', alpha=0.4, label=Gnabry[0])", "chart_prep.loc[1496].values # Benzema = chart_prep.loc[1500].values # Asensio = chart_prep.loc[1506].values # # #Figure #", "STRIKERS storeyline # Ronaldo= serie_a_chart.loc[729].values # Quag = serie_a_chart.loc[1004].values # Icardi = serie_a_chart.loc[664].values", "# # #Figure # fig.clear() # # fig = plt.figure(figsize=(10, 10)) # #Name", "PiatekG = serie_a_chart.loc[608].values # PiatekM = serie_a_chart.loc[961].values # #labels/linspace from serie_a above #", "lw=3, color='m', alpha=0.4, label=Muller[0]) # radar.plot(Hazard[1:], '-', lw=3, color= '#0000FF', alpha=0.4, label=Hazard[0]) #", "[np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,2.3,6),2),", "lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF', alpha=0.4, label=Mbappe[0]) # radar.plot(Messi[1:],", "lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0]) # radar.plot(Gnabry[1:], '-', lw=3, color='r', alpha=0.4, label=Gnabry[0]) # radar.plot(Brandt[1:],", "'-', lw=3, color='r', alpha=0.4, label=Ndicka[0]) # radar.plot(Zag[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Zag[0]) #", "alpha=0.4, label=Sterling[0]) # radar.plot(Salah[1:], '-', lw=3, color= 'r', alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-',", "the Year follow up', fontsize=16) # fig.savefig('Fifa_POY.png') #------------------------------------------------------------------------------- #Serie A STRIKERS storeyline #", "possibly best young CB in the world Varane # Ndicka = young_center_backs_chart.loc[1676].values #", "label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4, label=Pogba[0]) # radar.ax.legend() # fig.suptitle('Premier League", "Radar1(fig, titles, labels) # radar.plot(Modric[1:], '-', lw=3, color='#000000', alpha=0.4, label=Modric[0]) # radar.plot(Ronaldo[1:], '-',", "# Kostic = bundesliga_chart_df.loc[1677].values # labels = [np.around(np.linspace(0,10,6),2), 
np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2), #", "# radar.plot(Salah[1:], '-', lw=3, color= 'r', alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3, color='k',", "= Radar1(fig, titles, labels) # radar.plot(Robertson[1:], '-', lw=5, color='r', alpha=0.4, label=Robertson[0]) # radar.plot(Shaw[1:],", "# fig = plt.figure(figsize=(10, 10)) # #Name to appear # titles =['Rating', 'Goals',", "radar.ax.legend() # fig.suptitle('Bundesliga Playmakers', fontsize=16) # fig.savefig('Bund_Playmakers.png') #------------------------------------------------------------------------------- #Premier League Playmakers # Hazard", "#Name to appear on each axis for offensive categories # titles = ['Rating',", "# #Figure # fig.clear() # fig = plt.figure(figsize=(10, 10)) #Name to appear on", "'-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF', alpha=0.4, label=Mbappe[0]) #", "Ronaldo # Modric = df_chart_df.loc[1504].values # Ronaldo= df_chart_df.loc[729].values # Salah = df_chart_df.loc[429].values #", "Prem_chart_df.loc[285].values # Eriksen = Prem_chart_df.loc[390].values # Sane = Prem_chart_df.loc[140].values # Sterling = Prem_chart_df.loc[144].values", "radar.plot(Muki[1:], '-', lw=3, color='b', alpha=0.4, label=Muki[0]) # radar.plot(Bastoni[1:], '-', lw=3, color='g', alpha=0.4, label=Bastoni[0])", "Radar1(fig, titles, labels) # radar.plot(PiatekG[1:], '-', lw=3, color='#0000FF', alpha=0.4, label='<NAME>') # radar.plot(PiatekM[1:], '-',", "# # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,91,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,7.8,6),2), np.around(np.linspace(0,4.4,6),2), #", "labels) # radar.plot(Varane[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Varane[0]) # radar.plot(Ndicka[1:], '-', lw=3, color='r',", "np.around(np.linspace(0,1.6,6),2)] # # radar = Radar1(fig, titles, labels) # radar.plot(Varane[1:], '-', lw=3, color='#00FFFF',", "# radar.plot(PiatekG[1:], '-', lw=3, color='#0000FF', alpha=0.4, label='<NAME>') # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4,", "plt.figure(figsize=(10, 10)) #Name to appear on each axis for offensive categories # titles", "# radar.plot(Messi[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Messi[0]) # # radar.ax.legend() # fig.suptitle('FIFA Player", "Radar1(fig, titles, labels) # radar.plot(Ronaldo[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Ronaldo[0]) # radar.plot(Quag[1:], '-',", "the Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young defenders to scout further. 
#Under", "PiatekM = serie_a_chart.loc[961].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,21,6),2), # np.around(np.linspace(0,9,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,2.9,6),2),", "'-', lw=5, color='k', alpha=0.4, label=Shaw[0]) # radar.plot(Alonso[1:], '-', lw=5, color='b', alpha=0.4, label=Alonso[0]) #", "fig.savefig('Fifa_POY.png') #------------------------------------------------------------------------------- #Serie A STRIKERS storeyline # Ronaldo= serie_a_chart.loc[729].values # Quag = serie_a_chart.loc[1004].values", "radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF', alpha=0.4, label=Mbappe[0])", "lw=3, color='#00FFFF', alpha=0.4, label=Messi[0]) # # radar.ax.legend() # fig.suptitle('FIFA Player of the Year", "# Sterling = Prem_chart_df.loc[144].values # Salah = Prem_chart_df.loc[429].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2),", "young CB in the world Varane # Ndicka = young_center_backs_chart.loc[1676].values # Zag =", "# Brandt = bundesliga_chart_df.loc[1654].values # Muller = bundesliga_chart_df.loc[1719].values # Hazard = bundesliga_chart_df.loc[1959].values #", "labels to be displayed along each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), #", "fig.suptitle('Bundesliga Playmakers', fontsize=16) # fig.savefig('Bund_Playmakers.png') #------------------------------------------------------------------------------- #Premier League Playmakers # Hazard = Prem_chart_df.loc[285].values", "= chart_prep.loc[514].values # Mendy = chart_prep.loc[128].values # # #Figure # fig.clear() # #", "Eriksen = Prem_chart_df.loc[390].values # Sane = Prem_chart_df.loc[140].values # Sterling = Prem_chart_df.loc[144].values # Salah", "up', fontsize=16) # fig.savefig('Fifa_POY.png') #------------------------------------------------------------------------------- #Serie A STRIKERS storeyline # Ronaldo= serie_a_chart.loc[729].values #", "lw=3, color='#000080', alpha=0.4, label=Eriksen[0]) # radar.plot(Sane[1:], '-', lw=3, color='m', alpha=0.4, label=Sane[0]) # radar.plot(Sterling[1:],", "Before and After the Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young defenders to", "along each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2),", "Playmakers: # Sancho = bundesliga_chart_df.loc[1924].values # Gnabry = bundesliga_chart_df.loc[1716].values # Brandt = bundesliga_chart_df.loc[1654].values", "np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Ronaldo[1:], '-', lw=3, color='#FF00FF',", "chart_prep.loc[419].values # Shaw = chart_prep.loc[186].values # Alonso = chart_prep.loc[291].values # Rose = chart_prep.loc[391].values", "'-', lw=3, color='r', alpha=0.4, label='<NAME>') # # radar.ax.legend() # fig.suptitle('Piatek Before and After", "of the Year follow up', fontsize=16) # fig.savefig('Fifa_POY.png') #------------------------------------------------------------------------------- #Serie A STRIKERS storeyline", "label=Gnabry[0]) # radar.plot(Brandt[1:], '-', lw=3, color='k', alpha=0.4, label=Brandt[0]) # radar.plot(Muller[1:], '-', lw=3, color='m',", "label=Modric[0]) # 
radar.plot(Ronaldo[1:], '-', lw=3, color='#800000', alpha=0.4, label=Ronaldo[0]) # radar.plot(Salah[1:], '-', lw=3, color='#FF0000',", "#------------------------------------------------------------------------------- #Serie A STRIKERS storeyline # Ronaldo= serie_a_chart.loc[729].values # Quag = serie_a_chart.loc[1004].values #", "= chart_prep.loc[186].values # Alonso = chart_prep.loc[291].values # Rose = chart_prep.loc[391].values # Kolasinac =", "color='#00FFFF', alpha=0.4, label=PiatekG[0]) # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label=PiatekM[0]) #------------------------------------------------------------------------------- #Piatek on", "each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,100,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2),", "Messi = df_chart_df.loc[1241].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2),", "= df_chart_df.loc[1241].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2),", "= bundesliga_chart_df.loc[1719].values # Hazard = bundesliga_chart_df.loc[1959].values # Kostic = bundesliga_chart_df.loc[1677].values # labels =", "lw=3, color='#0000FF', alpha=0.4, label='<NAME>') # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label='<NAME>') # #", "#Saved information for generating plots: #------------------------------------------------------------------------------- # #Figure # fig.clear() # fig =", "lw=5, color='k', alpha=0.4, label=Shaw[0]) # radar.plot(Alonso[1:], '-', lw=5, color='b', alpha=0.4, label=Alonso[0]) # radar.plot(Rose[1:],", "'-', lw=3, color='#FFFF00', alpha=0.4, label=Zag[0]) # radar.plot(Konate[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Konate[0]) #", "# radar.plot(Ronaldo[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Ronaldo[0]) # radar.plot(Quag[1:], '-', lw=3, color='b', alpha=0.4,", "'-', lw=3, color='#00FFFF', alpha=0.4, label=Messi[0]) # # radar.ax.legend() # fig.suptitle('FIFA Player of the", "# fig.clear() # # fig = plt.figure(figsize=(10, 10)) # #Name to appear #", "'Assists', 'SpG', 'Drb', 'KeyP','PS%', # 'Crosses', 'Fouled', 'mis_cont'] # #Numerical labels to be", "= young_center_backs_chart.loc[1676].values # Zag = young_center_backs_chart.loc[1922].values # Konate = young_center_backs_chart.loc[1806].values # Muki =", "= ['Rating', 'AvgP','PS%', 'mis_cont','AerialsWon', 'Tackles', 'Inter', # 'Fouls', 'Clear', 'Blocks'] # # labels", "# radar.plot(Quag[1:], '-', lw=3, color='b', alpha=0.4, label=Quag[0]) # radar.plot(Icardi[1:], '-', lw=3, color='k', alpha=0.4,", "[np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2),", "compared to possibly best young CB in the world Varane # Ndicka =", "fontsize=16) # fig.savefig('Fifa_POY.png') #------------------------------------------------------------------------------- #Serie A STRIKERS storeyline # Ronaldo= serie_a_chart.loc[729].values # Quag", "# 
Ronaldo= df_chart_df.loc[729].values # Salah = df_chart_df.loc[429].values # Mbappe = df_chart_df.loc[2343].values # Messi", "label=Sterling[0]) # radar.plot(Salah[1:], '-', lw=3, color= 'r', alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3,", "vs Ronaldo # Modric = df_chart_df.loc[1504].values # Ronaldo= df_chart_df.loc[729].values # Salah = df_chart_df.loc[429].values", "np.around(np.linspace(0,7,6),2)] # # # radar = Radar1(fig, titles, labels) # radar.plot(Bale[1:], '-', lw=5,", "# radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4, label=Pogba[0]) # radar.ax.legend() # fig.suptitle('Premier League Playmakers',", "'-', lw=3, color='#800000', alpha=0.4, label=Ronaldo[0]) # radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) #", "= [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,91,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,7.8,6),2), np.around(np.linspace(0,4.4,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,2.7,6),2), #", "label=Benzema[0]) # radar.plot(Asensio[1:], '-', lw=5, color='g', alpha=0.4, label=Asensio[0]) # # radar.ax.legend() # fig.suptitle('Ronaldos", "radar.plot(Sane[1:], '-', lw=3, color='m', alpha=0.4, label=Sane[0]) # radar.plot(Sterling[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Sterling[0])", "chart_prep.loc[1500].values # Asensio = chart_prep.loc[1506].values # # #Figure # fig.clear() # # fig", "color='g', alpha=0.4, label=Asensio[0]) # # radar.ax.legend() # fig.suptitle('Ronaldos Replacement', fontsize=22) # fig.savefig('Madrid_front_three.png') #-------------------------------------------------------------------------------", "alpha=0.4, label=PiatekG[0]) # radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label=PiatekM[0]) #------------------------------------------------------------------------------- #Piatek on Genoa", "Shaw = chart_prep.loc[186].values # Alonso = chart_prep.loc[291].values # Rose = chart_prep.loc[391].values # Kolasinac", "color='r', alpha=0.4, label=Bale[0]) # radar.plot(Benzema[1:], '-', lw=5, color='b', alpha=0.4, label=Benzema[0]) # radar.plot(Asensio[1:], '-',", "np.around(np.linspace(0,6.3,6),2), # np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Ronaldo[1:], '-',", "= young_center_backs_chart.loc[1806].values # Muki = young_center_backs_chart.loc[2267].values # Bastoni = young_center_backs_chart.loc[779].values # Varane =", "'-', lw=5, color='m', alpha=0.4, label=Rose[0]) # radar.plot(Kolasinac[1:], '-', lw=5, color= 'g', alpha=0.4, label=Kolasinac[0])", "np.around(np.linspace(0,5,6),2), # np.around(np.linspace(0,1,6),2)] # # # radar = Radar1(fig, titles, labels) # radar.plot(Robertson[1:],", "for offensive categories # titles = ['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', #", "# radar.plot(Muller[1:], '-', lw=3, color='m', alpha=0.4, label=Muller[0]) # radar.plot(Hazard[1:], '-', lw=3, color= '#0000FF',", "radar.plot(Messi[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Messi[0]) # # radar.ax.legend() # fig.suptitle('FIFA Player of", "'Inter', # 'Fouls', 'Clear', 'Blocks'] # # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,91,6),2), # np.around(np.linspace(0,100,6),2),", "np.around(np.linspace(0,2.3,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)] # radar = Radar1(fig, titles, labels)", 
"label=Hazard[0]) # radar.plot(Eriksen[1:], '-', lw=3, color='#000080', alpha=0.4, label=Eriksen[0]) # radar.plot(Sane[1:], '-', lw=3, color='m',", "# np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,5,6),2), # np.around(np.linspace(0,1,6),2)] # # # radar =", "= serie_a_chart.loc[961].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,21,6),2), # np.around(np.linspace(0,9,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,2.9,6),2), np.around(np.linspace(0,3.2,6),2),", "color= '#0000FF', alpha=0.4, label=Hazard[0]) # radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend()", "titles, labels) # radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0]) # radar.plot(Gnabry[1:], '-', lw=3,", "'Tackles', 'Inter', # 'Fouls', 'Clear', 'Blocks'] # # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,91,6),2), #", "labels) # radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0]) # radar.plot(Gnabry[1:], '-', lw=3, color='r',", "Sancho = bundesliga_chart_df.loc[1924].values # Gnabry = bundesliga_chart_df.loc[1716].values # Brandt = bundesliga_chart_df.loc[1654].values # Muller", "Bastoni = young_center_backs_chart.loc[779].values # Varane = young_center_backs_chart.loc[1510].values # titles = ['Rating', 'AvgP','PS%', 'mis_cont','AerialsWon',", "label=Ronaldo[0]) # radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF',", "# fig.suptitle('Piatek Before and After the Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young", "# radar = Radar1(fig, titles, labels) # radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0])", "# fig = plt.figure(figsize=(10, 10)) # #Name to appear # titles =['Rating', 'Assists',", "color='b', alpha=0.4, label=Muki[0]) # radar.plot(Bastoni[1:], '-', lw=3, color='g', alpha=0.4, label=Bastoni[0]) # # radar.ax.legend()", "radar.plot(Zag[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Zag[0]) # radar.plot(Konate[1:], '-', lw=3, color='#FF00FF', alpha=0.4, label=Konate[0])", "'r', alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4, label=Pogba[0]) # radar.ax.legend() #", "# radar.plot(Ndicka[1:], '-', lw=3, color='r', alpha=0.4, label=Ndicka[0]) # radar.plot(Zag[1:], '-', lw=3, color='#FFFF00', alpha=0.4,", "df_chart_df.loc[429].values # Mbappe = df_chart_df.loc[2343].values # Messi = df_chart_df.loc[1241].values # labels = [np.around(np.linspace(0,10,6),2),", "labels) # radar.plot(PiatekG[1:], '-', lw=3, color='#0000FF', alpha=0.4, label='<NAME>') # radar.plot(PiatekM[1:], '-', lw=3, color='r',", "# Zag = young_center_backs_chart.loc[1922].values # Konate = young_center_backs_chart.loc[1806].values # Muki = young_center_backs_chart.loc[2267].values #", "to be displayed along each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,11,6),2), # np.around(np.linspace(0,3.2,6),2),", "radar.plot(PiatekM[1:], '-', lw=3, color='r', alpha=0.4, label='<NAME>') # # radar.ax.legend() # fig.suptitle('Piatek Before and", "color='k', alpha=0.4, label=Shaw[0]) # radar.plot(Alonso[1:], '-', lw=5, color='b', alpha=0.4, label=Alonso[0]) # radar.plot(Rose[1:], '-',", "# radar.plot(Robertson[1:], '-', lw=5, color='r', 
alpha=0.4, label=Robertson[0]) # radar.plot(Shaw[1:], '-', lw=5, color='k', alpha=0.4,", "= bundesliga_chart_df.loc[1924].values # Gnabry = bundesliga_chart_df.loc[1716].values # Brandt = bundesliga_chart_df.loc[1654].values # Muller =", "label=Ndicka[0]) # radar.plot(Zag[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Zag[0]) # radar.plot(Konate[1:], '-', lw=3, color='#FF00FF',", "radar = Radar1(fig, titles, labels) # radar.plot(PiatekG[1:], '-', lw=3, color='#0000FF', alpha=0.4, label='<NAME>') #", "lw=3, color='r', alpha=0.4, label='<NAME>') # # radar.ax.legend() # fig.suptitle('Piatek Before and After the", "to appear on each axis for offensive categories # titles = ['Rating', 'Goals',", "= [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), #", "np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Modric[1:],", "'-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0]) # radar.plot(Gnabry[1:], '-', lw=3, color='r', alpha=0.4, label=Gnabry[0]) #", "# radar = Radar1(fig, titles, labels) # radar.plot(Hazard[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Hazard[0])", "# radar.plot(Ronaldo[1:], '-', lw=3, color='#800000', alpha=0.4, label=Ronaldo[0]) # radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4,", "'AvgP','PS%', 'mis_cont','AerialsWon', 'Tackles', 'Inter', # 'Fouls', 'Clear', 'Blocks'] # # labels = [np.around(np.linspace(0,10,6),2),", "labels to be displayed along each axis # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), #", "lw=5, color='r', alpha=0.4, label=Bale[0]) # radar.plot(Benzema[1:], '-', lw=5, color='b', alpha=0.4, label=Benzema[0]) # radar.plot(Asensio[1:],", "label=Kolasinac[0]) # radar.plot(Mendy[1:], '-', lw=5, color= 'c', alpha=0.4, label=Mendy[0]) # # radar.ax.legend() #", "scout further. 
#Under 20 years old, compared to possibly best young CB in", "label=Messi[0]) # # radar.ax.legend() # fig.suptitle('FIFA Player of the Year follow up', fontsize=16)", "fig.suptitle('Premier League LB', fontsize=22) # fig.savefig('Prem_LB.png') #------------------------------------------------------------------------------- # Bale = chart_prep.loc[1496].values # Benzema", "# fig.suptitle('Premier League LB', fontsize=22) # fig.savefig('Prem_LB.png') #------------------------------------------------------------------------------- # Bale = chart_prep.loc[1496].values #", "np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.3,6),2), # np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2)] # radar", "np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2), np.around(np.linspace(0,5.8,6),2), # np.around(np.linspace(0,4,6),2), np.around(np.linspace(0,3,6),2)] # radar =", "lw=3, color='m', alpha=0.4, label=Sane[0]) # radar.plot(Sterling[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Sterling[0]) # radar.plot(Salah[1:],", "radar.ax.legend() # fig.suptitle('FIFA Player of the Year follow up', fontsize=16) # fig.savefig('Fifa_POY.png') #-------------------------------------------------------------------------------", "PiatekM = serie_a_chart.loc[961].values # #labels/linspace from serie_a above # radar = Radar1(fig, titles,", "# np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,7,6),2)] # # # radar = Radar1(fig, titles,", "fig.savefig('Prem_Playmakers.png') #------------------------------------------------------------------------------- #FIFA PLAYER OF THE YEAR COMPARISON FOLLOW UP #Messi vs Ronaldo", "young_center_backs_chart.loc[779].values # Varane = young_center_backs_chart.loc[1510].values # titles = ['Rating', 'AvgP','PS%', 'mis_cont','AerialsWon', 'Tackles', 'Inter',", "Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- # Robertson = chart_prep.loc[419].values # Shaw = chart_prep.loc[186].values", "Transfer', fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young defenders to scout further. 
#Under 20", "lw=3, color='#800000', alpha=0.4, label=Ronaldo[0]) # radar.plot(Salah[1:], '-', lw=3, color='#FF0000', alpha=0.4, label=Salah[0]) # radar.plot(Mbappe[1:],", "plt.figure(figsize=(10, 10)) # #Name to appear # titles =['Rating', 'Assists', 'Drb','PS%', 'Crosses', 'mis_cont',", "# Varane = young_center_backs_chart.loc[1510].values # titles = ['Rating', 'AvgP','PS%', 'mis_cont','AerialsWon', 'Tackles', 'Inter', #", "# np.around(np.linspace(0,3,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,7.4,6),2), np.around(np.linspace(0,1.6,6),2)] # # radar = Radar1(fig, titles, labels)", "np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,6.3,6),2), # np.around(np.linspace(0,4.2,6),2), np.around(np.linspace(0,3.2,6),2)] # radar =", "# Gnabry = bundesliga_chart_df.loc[1716].values # Brandt = bundesliga_chart_df.loc[1654].values # Muller = bundesliga_chart_df.loc[1719].values #", "#Under 20 years old, compared to possibly best young CB in the world", "radar.plot(Muller[1:], '-', lw=3, color='m', alpha=0.4, label=Muller[0]) # radar.plot(Hazard[1:], '-', lw=3, color= '#0000FF', alpha=0.4,", "COMPARISON FOLLOW UP #Messi vs Ronaldo # Modric = df_chart_df.loc[1504].values # Ronaldo= df_chart_df.loc[729].values", "lw=3, color='#FF00FF', alpha=0.4, label=Konate[0]) # radar.plot(Muki[1:], '-', lw=3, color='b', alpha=0.4, label=Muki[0]) # radar.plot(Bastoni[1:],", "# fig.savefig('Bund_Playmakers.png') #------------------------------------------------------------------------------- #Premier League Playmakers # Hazard = Prem_chart_df.loc[285].values # Eriksen =", "radar.plot(Mendy[1:], '-', lw=5, color= 'c', alpha=0.4, label=Mendy[0]) # # radar.ax.legend() # fig.suptitle('Premier League", "# radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend() # fig.suptitle('Bundesliga Playmakers', fontsize=16)", "#Messi vs Ronaldo # Modric = df_chart_df.loc[1504].values # Ronaldo= df_chart_df.loc[729].values # Salah =", "np.around(np.linspace(0,1,6),2)] # # # radar = Radar1(fig, titles, labels) # radar.plot(Robertson[1:], '-', lw=5,", "np.around(np.linspace(0,3.2,6),2)] # radar = Radar1(fig, titles, labels) # radar.plot(Ronaldo[1:], '-', lw=3, color='#FF00FF', alpha=0.4,", "'#0000FF', alpha=0.4, label=Hazard[0]) # radar.plot(Kostic[1:], '-', lw=3, color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend() #", "generating plots: #------------------------------------------------------------------------------- # #Figure # fig.clear() # fig = plt.figure(figsize=(10, 10)) #Name", "vs Piatek on Milan # PiatekG = serie_a_chart.loc[608].values # PiatekM = serie_a_chart.loc[961].values #", "# # radar = Radar1(fig, titles, labels) # radar.plot(Bale[1:], '-', lw=5, color='r', alpha=0.4,", "#------------------------------------------------------------------------------- #Bund Playmakers: # Sancho = bundesliga_chart_df.loc[1924].values # Gnabry = bundesliga_chart_df.loc[1716].values # Brandt", "lw=3, color='b', alpha=0.4, label=Muki[0]) # radar.plot(Bastoni[1:], '-', lw=3, color='g', alpha=0.4, label=Bastoni[0]) # #", "= serie_a_chart.loc[1004].values # Icardi = serie_a_chart.loc[664].values # PiatekG = serie_a_chart.loc[608].values # PiatekM =", "# radar.ax.legend() # fig.suptitle('Young Defenders', fontsize=16) # fig.savefig('Young_Defenders.png') #------------------------------------------------------------------------------- # Robertson = 
chart_prep.loc[419].values", "Radar1(fig, titles, labels) # radar.plot(Robertson[1:], '-', lw=5, color='r', alpha=0.4, label=Robertson[0]) # radar.plot(Shaw[1:], '-',", "'Crosses', 'mis_cont', 'Tackles', 'Inter', 'Fouls', 'Clear', 'Blocks'] # #Numerical labels to be displayed", "= Prem_chart_df.loc[390].values # Sane = Prem_chart_df.loc[140].values # Sterling = Prem_chart_df.loc[144].values # Salah =", "color= 'r', alpha=0.4, label=Salah[0]) # radar.plot(Pogba[1:], '-', lw=3, color='k', alpha=0.4, label=Pogba[0]) # radar.ax.legend()", "# fig.clear() # fig = plt.figure(figsize=(10, 10)) #Name to appear on each axis", "label=Salah[0]) # radar.plot(Mbappe[1:], '-', lw=3, color='#0000FF', alpha=0.4, label=Mbappe[0]) # radar.plot(Messi[1:], '-', lw=3, color='#00FFFF',", "[np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,4.3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,2.7,6),2), # np.around(np.linspace(0,2.8,6),2),", "label=Mbappe[0]) # radar.plot(Messi[1:], '-', lw=3, color='#00FFFF', alpha=0.4, label=Messi[0]) # # radar.ax.legend() # fig.suptitle('FIFA", "np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,12,6),2), np.around(np.linspace(0,5.2,6),2), # np.around(np.linspace(0,4.1,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,7,6),2)]", "['Rating', 'Goals', 'Assists', 'SpG', 'Drb', 'KeyP','PS%', # 'Crosses', 'Fouled', 'mis_cont','Tackles', 'Inter'] #------------------------------------------------------------------------------- #Bund", "chart_prep.loc[128].values # # #Figure # fig.clear() # # fig = plt.figure(figsize=(10, 10)) #", "# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,32,6),2), # np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2),", "Salah = Prem_chart_df.loc[429].values # labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,19,6),2), # np.around(np.linspace(0,11,6),2), np.around(np.linspace(0,3.9,6),2), # np.around(np.linspace(0,3.2,6),2),", "'-', lw=3, color='#FF00FF', alpha=0.4, label=Konate[0]) # radar.plot(Muki[1:], '-', lw=3, color='b', alpha=0.4, label=Muki[0]) #", "'-', lw=3, color='#008080', alpha=0.4, label=Kostic[0]) # radar.ax.legend() # fig.suptitle('Bundesliga Playmakers', fontsize=16) # fig.savefig('Bund_Playmakers.png')", "fontsize=16) # fig.savefig('Piatek.png') #------------------------------------------------------------------------------- #Identifying young defenders to scout further. 
#Under 20 years", "Ronaldo= df_chart_df.loc[729].values # Salah = df_chart_df.loc[429].values # Mbappe = df_chart_df.loc[2343].values # Messi =", "# np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,5,6),2), # np.around(np.linspace(0,1,6),2)] # #", "'mis_cont','Tackles', 'Inter'] #------------------------------------------------------------------------------- #Bund Playmakers: # Sancho = bundesliga_chart_df.loc[1924].values # Gnabry = bundesliga_chart_df.loc[1716].values", "radar.plot(Hazard[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Hazard[0]) # radar.plot(Eriksen[1:], '-', lw=3, color='#000080', alpha=0.4, label=Eriksen[0])", "color='b', alpha=0.4, label=Quag[0]) # radar.plot(Icardi[1:], '-', lw=3, color='k', alpha=0.4, label=Icardi[0]) # radar.plot(PiatekG[1:], '-',", "'Assists', 'Drb','PS%', 'Crosses', 'mis_cont', 'Tackles', 'Inter', 'Fouls', 'Clear', 'Blocks'] # #Numerical labels to", "np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,6.5,6),2), np.around(np.linspace(0,3.2,6),2)] # radar =", "= Radar1(fig, titles, labels) # radar.plot(Bale[1:], '-', lw=5, color='r', alpha=0.4, label=Bale[0]) # radar.plot(Benzema[1:],", "'mis_cont', 'Tackles', 'Inter', 'Fouls', 'Clear', 'Blocks'] # #Numerical labels to be displayed along", "alpha=0.4, label=Gnabry[0]) # radar.plot(Brandt[1:], '-', lw=3, color='k', alpha=0.4, label=Brandt[0]) # radar.plot(Muller[1:], '-', lw=3,", "np.around(np.linspace(0,4.3,6),2), np.around(np.linspace(0,2.9,6),2), # np.around(np.linspace(0,2.3,6),2), np.around(np.linspace(0,5,6),2), # np.around(np.linspace(0,1,6),2)] # # # radar = Radar1(fig,", "# labels = [np.around(np.linspace(0,10,6),2), np.around(np.linspace(0,21,6),2), # np.around(np.linspace(0,9,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,2.9,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2),", "radar.plot(Brandt[1:], '-', lw=3, color='k', alpha=0.4, label=Brandt[0]) # radar.plot(Muller[1:], '-', lw=3, color='m', alpha=0.4, label=Muller[0])", "color='#FF00FF', alpha=0.4, label=Ronaldo[0]) # radar.plot(Quag[1:], '-', lw=3, color='b', alpha=0.4, label=Quag[0]) # radar.plot(Icardi[1:], '-',", "label=Brandt[0]) # radar.plot(Muller[1:], '-', lw=3, color='m', alpha=0.4, label=Muller[0]) # radar.plot(Hazard[1:], '-', lw=3, color=", "'Fouled', 'mis_cont'] # #Numerical labels to be displayed along each axis # labels", "np.around(np.linspace(0,13,6),2), np.around(np.linspace(0,6.1,6),2), # np.around(np.linspace(0,4.8,6),2), np.around(np.linspace(0,3.2,6),2), # np.around(np.linspace(0,100,6),2), np.around(np.linspace(0,3,6),2), # np.around(np.linspace(0,3.2,6),2), np.around(np.linspace(0,8.8,6),2), # np.around(np.linspace(0,6.5,6),2),", "['Rating', 'AvgP','PS%', 'mis_cont','AerialsWon', 'Tackles', 'Inter', # 'Fouls', 'Clear', 'Blocks'] # # labels =" ]
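# The blocks above all assume a Radar1 helper that draws one polar spoke per
# statistic. Radar1's real definition is not part of this section; the
# (fig, titles, labels) constructor and the .plot/.ax usage are taken from the
# calls above, while everything else below -- including rescaling each raw stat
# onto a shared 0-5 radial grid -- is an assumption, a minimal sketch only.
import numpy as np
import matplotlib.pyplot as plt

class Radar1:
    def __init__(self, fig, titles, labels, rect=(0.1, 0.1, 0.8, 0.8)):
        self.n = len(titles)
        self.angles = np.arange(0, 360, 360.0 / self.n)
        # One overlaid polar axes per spoke, so each spoke can carry its own
        # tick labels (the per-stat np.linspace values built above).
        self.axes = [fig.add_axes(rect, projection='polar', label='axes%d' % i)
                     for i in range(self.n)]
        self.ax = self.axes[0]
        self.ax.set_thetagrids(self.angles, labels=titles, fontsize=12)
        for ax in self.axes[1:]:
            ax.patch.set_visible(False)
            ax.grid(False)
            ax.xaxis.set_visible(False)
        # labels[i] is assumed to be the 6-value linspace for stat i; dropping
        # the leading 0 leaves five tick labels to match radii 1..5.
        self.maxima = [float(lab[-1]) for lab in labels]
        for ax, angle, lab in zip(self.axes, self.angles, labels):
            ax.set_rgrids(range(1, 6), angle=angle, labels=lab[1:])
            ax.spines['polar'].set_visible(False)
            ax.set_ylim(0, 5)

    def plot(self, values, *args, **kw):
        # Rescale the raw stats so each axis tops out at its own maximum,
        # then close the polygon by repeating the first point.
        scaled = 5.0 * np.asarray(values, dtype=float) / np.asarray(self.maxima)
        angle = np.deg2rad(np.r_[self.angles, self.angles[0]])
        scaled = np.r_[scaled, scaled[0]]
        self.ax.plot(angle, scaled, *args, **kw)

# Usage would match the commented blocks above, e.g.:
#   fig = plt.figure(figsize=(10, 10))
#   radar = Radar1(fig, titles, labels)
#   radar.plot(Sancho[1:], '-', lw=3, color='#FFFF00', alpha=0.4, label=Sancho[0])
#   radar.ax.legend()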
[ "= self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache =", "test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None def", "index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd self.go_to_group_page() for i", "def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd if", "count(self): wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self): if", "group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd = self.app.wd self.go_to_group_page()", "def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd =", "if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd", "= self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))):", "new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\") and", "def get_group_list(self): if self.group_cache is None: wd = self.app.wd self.go_to_group_page() self.group_cache = []", "0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text):", "(wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click()", "Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def update_first_group(self): self.update_group_by_index(0, new_group_data) def", "wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd self.go_to_group_page() for i in", "def go_to_group_page(self): wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click()", "wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def", "self.app.wd if text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, 
group): self.change_group_value(\"group_name\",", "GroupHelper: def __init__(self, app): self.app = app def create(self, group): wd = self.app.wd", "class GroupHelper: def __init__(self, app): self.app = app def create(self, group): wd =", "def select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd self.go_to_group_page()", "wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd = self.app.wd self.go_to_group_page()", "self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index)", "delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data)", "def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None", "= None def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd =", "go_to_group_page(self): wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def", "self.change_group_value(\"group_footer\", group.footer) def count(self): wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None", "__init__(self, app): self.app = app def create(self, group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click()", "wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd", "element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text group_id = element.find_element_by_name(\"selected[]\").get_attribute(\"value\") self.group_cache.append(Group(name=text, id=group_id)) return list(self.group_cache)", "None def select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd", "= self.app.wd if text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group):", "= self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd", "model.group import Group class GroupHelper: def __init__(self, app): self.app = app def create(self,", "> 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name,", "def change_group_value(self, field_name, text): wd = self.app.wd if text is not None: wd.find_element_by_name(field_name).click()", 
"self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self): if self.group_cache is None: wd", "def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click()", "and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def", "= None def get_group_list(self): if self.group_cache is None: wd = self.app.wd self.go_to_group_page() self.group_cache", "self.group_cache = None def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd", "wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def update_first_group(self):", "group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache =", "wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text): wd = self.app.wd if text is not", "# Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def select_group_by_index(self, index): wd", "def update_group_by_index(self, index, new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit", "delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None def delete_groups(self): wd = self.app.wd", "index, new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update", "wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page()", "self.back_to_group_page() self.group_cache = None def select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self):", "None def delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self,", "wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self):", "Group class GroupHelper: def __init__(self, app): self.app = app def create(self, group): wd", "= self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click() 
self.back_to_group_page() self.group_cache", "if text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name)", "wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def", "not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\",", "wd = self.app.wd self.go_to_group_page() self.group_cache = [] for element in wd.find_elements_by_css_selector(\"span.group\"): text =", "None: wd = self.app.wd self.go_to_group_page() self.group_cache = [] for element in wd.find_elements_by_css_selector(\"span.group\"): text", "change_group_value(self, field_name, text): wd = self.app.wd if text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear()", "self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def update_first_group(self): self.update_group_by_index(0,", "delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data):", "= self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd =", "wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def", "def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text): wd =", "wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def", "is None: wd = self.app.wd self.go_to_group_page() self.group_cache = [] for element in wd.find_elements_by_css_selector(\"span.group\"):", "text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\",", "self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd =", "self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() 
self.group_cache =", "None def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd", "self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0):", "self.back_to_group_page() self.group_cache = None def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self):", "self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click()", "= self.app.wd self.go_to_group_page() self.group_cache = [] for element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text", "def create(self, group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation", "self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None def delete_groups(self):", "page\").click() def change_group_value(self, field_name, text): wd = self.app.wd if text is not None:", "= app def create(self, group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit", "def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None def delete_groups(self): wd =", "group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\"))", "self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def", "import Group class GroupHelper: def __init__(self, app): self.app = app def create(self, group):", "update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0)", "self.app = app def create(self, group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) #", "new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click()", "app): self.app = app def create(self, group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group)", "= self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def", "range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): 
self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache", "back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text): wd = self.app.wd", "return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self): if self.group_cache is None: wd =", "group_cache = None def get_group_list(self): if self.group_cache is None: wd = self.app.wd self.go_to_group_page()", "field_name, text): wd = self.app.wd if text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text)", "self.delete_groups() self.group_cache = None def delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self):", "= None def delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def", "len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self,", "wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache", "self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def", "wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self): if self.group_cache", "wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd", "wd = self.app.wd if text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self,", "self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def select_group_by_index(self, index):", "group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self):", "= self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self): if self.group_cache is", "select_all_groups(self): wd = self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups()", "self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache", 
"creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click()", "None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer)", "self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) #", "self.app.wd self.go_to_group_page() self.group_cache = [] for element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text group_id", "self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text): wd = self.app.wd if text is", "create(self, group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click()", "select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd = self.app.wd self.go_to_group_page() for", "self.group_cache = [] for element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text group_id = element.find_element_by_name(\"selected[]\").get_attribute(\"value\")", "self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None def delete_groups(self): wd", "self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self): if self.group_cache is None:", "self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None", "select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) >", "group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page()", "self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None", "= None def select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd =", "i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index)", "def count(self): wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self):", 
"[] for element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text group_id = element.find_element_by_name(\"selected[]\").get_attribute(\"value\") self.group_cache.append(Group(name=text, id=group_id))", "# Submit group update wd.find_element_by_name(\"update\").click() self.back_to_group_page() self.group_cache = None def update_first_group(self): self.update_group_by_index(0, new_group_data)", "self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index):", "update_group_by_index(self, index, new_group_data): wd = self.app.wd self.go_to_group_page() self.select_group_by_index(index) wd.find_element_by_name(\"edit\").click() self.fill_form_group(new_group_data) # Submit group", "None def get_group_list(self): if self.group_cache is None: wd = self.app.wd self.go_to_group_page() self.group_cache =", "def select_all_groups(self): wd = self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self):", "app def create(self, group): wd = self.app.wd self.go_to_group_page() wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group", "for element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text group_id = element.find_element_by_name(\"selected[]\").get_attribute(\"value\") self.group_cache.append(Group(name=text, id=group_id)) return", "wd.find_element_by_name(\"new\").click() self.fill_form_group(group) # Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def select_group_by_index(self,", "self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index, new_group_data): wd = self.app.wd", "self.group_cache is None: wd = self.app.wd self.go_to_group_page() self.group_cache = [] for element in", "def delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0) def update_group_by_index(self, index,", "self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd = self.app.wd self.go_to_group_page() return", "def __init__(self, app): self.app = app def create(self, group): wd = self.app.wd self.go_to_group_page()", "len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def get_group_list(self): if self.group_cache is None: wd = self.app.wd", "wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self):", "= self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text): wd = self.app.wd if text", "get_group_list(self): if self.group_cache is None: wd = self.app.wd self.go_to_group_page() self.group_cache = [] for", "update_first_group(self): self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd if not", "is not None: 
wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header)", "def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\"))", "self.group_cache = None def delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def delete_first_group(self): self.delete_group_by_index(0)", "Submit group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def select_group_by_index(self, index): wd =", "self.update_group_by_index(0, new_group_data) def select_first_group(self): self.select_group_by_index(0) def go_to_group_page(self): wd = self.app.wd if not (wd.current_url.endswith(\"/group.php\")", "= [] for element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text group_id = element.find_element_by_name(\"selected[]\").get_attribute(\"value\") self.group_cache.append(Group(name=text,", "not (wd.current_url.endswith(\"/group.php\") and len(wd.find_elements_by_name(\"new\")) > 0): wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group", "wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text): wd = self.app.wd if", "self.group_cache = None def select_group_by_index(self, index): wd = self.app.wd wd.find_elements_by_name(\"selected[]\")[index].click() def select_all_groups(self): wd", "wd = self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups()", "from model.group import Group class GroupHelper: def __init__(self, app): self.app = app def", "group creation wd.find_element_by_name(\"submit\").click() self.back_to_group_page() self.group_cache = None def select_group_by_index(self, index): wd = self.app.wd", "wd.find_element_by_link_text(\"groups\").click() def back_to_group_page(self): wd = self.app.wd wd.find_element_by_link_text(\"group page\").click() def change_group_value(self, field_name, text): wd", "text): wd = self.app.wd if text is not None: wd.find_element_by_name(field_name).click() wd.find_element_by_name(field_name).clear() wd.find_element_by_name(field_name).send_keys(text) def", "self.go_to_group_page() self.group_cache = [] for element in wd.find_elements_by_css_selector(\"span.group\"): text = element.text group_id =", "for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page()", "in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups()", "self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None def delete_groups(self): wd 
= self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page()", "self.select_group_by_index(index) self.delete_groups() self.group_cache = None def delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click() self.back_to_group_page() def", "fill_form_group(self, group): self.change_group_value(\"group_name\", group.name) self.change_group_value(\"group_header\", group.header) self.change_group_value(\"group_footer\", group.footer) def count(self): wd = self.app.wd", "group.footer) def count(self): wd = self.app.wd self.go_to_group_page() return len(wd.find_elements_by_name(\"selected[]\")) group_cache = None def", "if self.group_cache is None: wd = self.app.wd self.go_to_group_page() self.group_cache = [] for element", "self.app.wd self.go_to_group_page() for i in range(len(wd.find_elements_by_name(\"selected[]\"))): wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self,", "index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache = None def delete_groups(self): wd = self.app.wd wd.find_element_by_name(\"delete\").click()", "wd.find_elements_by_name(\"selected[]\")[i].click() def test_delete_all_groups(self): self.select_all_groups() self.delete_groups() def delete_group_by_index(self, index): self.go_to_group_page() self.select_group_by_index(index) self.delete_groups() self.group_cache =" ]
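# The helper above memoizes the group list in group_cache and resets the
# cache to None after every mutating action (create/delete/update), so the
# next get_group_list() refetches from the page. Below is a minimal,
# self-contained sketch of that same pattern; FakePage is a hypothetical
# stand-in used only for illustration, not part of the helper.
class FakePage:
    def __init__(self):
        self._rows = ["admins", "users"]  # pretend page content
        self._cache = None

    def get_rows(self):
        if self._cache is None:           # slow "fetch" only on cache miss
            self._cache = list(self._rows)
        return list(self._cache)          # hand out a copy, like list(self.group_cache)

    def add_row(self, name):
        self._rows.append(name)
        self._cache = None                # invalidate, mirroring self.group_cache = None


page = FakePage()
print(page.get_rows())   # ['admins', 'users'] - fetched
page.add_row("guests")
print(page.get_rows())   # ['admins', 'users', 'guests'] - refetched after invalidation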
"""Assign to string the japanese word ネコ.
Declare a new string _s and initialize it with the literal value "ネコ"
(which means "cat" in japanese)
Source: programming-idioms.org
"""
# Implementation author: cym13
# Created on 2015-11-30T12:37:27.133314Z
# Last modified on 2015-11-30T12:37:27.133314Z
# Version 1
s = "ネコ"
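# A quick aside, not part of the original idiom: Python 3 str is Unicode,
# so the two-kanji literal is two code points but six bytes in UTF-8.
# The variable name below is hypothetical, chosen for the demo.
cat = "ネコ"
print(len(cat))                  # 2 code points
print(len(cat.encode("utf-8")))  # 6 bytes (each katakana takes 3 bytes)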
<reponame>ChalkLab/SciFlow<filename>workflow/urls.py
""" urls for the workflow app """
from django.urls import path
from workflow import views

urlpatterns = [
    path('logs', views.logs, name='logs'),
    path('logs/<lid>', views.viewlog, name='viewlog'),
]
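# A hedged sketch of the view signatures these routes imply; workflow/views.py
# itself is not shown here, so the bodies below are assumptions for
# illustration only.
from django.http import HttpResponse


def logs(request):
    # would render the list of workflow log entries
    return HttpResponse("all logs")


def viewlog(request, lid):
    # the <lid> path converter arrives as a keyword argument
    return HttpResponse(f"log {lid}")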
import math


def sieve(n):
    # Very simple (and inefficient) sieve: walk the candidates 2..n and
    # remove every multiple of each remaining number.
    primes = list(range(2, n + 1))
    i = 0
    while i < len(primes):
        no = primes[i]
        m = 2
        while (no * m) <= max(primes):
            if primes.count(no * m) > 0:
                primes.remove(no * m)
            m += 1
        i += 1
    return primes


def maxPower(n, limit):
    # Largest exponent i such that n**i does not exceed the limit
    i = 1
    while math.pow(n, i + 1) <= limit:
        i += 1
    return i


limit = int(input('Limit: '))
primes = sieve(limit)
s = 1
for x in primes:
    # x ** maxPower(x, limit) is the highest power of the prime x not
    # exceeding the limit; the product of these prime powers is the least
    # common multiple of 1..limit. (math.pow returns floats, so s is a float.)
    print(math.pow(x, maxPower(x, limit)))
    s *= math.pow(x, maxPower(x, limit))
print(s)
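# For comparison, a minimal sketch (not from the original script) computing
# the same quantity, the least common multiple of 1..limit, with exact
# integer arithmetic via math.gcd instead of prime powers.
import math


def lcm_up_to(limit):
    result = 1
    for n in range(2, limit + 1):
        result = result * n // math.gcd(result, n)
    return result


print(lcm_up_to(10))  # 2520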
import stripe
from ...payment.gateways.stripe.plugin import StripeGatewayPlugin
from ...plugins import manager
from ... import settings
from ...payment import gateway as payment_gateway, models
from ...payment.utils import fetch_customer_id
from ..utils.filters import filter_by_query_param

PAYMENT_SEARCH_FIELDS = ["id"]


def resolve_client_token(user, gateway: str):
    customer_id = fetch_customer_id(user, gateway)
    return payment_gateway.get_client_token(gateway, customer_id)


def resolve_payments(info, query):
    queryset = models.Payment.objects.all().distinct()
    return filter_by_query_param(queryset, query, PAYMENT_SEARCH_FIELDS)


def resolve_payment_meta(payment_intent_id):
    stripe_plugin = manager.get_plugins_manager().get_plugin("mirumee.payments.stripe")
    if isinstance(stripe_plugin, StripeGatewayPlugin):
        return stripe_plugin.get_payment_meta(payment_intent_id)
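# A hypothetical stand-in illustrating what a query-param filter helper of
# this shape typically does; the real filter_by_query_param lives in
# ..utils.filters and may differ, so treat this only as a sketch.
from django.db.models import Q


def filter_by_query_param_sketch(queryset, query, search_fields):
    # No query string: return the queryset untouched
    if not query:
        return queryset
    # OR together a case-insensitive containment test per search field
    q = Q()
    for field in search_fields:
        q |= Q(**{f"{field}__icontains": query})
    return queryset.filter(q)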
print('='*15, '#1', '='*15)
# Sort function - sorts the list in place, from the smallest to the
# largest value
L1 = [1, 7, 4, -2, 3]
L2 = ["Cherry", "Apple", "Blueberry"]
L1.sort()
print(L1)
L2.sort()
print(L2)

print('='*15, '#2', '='*15)
# Sorted
L2 = ["Cherry", "Apple", "Blueberry"]
L3 = sorted(L2)
print(L3)
print(sorted(L2))
print(L2)
print("--------")
L2.sort()
print(L2)
print(L2.sort())  # list.sort() mutates in place and returns None

print('='*15, '#3', '='*15)
# Optional reverse parameter
L2 = ["Cherry", "Apple", "Blueberry"]
print(sorted(L2, reverse=True))

print('='*15, '#4', '='*15)
# Optional key parameters
L1 = [1, 7, 4, -2, 3]


def absolute(x):
    if x >= 0:
        return x
    else:
        return -x


print(absolute(3))
print(absolute(-119))
for y in L1:
    print(absolute(y))

print('='*15, '#5', '='*15)
# Optional key parameters
L1 = [1, 7, 4, -2, 3]


def absolute(x):
    if x >= 0:
        return x
    else:
        return -x


L2 = sorted(L1, key=absolute)
# The "key" is saying "I want you to sort according to ... (it will sort
# according to the instructions given after the equals sign)"
print(L2)
# or in reverse order
print(sorted(L1, reverse=True, key=absolute))

print('='*15, '#6', '='*15)
# Optional key parameters
L1 = [1, 7, 4, -2, 3]


def absolute(x):
    print('--- figuring out what to write on the post-it note for ' + str(x))
    if x >= 0:
        return x
    else:
        return -x


print("About to call sorted")
L2 = sorted(L1, key=absolute)
print("Finish execution of sorted")
print(L2)

print('='*15, '#7', '='*15)
# Sorting a dictionary
L = ['E', 'F', 'B', 'A', 'D', 'I', 'I', 'C', 'B', 'A', 'D', 'D', 'E', 'D']
d = {}
for x in L:
    if x in d:
        # "x" is the key inserted into d and d[x] is its value, so we add
        # 1 to the count when x has already been seen before
        d[x] = d[x] + 1
    else:
        # If x is not yet in d, start its count at 1
        d[x] = 1
for x in sorted(d.keys(), key=lambda k: d[k]):
    # sorted() iterates over just the keys of d (ex: E, F, B etc), but the
    # "key" argument tells it to sort by something other than the key
    # itself, so we pass a lambda 'function'.
    # The lambda's variable is k and we sort by d[k], the count stored for
    # each key, so the key with the lowest count comes first.
    print(f'{x} appears {d[x]} times')

print('='*15, '#8', '='*15)
# Breaking Ties: Second Sorting
tups = [("A", 3, 2), ("C", 1, 4), ("B", 3, 1), ("A", 2, 4), ("C", 1, 2)]
for tup in sorted(tups):
    print(tup)

print('='*15, '#9', '='*15)
# Breaking Ties: Second Sorting
fruits = ['peach', 'kiwi', 'apple', 'blueberry', 'papaya', 'mango', 'pear']
new_order = sorted(fruits, key=lambda fruit_name: (len(fruit_name), fruit_name))
for fruit in new_order:
    print(fruit)

print('='*15, '#10', '='*15)
# When to use a Lambda Expression - a lambda expression is for short,
# simple uses; when things get complicated, use a function
states = {"Minnesota": ['St.Paul', 'Minneapolis', 'Saint Cloud', 'Stillwater'],
          "Michigan": ['Ann Arbor', 'Traverse City', 'Lansing', 'Kalamazoo'],
          "Washington": ["Seatle", "Tacoma", "Olympia", "Vancouver"]}


def s_cities_count(cities_list):
    # return a count of how many cities begin with "S"
    ct = 0
    for city in cities_list:
        if city[0] == 'S':
            ct = ct + 1
    return ct


def s_cities_count_for_state(state):
    cities_list = states[state]
    return s_cities_count(cities_list)


print(sorted(states, key=s_cities_count_for_state))
quero", "4, -2, 3] def absolute(x): if x>=0: return x else: return -x L2", "igual)\" print(L2) #or in reverse order print(sorted(L1, reverse = True, key = absolute))", "\"key\" está falando \"Eu quero que você organize de acordo com ... (irá", "Sorting fruits = ['peach', 'kiwi', 'apple', 'blueberry', 'papaya', 'mango', 'pear'] new_order = sorted(fruits,", "- Vai organizar do menos para o maior valor em lista L1 =", "4, -2, 3] def absolute(x): print('--- figuring out what to write on the", "L2 = [\"Cherry\", \"Apple\", \"Blueberry\"] L1.sort() print(L1) L2.sort() print(L2) print('='*15,'#2','='*15) #Sorted L2= [\"Cherry\",", "the d[k] who is the value from the keys, # so the lowest", "= 1 #If x is not in d, so increment x as 1", "organized to be the first. print(f'{x} appears {d[x]} times') print('='*15,'#8','='*15) #Breaking Ties: Second", "depois do igual)\" print(L2) #or in reverse order print(sorted(L1, reverse = True, key", "'F', 'B', 'A', 'D', 'I', 'I', 'C', 'B', 'A', 'D', 'D', 'E', 'D']", "'E', 'D'] d = {} for x in L: if x in d:", "'D'] d = {} for x in L: if x in d: d[x]", "print(L2.sort()) print('='*15,'#3','='*15) #Optional reverse parameter L2 = [\"Cherry\", \"Apple\", \"Blueberry\"] print(sorted(L2,reverse = True))", "(len(fruit_name), fruit_name)) for fruit in new_order: print (fruit) print('='*15,'#10','='*15) #When to use a", "will be key and x will be insert in d and \"d[x] is", "tup in sorted(tups): print(tup) print('='*15,'#9','='*15) #Breaking Ties: Second Sorting fruits = ['peach', 'kiwi',", "x in L: if x in d: d[x] = d[x] +1 # \"x\"", "'Kalamazoo'], \"Washington\":[\"Seatle\", \"Tacoma\", \"Olympia\", \"Vancouver\"]} def s_cities_count(cities_list): #return a count of how many", "#Optional reverse parameter L2 = [\"Cherry\", \"Apple\", \"Blueberry\"] print(sorted(L2,reverse = True)) print('='*15,'#4','='*15) #Optional", "City', 'Lansing', 'Kalamazoo'], \"Washington\":[\"Seatle\", \"Tacoma\", \"Olympia\", \"Vancouver\"]} def s_cities_count(cities_list): #return a count of", "the value, so we gonna sum 1 to the x if this has", "maior valor em lista L1 = [1, 7, 4, -2, 3] L2 =", "\"Washington\":[\"Seatle\", \"Tacoma\", \"Olympia\", \"Vancouver\"]} def s_cities_count(cities_list): #return a count of how many cities", "absolute(x): if x>=0: return x else: return -x L2 = sorted(L1, key=absolute )", "key parameters L1 = [1, 7, 4, -2, 3] def absolute(x): print('--- figuring", "new_order: print (fruit) print('='*15,'#10','='*15) #When to use a Lambda Expression - A lambda", "for tup in sorted(tups): print(tup) print('='*15,'#9','='*15) #Breaking Ties: Second Sorting fruits = ['peach',", "So, we gonna call the \"key\" (We gonna sorting sth after the key),", "gonna organize by the d[k] who is the value from the keys, #", "print('='*15,'#9','='*15) #Breaking Ties: Second Sorting fruits = ['peach', 'kiwi', 'apple', 'blueberry', 'papaya', 'mango',", "print(absolute(-119)) for y in L1: print(absolute(y)) print('='*15,'#5','='*15) #Optional key parameters L1 = [1,", "L1: print(absolute(y)) print('='*15,'#5','='*15) #Optional key parameters L1 = [1, 7, 4, -2, 3]", "if x>=0: return x else: return -x L2 = sorted(L1, key=absolute ) #A", "into the sorted #(Organized by the highest value to lowest) choosing just the", "after the key), we call the 'function' Lambda # The variable for lambda", "to call sorted\") L2= sorted(L1, key=absolute) print(\"Finish execution of sorted\") print(L2) print('='*15,'#7','='*15) #Sorting", "key parameters L1 = [1, 7, 4, -2, 3] def absolute(x): if x>=0:", 
"[\"Cherry\", \"Apple\", \"Blueberry\"] L1.sort() print(L1) L2.sort() print(L2) print('='*15,'#2','='*15) #Sorted L2= [\"Cherry\", \"Apple\", \"Blueberry\"]", "1), (\"A\", 2, 4), (\"C\", 1, 2)] for tup in sorted(tups): print(tup) print('='*15,'#9','='*15)", "figuring out what to write on the post-it note for ' +str(x)) if", "absolute)) print('='*15,'#6','='*15) #Optional key parameters L1 = [1, 7, 4, -2, 3] def", "do igual)\" print(L2) #or in reverse order print(sorted(L1, reverse = True, key =", "3] def absolute(x): print('--- figuring out what to write on the post-it note", "'function' Lambda # The variable for lambda is k and we gonna organize", "print(L2) print(\"--------\") L2.sort() print(L2) print(L2.sort()) print('='*15,'#3','='*15) #Optional reverse parameter L2 = [\"Cherry\", \"Apple\",", "{} for x in L: if x in d: d[x] = d[x] +1", "insert in d and \"d[x] is the value, so we gonna sum 1", "for x in sorted(d.keys(), key=lambda k: d[k]): #The \"x\" is gonna make a", "['St.Paul', 'Minneapolis', 'Saint Cloud', 'Stillwater'], \"Michigan\":['Ann Arbor', 'Traverse City', 'Lansing', 'Kalamazoo'], \"Washington\":[\"Seatle\", \"Tacoma\",", "'blueberry', 'papaya', 'mango', 'pear'] new_order = sorted(fruits, key = lambda fruit_name: (len(fruit_name), fruit_name))", "= True)) print('='*15,'#4','='*15) #Optional key parameters L1 = [1, 7, 4, -2, 3]", "print('='*15,'#2','='*15) #Sorted L2= [\"Cherry\", \"Apple\", \"Blueberry\"] L3= sorted(L2) print(L3) print(sorted(L2)) print(L2) print(\"--------\") L2.sort()", "\"x\" is gonna make a comparation into the sorted #(Organized by the highest", "for lambda is k and we gonna organize by the d[k] who is", "parameters L1 = [1, 7, 4, -2, 3] def absolute(x): print('--- figuring out", "0: return x else: return -x print(absolute(3)) print(absolute(-119)) for y in L1: print(absolute(y))", "Sorting tups = [(\"A\", 3, 2), (\"C\", 1, 4), (\"B\", 3, 1), (\"A\",", "a dictionary L = ['E', 'F', 'B', 'A', 'D', 'I', 'I', 'C', 'B',", "in sorted(d.keys(), key=lambda k: d[k]): #The \"x\" is gonna make a comparation into", "quero que você organize de acordo com ... 
(irá organizar de acordo com", "print(L2) print('='*15,'#2','='*15) #Sorted L2= [\"Cherry\", \"Apple\", \"Blueberry\"] L3= sorted(L2) print(L3) print(sorted(L2)) print(L2) print(\"--------\")", "else: d[x] = 1 #If x is not in d, so increment x", "sorted(L1, key=absolute ) #A \"key\" está falando \"Eu quero que você organize de", "in new_order: print (fruit) print('='*15,'#10','='*15) #When to use a Lambda Expression - A", "complicated use a function states = {\"Minnesota\": ['St.Paul', 'Minneapolis', 'Saint Cloud', 'Stillwater'], \"Michigan\":['Ann", "organizar de acordo com os comandos dados depois do igual)\" print(L2) #or in", "as 1 for x in sorted(d.keys(), key=lambda k: d[k]): #The \"x\" is gonna", "#Sorted L2= [\"Cherry\", \"Apple\", \"Blueberry\"] L3= sorted(L2) print(L3) print(sorted(L2)) print(L2) print(\"--------\") L2.sort() print(L2)", "choosing just the keys from d (ex: E, F, B etc) # So,", "of how many cities begin with \"S\" ct = 0 for city in", "print(tup) print('='*15,'#9','='*15) #Breaking Ties: Second Sorting fruits = ['peach', 'kiwi', 'apple', 'blueberry', 'papaya',", "cities begin with \"S\" ct = 0 for city in cities_list: if city[0]", "True, key = absolute)) print('='*15,'#6','='*15) #Optional key parameters L1 = [1, 7, 4,", "d[k] who is the value from the keys, # so the lowest key", "city[0] == 'S': ct = ct +1 return ct def s_cities_count_for_state(state): cities_list =", "falando \"Eu quero que você organize de acordo com ... (irá organizar de", "gonna call the \"key\" (We gonna sorting sth after the key), we call", "states = {\"Minnesota\": ['St.Paul', 'Minneapolis', 'Saint Cloud', 'Stillwater'], \"Michigan\":['Ann Arbor', 'Traverse City', 'Lansing',", "Lambda # The variable for lambda is k and we gonna organize by", "print(L2) print('='*15,'#7','='*15) #Sorting a dictionary L = ['E', 'F', 'B', 'A', 'D', 'I',", "-x print(\"About to call sorted\") L2= sorted(L1, key=absolute) print(\"Finish execution of sorted\") print(L2)", "has already seen before else: d[x] = 1 #If x is not in", "s_cities_count(cities_list): #return a count of how many cities begin with \"S\" ct =", "[\"Cherry\", \"Apple\", \"Blueberry\"] L3= sorted(L2) print(L3) print(sorted(L2)) print(L2) print(\"--------\") L2.sort() print(L2) print(L2.sort()) print('='*15,'#3','='*15)", "in reverse order print(sorted(L1, reverse = True, key = absolute)) print('='*15,'#6','='*15) #Optional key", "is the value from the keys, # so the lowest key value will", "print(\"--------\") L2.sort() print(L2) print(L2.sort()) print('='*15,'#3','='*15) #Optional reverse parameter L2 = [\"Cherry\", \"Apple\", \"Blueberry\"]", "many cities begin with \"S\" ct = 0 for city in cities_list: if", "= [(\"A\", 3, 2), (\"C\", 1, 4), (\"B\", 3, 1), (\"A\", 2, 4),", "print('='*15,'#8','='*15) #Breaking Ties: Second Sorting tups = [(\"A\", 3, 2), (\"C\", 1, 4),", "return x else: return -x L2 = sorted(L1, key=absolute ) #A \"key\" está", "the key), we call the 'function' Lambda # The variable for lambda is", "para o maior valor em lista L1 = [1, 7, 4, -2, 3]", "times') print('='*15,'#8','='*15) #Breaking Ties: Second Sorting tups = [(\"A\", 3, 2), (\"C\", 1,", "'pear'] new_order = sorted(fruits, key = lambda fruit_name: (len(fruit_name), fruit_name)) for fruit in", "a function states = {\"Minnesota\": ['St.Paul', 'Minneapolis', 'Saint Cloud', 'Stillwater'], \"Michigan\":['Ann Arbor', 'Traverse", "1 to the x if this has already seen before else: d[x] =", "de acordo com os comandos dados depois do igual)\" print(L2) #or in reverse", "in 
d: d[x] = d[x] +1 # \"x\" will be key and x", "else: return -x L2 = sorted(L1, key=absolute ) #A \"key\" está falando \"Eu", "dados depois do igual)\" print(L2) #or in reverse order print(sorted(L1, reverse = True,", "com os comandos dados depois do igual)\" print(L2) #or in reverse order print(sorted(L1,", "dictionary L = ['E', 'F', 'B', 'A', 'D', 'I', 'I', 'C', 'B', 'A',", "'Stillwater'], \"Michigan\":['Ann Arbor', 'Traverse City', 'Lansing', 'Kalamazoo'], \"Washington\":[\"Seatle\", \"Tacoma\", \"Olympia\", \"Vancouver\"]} def s_cities_count(cities_list):", "... (irá organizar de acordo com os comandos dados depois do igual)\" print(L2)", "a comparation into the sorted #(Organized by the highest value to lowest) choosing", "etc) # So, we gonna call the \"key\" (We gonna sorting sth after", "L3= sorted(L2) print(L3) print(sorted(L2)) print(L2) print(\"--------\") L2.sort() print(L2) print(L2.sort()) print('='*15,'#3','='*15) #Optional reverse parameter", "execution of sorted\") print(L2) print('='*15,'#7','='*15) #Sorting a dictionary L = ['E', 'F', 'B',", "from d (ex: E, F, B etc) # So, we gonna call the", "\"Olympia\", \"Vancouver\"]} def s_cities_count(cities_list): #return a count of how many cities begin with", "print('='*15,'#6','='*15) #Optional key parameters L1 = [1, 7, 4, -2, 3] def absolute(x):", "who is the value from the keys, # so the lowest key value", "cities_list: if city[0] == 'S': ct = ct +1 return ct def s_cities_count_for_state(state):", "and x will be insert in d and \"d[x] is the value, so" ]
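
# Aside (not from the original lesson): section #7's idea written the other
# way around. sorted() over d.items() with reverse=True lists the most common
# letters first; `counts` below is just a hand-built stand-in for d above.
counts = {'D': 4, 'E': 2, 'A': 2, 'I': 2, 'B': 2, 'F': 1, 'C': 1}
for letter, n in sorted(counts.items(), key=lambda kv: kv[1], reverse=True):
    print(f'{letter} appears {n} times')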
[ "import Pallet from os.path import join class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services", "import join class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid", "Pallet from os.path import join class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services =", "project. ''' from forklift.models import Pallet from os.path import join class VistaPallet(Pallet): def", "VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde')", "self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb')", "for the vista project. ''' from forklift.models import Pallet from os.path import join", "= [('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data =", "utf8 * ''' vista_pallet.py A module that contains a forklift pallet definition for", "module that contains a forklift pallet definition for the vista project. ''' from", "''' vista_pallet.py A module that contains a forklift pallet definition for the vista", "vista_pallet.py A module that contains a forklift pallet definition for the vista project.", "the vista project. ''' from forklift.models import Pallet from os.path import join class", "'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def", "= join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def build(self, config):", "<reponame>agrc/vista #!/usr/bin/env python # * coding: utf8 * ''' vista_pallet.py A module that", "'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def build(self, config): self.add_crates(['VistaBallotAreas', 'VistaBallotAreas_Proposed'],", "python # * coding: utf8 * ''' vista_pallet.py A module that contains a", "from os.path import join class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista',", "* coding: utf8 * ''' vista_pallet.py A module that contains a forklift pallet", "self.sgid = join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def build(self,", "that contains a forklift pallet definition for the vista project. 
''' from forklift.models", "join class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid =", "from forklift.models import Pallet from os.path import join class VistaPallet(Pallet): def __init__(self): super(VistaPallet,", "class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid = join(self.garage,", "''' from forklift.models import Pallet from os.path import join class VistaPallet(Pallet): def __init__(self):", "os.path import join class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')]", "__init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political =", "forklift.models import Pallet from os.path import join class VistaPallet(Pallet): def __init__(self): super(VistaPallet, self).__init__()", "forklift pallet definition for the vista project. ''' from forklift.models import Pallet from", "self.arcgis_services = [('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data", "join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def build(self, config): self.add_crates(['VistaBallotAreas',", "* ''' vista_pallet.py A module that contains a forklift pallet definition for the", "= join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def build(self, config): self.add_crates(['VistaBallotAreas', 'VistaBallotAreas_Proposed'], {'source_workspace': self.sgid,", "A module that contains a forklift pallet definition for the vista project. '''", "#!/usr/bin/env python # * coding: utf8 * ''' vista_pallet.py A module that contains", "contains a forklift pallet definition for the vista project. ''' from forklift.models import", "# * coding: utf8 * ''' vista_pallet.py A module that contains a forklift", "[('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political = join(self.staging_rack, 'political.gdb') self.copy_data = [self.political]", "join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def build(self, config): self.add_crates(['VistaBallotAreas', 'VistaBallotAreas_Proposed'], {'source_workspace': self.sgid, 'destination_workspace':", "definition for the vista project. ''' from forklift.models import Pallet from os.path import", "a forklift pallet definition for the vista project. ''' from forklift.models import Pallet", "self.political = join(self.staging_rack, 'political.gdb') self.copy_data = [self.political] def build(self, config): self.add_crates(['VistaBallotAreas', 'VistaBallotAreas_Proposed'], {'source_workspace':", "coding: utf8 * ''' vista_pallet.py A module that contains a forklift pallet definition", "def __init__(self): super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political", "pallet definition for the vista project. ''' from forklift.models import Pallet from os.path", "vista project. 
''' from forklift.models import Pallet from os.path import join class VistaPallet(Pallet):", "'political.gdb') self.copy_data = [self.political] def build(self, config): self.add_crates(['VistaBallotAreas', 'VistaBallotAreas_Proposed'], {'source_workspace': self.sgid, 'destination_workspace': self.political})", "super(VistaPallet, self).__init__() self.arcgis_services = [('Vista', 'MapServer')] self.sgid = join(self.garage, 'SGID.sde') self.political = join(self.staging_rack," ]
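
# A minimal smoke-test sketch, not part of the pallet itself. It assumes a
# working forklift install; 'Dev' is only a stand-in for whatever
# configuration string forklift passes to build().
if __name__ == '__main__':
    pallet = VistaPallet()
    pallet.build('Dev')
    print(pallet.sgid)       # where the crates read from
    print(pallet.political)  # where the data is copied to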
[ "for j in range(len(cartasB)): if cartasA[i] == cartasB[j]: break elif (j == len(cartasB)-1):", "ler_qtd(n, msg): n = int(input(msg)) while (n < 1) or (n > 10000):", "print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f' - Maximo de trocas e igual", "conjunto interseccao de a e tiro os repetidos for i in range(len(cartasB)): for", "de a e tiro os repetidos for i in range(len(cartasB)): for j in", "o conjunto interseccao de b e tiro os repetidos menor = inter_a if", "for cont in range(len(lista)): if lista[cont] not in l: l.append(lista[cont]) return l def", "= list() for cont in range(len(lista)): if lista[cont] not in l: l.append(lista[cont]) return", "Quantas cartas Alice possui? ') qb = ler_qtd(qb, ' Quantas cartas Beatriz possui?", "' Quantas cartas Beatriz possui? ') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a))", "in l: l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB): inter_a = list() inter_b =", "- \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta de {p}: ')) cartas.append(carta) print(' -", "== len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o conjunto interseccao de a e", "#Set cartas Alice qb = 0 b = list() #Set cartas Beatriz system('cls')", "inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o conjunto interseccao de b e tiro os", "j in range(len(cartasA)): if cartasB[i] == cartasA[j]: break elif (j == len(cartasA)-1): inter_b.append(cartasB[i])", "cartas.append(carta) print(' - OK!') sleep(1) #Espera 1s def retirar_repetidos(lista): l = list() for", "- Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd, p): \"\"\" set de cartas,", "Beatriz possui? ') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas =", "possui? ') qb = ler_qtd(qb, ' Quantas cartas Beatriz possui? ') preencher_set_cartas(a, qa,", "= list() #Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON ')) qa", "#Programa principal qa = 0 a = list() #Set cartas Alice qb =", "import sleep print() #Pular linha for cont in range(qtd): carta = int(input(f' -", "break elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o conjunto interseccao", "from time import sleep print() #Pular linha for cont in range(qtd): carta =", "p): \"\"\" set de cartas, qtd de cartas, p de pessoa \"\"\" from", "in range(len(cartasB)): if cartasA[i] == cartasB[j]: break elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a", "e tiro os repetidos menor = inter_a if len(inter_a) < len(inter_b) else inter_b", "\"\"\" set de cartas, qtd de cartas, p de pessoa \"\"\" from time", "while (n < 1) or (n > 10000): n = int(input(f' - Entrada", "ler_qtd(qb, ' Quantas cartas Beatriz possui? 
') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz')", "(j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o conjunto interseccao de b", "n = int(input(f' - Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd, p): \"\"\"", "set de cartas, qtd de cartas, p de pessoa \"\"\" from time import", "return n def preencher_set_cartas(cartas, qtd, p): \"\"\" set de cartas, qtd de cartas,", "cont in range(len(lista)): if lista[cont] not in l: l.append(lista[cont]) return l def qtd_trocas(cartasA,", "l = list() for cont in range(len(lista)): if lista[cont] not in l: l.append(lista[cont])", "(carta < 1) or (carta > 100000): carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m", "print() #Pular linha for cont in range(qtd): carta = int(input(f' - Digite a", "for j in range(len(cartasA)): if cartasB[i] == cartasA[j]: break elif (j == len(cartasA)-1):", "break elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o conjunto interseccao", "repetidos for i in range(len(cartasB)): for j in range(len(cartasA)): if cartasB[i] == cartasA[j]:", "< 1) or (carta > 100000): carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite", "qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f' - Maximo de trocas", "= retirar_repetidos(inter_a) #Pego o conjunto interseccao de a e tiro os repetidos for", "for i in range(len(cartasB)): for j in range(len(cartasA)): if cartasB[i] == cartasA[j]: break", "range(qtd): carta = int(input(f' - Digite a {cont+1} carta de {p}: ')) while", "retirar_repetidos(inter_a) #Pego o conjunto interseccao de a e tiro os repetidos for i", "retirar_repetidos(lista): l = list() for cont in range(len(lista)): if lista[cont] not in l:", "inter_a = list() inter_b = list() for i in range(len(cartasA)): for j in", "range(len(cartasA)): if cartasB[i] == cartasA[j]: break elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b =", "from os import system def ler_qtd(n, msg): n = int(input(msg)) while (n <", "= ler_qtd(qb, ' Quantas cartas Beatriz possui? 
') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb,", "de cartas, p de pessoa \"\"\" from time import sleep print() #Pular linha", "= list() #Set cartas Alice qb = 0 b = list() #Set cartas", "< 1) or (n > 10000): n = int(input(f' - Entrada invalida!{msg}')) return", "10000): n = int(input(f' - Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd, p):", "{cont+1} carta de {p}: ')) while (carta < 1) or (carta > 100000):", "range(len(lista)): if lista[cont] not in l: l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB): inter_a", "a e tiro os repetidos for i in range(len(cartasB)): for j in range(len(cartasA)):", "qa = 0 a = list() #Set cartas Alice qb = 0 b", "inter_b return len(menor) #Programa principal qa = 0 a = list() #Set cartas", "l: l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB): inter_a = list() inter_b = list()", "invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd, p): \"\"\" set de cartas, qtd de", "pessoa \"\"\" from time import sleep print() #Pular linha for cont in range(qtd):", "= int(input(f' - Digite a {cont+1} carta de {p}: ')) while (carta <", "int(input(msg)) while (n < 1) or (n > 10000): n = int(input(f' -", "repetidos menor = inter_a if len(inter_a) < len(inter_b) else inter_b return len(menor) #Programa", "def qtd_trocas(cartasA, cartasB): inter_a = list() inter_b = list() for i in range(len(cartasA)):", "= inter_a if len(inter_a) < len(inter_b) else inter_b return len(menor) #Programa principal qa", "cartas Alice possui? ') qb = ler_qtd(qb, ' Quantas cartas Beatriz possui? ')", "len(inter_a) < len(inter_b) else inter_b return len(menor) #Programa principal qa = 0 a", "o conjunto interseccao de a e tiro os repetidos for i in range(len(cartasB)):", "')) cartas.append(carta) print(' - OK!') sleep(1) #Espera 1s def retirar_repetidos(lista): l = list()", "cartas, qtd de cartas, p de pessoa \"\"\" from time import sleep print()", "os import system def ler_qtd(n, msg): n = int(input(msg)) while (n < 1)", "(n < 1) or (n > 10000): n = int(input(f' - Entrada invalida!{msg}'))", "print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f' - Maximo de trocas e igual a", "for cont in range(qtd): carta = int(input(f' - Digite a {cont+1} carta de", "(carta > 100000): carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta", "print(' - OK!') sleep(1) #Espera 1s def retirar_repetidos(lista): l = list() for cont", "sleep print() #Pular linha for cont in range(qtd): carta = int(input(f' - Digite", "de {p}: ')) cartas.append(carta) print(' - OK!') sleep(1) #Espera 1s def retirar_repetidos(lista): l", "len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o conjunto interseccao de b e tiro", "preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f'", "cont in range(qtd): carta = int(input(f' - Digite a {cont+1} carta de {p}:", "if cartasB[i] == cartasA[j]: break elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b)", "qb = ler_qtd(qb, ' Quantas cartas Beatriz possui? ') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b,", "possui? 
') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a,", "de pessoa \"\"\" from time import sleep print() #Pular linha for cont in", "int(input(f' - Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd, p): \"\"\" set de", "\"\"\" from time import sleep print() #Pular linha for cont in range(qtd): carta", "not in l: l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB): inter_a = list() inter_b", "i in range(len(cartasB)): for j in range(len(cartasA)): if cartasB[i] == cartasA[j]: break elif", "elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o conjunto interseccao de", "linha for cont in range(qtd): carta = int(input(f' - Digite a {cont+1} carta", "len(menor) #Programa principal qa = 0 a = list() #Set cartas Alice qb", "Alice possui? ') qb = ler_qtd(qb, ' Quantas cartas Beatriz possui? ') preencher_set_cartas(a,", "in range(len(cartasB)): for j in range(len(cartasA)): if cartasB[i] == cartasA[j]: break elif (j", "CARTAS POKEMON ')) qa = ler_qtd(qa, ' Quantas cartas Alice possui? ') qb", "')) qa = ler_qtd(qa, ' Quantas cartas Alice possui? ') qb = ler_qtd(qb,", "tiro os repetidos menor = inter_a if len(inter_a) < len(inter_b) else inter_b return", "= int(input(msg)) while (n < 1) or (n > 10000): n = int(input(f'", "int(input(f' - Digite a {cont+1} carta de {p}: ')) while (carta < 1)", "n = int(input(msg)) while (n < 1) or (n > 10000): n =", "#Pego o conjunto interseccao de a e tiro os repetidos for i in", "print('{:=^50}'.format(' TROCA DE CARTAS POKEMON ')) qa = ler_qtd(qa, ' Quantas cartas Alice", "list() for i in range(len(cartasA)): for j in range(len(cartasB)): if cartasA[i] == cartasB[j]:", "cartasB[i] == cartasA[j]: break elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego", "list() for cont in range(len(lista)): if lista[cont] not in l: l.append(lista[cont]) return l", "range(len(cartasB)): for j in range(len(cartasA)): if cartasB[i] == cartasA[j]: break elif (j ==", "{cont+1} carta de {p}: ')) cartas.append(carta) print(' - OK!') sleep(1) #Espera 1s def", "if cartasA[i] == cartasB[j]: break elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a)", "in range(len(cartasA)): if cartasB[i] == cartasA[j]: break elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b", "qtd de cartas, p de pessoa \"\"\" from time import sleep print() #Pular", "n def preencher_set_cartas(cartas, qtd, p): \"\"\" set de cartas, qtd de cartas, p", "conjunto interseccao de b e tiro os repetidos menor = inter_a if len(inter_a)", "#Pular linha for cont in range(qtd): carta = int(input(f' - Digite a {cont+1}", "inter_b = list() for i in range(len(cartasA)): for j in range(len(cartasB)): if cartasA[i]", "inter_a = retirar_repetidos(inter_a) #Pego o conjunto interseccao de a e tiro os repetidos", "== cartasB[j]: break elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o", "= 0 b = list() #Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS", "i in range(len(cartasA)): for j in range(len(cartasB)): if cartasA[i] == cartasB[j]: break elif", "cartasB): inter_a = list() inter_b = list() for i in range(len(cartasA)): for j", "de {p}: ')) while (carta < 1) or (carta > 100000): carta =", "#Espera 1s def retirar_repetidos(lista): l = list() for cont in range(len(lista)): if lista[cont]", "else inter_b 
return len(menor) #Programa principal qa = 0 a = list() #Set", "a {cont+1} carta de {p}: ')) cartas.append(carta) print(' - OK!') sleep(1) #Espera 1s", "> 10000): n = int(input(f' - Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd,", "' Quantas cartas Alice possui? ') qb = ler_qtd(qb, ' Quantas cartas Beatriz", "elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o conjunto interseccao de", "== len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o conjunto interseccao de b e", "')) while (carta < 1) or (carta > 100000): carta = int(input(f' -", "retirar_repetidos(inter_b) #Pego o conjunto interseccao de b e tiro os repetidos menor =", "carta de {p}: ')) while (carta < 1) or (carta > 100000): carta", "= int(input(f' - Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd, p): \"\"\" set", "\\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta de {p}: ')) cartas.append(carta) print(' - OK!')", "{p}: ')) cartas.append(carta) print(' - OK!') sleep(1) #Espera 1s def retirar_repetidos(lista): l =", "Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON ')) qa = ler_qtd(qa, ' Quantas", "tiro os repetidos for i in range(len(cartasB)): for j in range(len(cartasA)): if cartasB[i]", "lista[cont] not in l: l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB): inter_a = list()", "TROCA DE CARTAS POKEMON ')) qa = ler_qtd(qa, ' Quantas cartas Alice possui?", "= ler_qtd(qa, ' Quantas cartas Alice possui? ') qb = ler_qtd(qb, ' Quantas", "ler_qtd(qa, ' Quantas cartas Alice possui? ') qb = ler_qtd(qb, ' Quantas cartas", "def retirar_repetidos(lista): l = list() for cont in range(len(lista)): if lista[cont] not in", "cartasA[j]: break elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o conjunto", "l def qtd_trocas(cartasA, cartasB): inter_a = list() inter_b = list() for i in", "') qb = ler_qtd(qb, ' Quantas cartas Beatriz possui? ') preencher_set_cartas(a, qa, 'Alice')", "a {cont+1} carta de {p}: ')) while (carta < 1) or (carta >", "Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas, qtd, p): \"\"\" set de cartas, qtd", "system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON ')) qa = ler_qtd(qa, ' Quantas cartas", "interseccao de b e tiro os repetidos menor = inter_a if len(inter_a) <", "sleep(1) #Espera 1s def retirar_repetidos(lista): l = list() for cont in range(len(lista)): if", "menor = inter_a if len(inter_a) < len(inter_b) else inter_b return len(menor) #Programa principal", "de b e tiro os repetidos menor = inter_a if len(inter_a) < len(inter_b)", "qb = 0 b = list() #Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE", "'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f' - Maximo de trocas e", "qtd_trocas(cartasA, cartasB): inter_a = list() inter_b = list() for i in range(len(cartasA)): for", "os repetidos menor = inter_a if len(inter_a) < len(inter_b) else inter_b return len(menor)", "cartas Beatriz possui? 
') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas", "b = list() #Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON '))", "import system def ler_qtd(n, msg): n = int(input(msg)) while (n < 1) or", "msg): n = int(input(msg)) while (n < 1) or (n > 10000): n", "qtd, p): \"\"\" set de cartas, qtd de cartas, p de pessoa \"\"\"", "len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o conjunto interseccao de a e tiro", "= retirar_repetidos(inter_b) #Pego o conjunto interseccao de b e tiro os repetidos menor", "or (carta > 100000): carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1}", "range(len(cartasA)): for j in range(len(cartasB)): if cartasA[i] == cartasB[j]: break elif (j ==", "return len(menor) #Programa principal qa = 0 a = list() #Set cartas Alice", "return l def qtd_trocas(cartasA, cartasB): inter_a = list() inter_b = list() for i", "carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta de {p}: '))", "in range(qtd): carta = int(input(f' - Digite a {cont+1} carta de {p}: '))", "'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f' - Maximo", "> 100000): carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta de", "preencher_set_cartas(cartas, qtd, p): \"\"\" set de cartas, qtd de cartas, p de pessoa", "inter_a if len(inter_a) < len(inter_b) else inter_b return len(menor) #Programa principal qa =", "def preencher_set_cartas(cartas, qtd, p): \"\"\" set de cartas, qtd de cartas, p de", "if len(inter_a) < len(inter_b) else inter_b return len(menor) #Programa principal qa = 0", "(j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o conjunto interseccao de a", "{p}: ')) while (carta < 1) or (carta > 100000): carta = int(input(f'", "100000): carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta de {p}:", "interseccao de a e tiro os repetidos for i in range(len(cartasB)): for j", "cartas, p de pessoa \"\"\" from time import sleep print() #Pular linha for", "Alice qb = 0 b = list() #Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA", "carta de {p}: ')) cartas.append(carta) print(' - OK!') sleep(1) #Espera 1s def retirar_repetidos(lista):", "principal qa = 0 a = list() #Set cartas Alice qb = 0", "carta = int(input(f' - Digite a {cont+1} carta de {p}: ')) while (carta", "a = list() #Set cartas Alice qb = 0 b = list() #Set", "POKEMON ')) qa = ler_qtd(qa, ' Quantas cartas Alice possui? ') qb =", "qa = ler_qtd(qa, ' Quantas cartas Alice possui? ') qb = ler_qtd(qb, '", "while (carta < 1) or (carta > 100000): carta = int(input(f' - \\033[1;31mEntrada", "DE CARTAS POKEMON ')) qa = ler_qtd(qa, ' Quantas cartas Alice possui? 
')", "int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta de {p}: ')) cartas.append(carta) print('", "for i in range(len(cartasA)): for j in range(len(cartasB)): if cartasA[i] == cartasB[j]: break", "') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b)", "= 0 a = list() #Set cartas Alice qb = 0 b =", "l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB): inter_a = list() inter_b = list() for", "- OK!') sleep(1) #Espera 1s def retirar_repetidos(lista): l = list() for cont in", "cartasA[i] == cartasB[j]: break elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego", "cartasB[j]: break elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o conjunto", "#Pego o conjunto interseccao de b e tiro os repetidos menor = inter_a", "0 a = list() #Set cartas Alice qb = 0 b = list()", "cartas Alice qb = 0 b = list() #Set cartas Beatriz system('cls') print('{:=^50}'.format('", "list() #Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON ')) qa =", "maximo_trocas = qtd_trocas(a, b) print(f' - Maximo de trocas e igual a {maximo_trocas}')", "b e tiro os repetidos menor = inter_a if len(inter_a) < len(inter_b) else", "len(inter_b) else inter_b return len(menor) #Programa principal qa = 0 a = list()", "j in range(len(cartasB)): if cartasA[i] == cartasB[j]: break elif (j == len(cartasB)-1): inter_a.append(cartasA[i])", "1) or (carta > 100000): carta = int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a", "(n > 10000): n = int(input(f' - Entrada invalida!{msg}')) return n def preencher_set_cartas(cartas,", "e tiro os repetidos for i in range(len(cartasB)): for j in range(len(cartasA)): if", "time import sleep print() #Pular linha for cont in range(qtd): carta = int(input(f'", "if lista[cont] not in l: l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB): inter_a =", "invalida!\\033[m Digite a {cont+1} carta de {p}: ')) cartas.append(carta) print(' - OK!') sleep(1)", "OK!') sleep(1) #Espera 1s def retirar_repetidos(lista): l = list() for cont in range(len(lista)):", "= list() for i in range(len(cartasA)): for j in range(len(cartasB)): if cartasA[i] ==", "or (n > 10000): n = int(input(f' - Entrada invalida!{msg}')) return n def", "inter_b = retirar_repetidos(inter_b) #Pego o conjunto interseccao de b e tiro os repetidos", "def ler_qtd(n, msg): n = int(input(msg)) while (n < 1) or (n >", "#Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON ')) qa = ler_qtd(qa,", "p de pessoa \"\"\" from time import sleep print() #Pular linha for cont", "- Digite a {cont+1} carta de {p}: ')) while (carta < 1) or", "== cartasA[j]: break elif (j == len(cartasA)-1): inter_b.append(cartasB[i]) inter_b = retirar_repetidos(inter_b) #Pego o", "qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f' -", "system def ler_qtd(n, msg): n = int(input(msg)) while (n < 1) or (n", "< len(inter_b) else inter_b return len(menor) #Programa principal qa = 0 a =", "os repetidos for i in range(len(cartasB)): for j in range(len(cartasA)): if cartasB[i] ==", "Digite a {cont+1} carta de {p}: ')) cartas.append(carta) print(' - OK!') sleep(1) #Espera", "list() inter_b = list() for i in range(len(cartasA)): for j in range(len(cartasB)): if", "1s def retirar_repetidos(lista): l = list() for cont in 
range(len(lista)): if lista[cont] not", "= list() inter_b = list() for i in range(len(cartasA)): for j in range(len(cartasB)):", "= int(input(f' - \\033[1;31mEntrada invalida!\\033[m Digite a {cont+1} carta de {p}: ')) cartas.append(carta)", "0 b = list() #Set cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON", "in range(len(lista)): if lista[cont] not in l: l.append(lista[cont]) return l def qtd_trocas(cartasA, cartasB):", "de cartas, qtd de cartas, p de pessoa \"\"\" from time import sleep", "Quantas cartas Beatriz possui? ') preencher_set_cartas(a, qa, 'Alice') preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b))", "inter_a.append(cartasA[i]) inter_a = retirar_repetidos(inter_a) #Pego o conjunto interseccao de a e tiro os", "preencher_set_cartas(b, qb, 'Beatriz') print(sorted(a)) print(sorted(b)) maximo_trocas = qtd_trocas(a, b) print(f' - Maximo de", "range(len(cartasB)): if cartasA[i] == cartasB[j]: break elif (j == len(cartasB)-1): inter_a.append(cartasA[i]) inter_a =", "in range(len(cartasA)): for j in range(len(cartasB)): if cartasA[i] == cartasB[j]: break elif (j", "1) or (n > 10000): n = int(input(f' - Entrada invalida!{msg}')) return n", "list() #Set cartas Alice qb = 0 b = list() #Set cartas Beatriz", "cartas Beatriz system('cls') print('{:=^50}'.format(' TROCA DE CARTAS POKEMON ')) qa = ler_qtd(qa, '", "Digite a {cont+1} carta de {p}: ')) while (carta < 1) or (carta" ]
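
# Side note (not part of the original program): qtd_trocas above is the
# nested-loop form of a set computation, min(|A - B|, |B - A|). The version
# below is only a cross-check of that logic using Python's set type.
def qtd_trocas_set(cartasA, cartasB):
    so_a = set(cartasA) - set(cartasB)  # cards only Alice has
    so_b = set(cartasB) - set(cartasA)  # cards only Beatriz has
    return min(len(so_a), len(so_b))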
[ "None: return defualt return param if not isinstance(param, numbers.Number) else [-param, param] self.xdegree", "if random.random() < self.p: img = pil_ops.hflip(img) points = [polygon_ops.hflip(pnts, imw) for pnts", "size=3)) else: fillcolor = self.fillcolor rang = polygon_ops.get_translate_range(points, img.size) if self.max_offset: def limit_box(box,", "PointsToBboxes(object): def __call__(self, img, points): bboxes = np.array(points).reshape((-1, 4)) return img, bboxes class", "= (mw, mh) def __call__(self, img, points): mw, mh = self.size w, h", "scaleY = scales else: scaleX = scaleY = scales self.scaleX, self.scaleY = scaleX,", "def get_param(param, defualt=None): if param is None: return defualt return param if not", "= self.size if not self.keep_ratio: scaleX, scaleY = tw / w, th /", "RandomHorizontalFlip(object): def __init__(self, p=0.5): self.p = p def __call__(self, img, points): imw, imh", "mh = limit else: mw = mh = limit self.size = (mw, mh)", "= pil_ops.resize(img, self.size) points = polygon_ops.scale(points, (scaleX, scaleY)) else: if self.fillcolor is 'random':", "else: assert len(limits) == 4 ml, mt, mr, mb = limits l, t,", "isinstance(degree, numbers.Number) else [-degree, degree] self.expand = expand self.fillcolor = fillcolor def __call__(self,", "return box if len(limits) == 2: ml, mt = 0, 0 mr, mb", "mh = limit self.size = (mw, mh) def __call__(self, img, points): mw, mh", "(w // 2, h // 2), img_size=(w, h), expand=self.expand) for pnts in points]", "points] return img, points def __repr__(self): return self.__class__.__name__ + '(p={})'.format(self.p) class RandomTranslate(object): def", "img.size img = pil_ops.shear_y(img, degree) points = [polygon_ops.shear_y(pnts, degree, img_size=(w, h), expand=True) for", "else: scaleX = scaleY = scales self.scaleX, self.scaleY = scaleX, scaleY def __call__(self,", "b = min(mb, b) if l > r: return None if t >", "fillcolor = tuple(np.random.choice(range(256), size=3)) else: fillcolor = self.fillcolor img = pil_ops.resize_keep_ratio(img, self.size, fillcolor=fillcolor)", "+ self.xdegree[0] w, h = img.size img = pil_ops.shear_x(img, degree, fillcolor=fillcolor) points =", "self.xdegree[0] w, h = img.size img = pil_ops.shear_x(img, degree, fillcolor=fillcolor) points = [polygon_ops.shear_x(pnts,", "scaleX, scaleY = self.scaleX, self.scaleY img = pil_ops.scale(img, (scaleX, scaleY)) points = polygon_ops.scale(points,", "= img.size img = pil_ops.shear_x(img, degree) points = [polygon_ops.shear_x(pnts, degree, img_size=(w, h), expand=True)", "points): mw, mh = self.size w, h = img.size rw = w /", "= pil_ops.vflip(img) points = [polygon_ops.vflip(pnts, imh) for pnts in points] return img, points", "class RandomTranslate(object): def __init__(self, max_offset=None, fillcolor='black'): if max_offset is not None and len(max_offset)", "degree, img_size=(w, h), expand=True) for pnts in points] return img, points class RandomShear(object):", "pnts in points] return img, points class RandomShearY(object): def __init__(self, degree): self.degree =", "fillcolor def __call__(self, img, points): w, h = img.size tw, th = self.size", "= [polygon_ops.shear_x(pnts, degree, img_size=(w, h), expand=True) for pnts in points] if self.ydegree: if", "= random.randint(rang[0], rang[2]) ofy = random.randint(rang[1], rang[3]) img = pil_ops.translate(img, offset=(ofx, ofy), fillcolor=fillcolor)", "= np.array(bboxes).reshape((-1, 2, 2)) return img, points class PointsToBboxes(object): def __call__(self, img, points):", "in points] return img, points class 
import numpy as np
import random
import numbers
import cv2
from PIL import Image
import wpcv
from wpcv.utils.ops import pil_ops, polygon_ops
from wpcv.utils.data_aug.base import Compose, Zip
from wpcv.utils.data_aug import img_aug


class ToPILImage(object):
    def __init__(self):
        self.to = img_aug.ToPILImage()

    def __call__(self, img, *args):
        if len(args):
            return (self.to(img), *args)
        else:
            return self.to(img)


class BboxesToPoints(object):
    def __call__(self, img, bboxes):
        points = np.array(bboxes).reshape((-1, 2, 2))
        return img, points


class PointsToBboxes(object):
    def __call__(self, img, points):
        bboxes = np.array(points).reshape((-1, 4))
        return img, bboxes
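# Usage note (illustrative, not part of the original file): BboxesToPoints and
# PointsToBboxes are inverses of each other. A box list [[l, t, r, b], ...] of
# shape (N, 4) becomes an (N, 2, 2) array of corner points, so the geometric
# transforms below can treat boxes and polygons uniformly, e.g.:
#     img, pts = BboxesToPoints()(img, [[10, 20, 30, 40]])   # pts.shape == (1, 2, 2)
#     img, boxes = PointsToBboxes()(img, pts)                # back to shape (1, 4)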
class Reshape(object):
    def __init__(self, shape):
        self.target_shape = shape

    def __call__(self, x):
        return np.array(x).reshape(self.target_shape)


class Limitsize(object):
    def __init__(self, maxsize):
        limit = maxsize
        if isinstance(limit, (tuple, list, set,)):
            mw, mh = limit
        else:
            mw = mh = limit
        self.size = (mw, mh)

    def __call__(self, img, points):
        mw, mh = self.size
        w, h = img.size
        rw = w / mw
        rh = h / mh
        r = max(rw, rh)
        if r > 1:  # only shrink oversized images, never enlarge
            nw, nh = int(w / r), int(h / r)
            img = pil_ops.resize(img, (nw, nh))
            points = polygon_ops.scale(points, 1 / r)
        return img, points


class Scale(object):
    def __init__(self, scales):
        if isinstance(scales, (tuple, list)):
            scaleX, scaleY = scales
        else:
            scaleX = scaleY = scales
        self.scaleX, self.scaleY = scaleX, scaleY

    def __call__(self, img, points):
        scaleX, scaleY = self.scaleX, self.scaleY
        img = pil_ops.scale(img, (scaleX, scaleY))
        points = polygon_ops.scale(points, (scaleX, scaleY))
        return img, points


class Resize(object):
    def __init__(self, size, keep_ratio=False, fillcolor='black'):
        self.size = size
        self.keep_ratio = keep_ratio
        self.fillcolor = fillcolor

    def __call__(self, img, points):
        w, h = img.size
        tw, th = self.size
        if not self.keep_ratio:
            scaleX, scaleY = tw / w, th / h
            img = pil_ops.resize(img, self.size)
            points = polygon_ops.scale(points, (scaleX, scaleY))
        else:
            if self.fillcolor == 'random':
                fillcolor = tuple(np.random.choice(range(256), size=3))
            else:
                fillcolor = self.fillcolor
            img = pil_ops.resize_keep_ratio(img, self.size, fillcolor=fillcolor)
            # Letterboxing: the content is scaled by 1/r and centered, so the
            # points get the same scale followed by the padding offset.
            rx = w / tw
            ry = h / th
            r = max(rx, ry)
            nw = w / r
            nh = h / r
            dw = (tw - nw) // 2
            dh = (th - nh) // 2
            points = polygon_ops.scale(points, 1 / r)
            points = polygon_ops.translate(points, (dw, dh))
        return img, points
class RandomHorizontalFlip(object):
    def __init__(self, p=0.5):
        self.p = p

    def __call__(self, img, points):
        imw, imh = img.size
        if random.random() < self.p:
            img = pil_ops.hflip(img)
            points = [polygon_ops.hflip(pnts, imw) for pnts in points]
        return img, points

    def __repr__(self):
        return self.__class__.__name__ + '(p={})'.format(self.p)


class RandomVerticalFlip(object):
    def __init__(self, p=0.5):
        self.p = p

    def __call__(self, img, points):
        imw, imh = img.size
        if random.random() < self.p:
            img = pil_ops.vflip(img)
            points = [polygon_ops.vflip(pnts, imh) for pnts in points]
        return img, points

    def __repr__(self):
        return self.__class__.__name__ + '(p={})'.format(self.p)


class RandomTranslate(object):
    def __init__(self, max_offset=None, fillcolor='black'):
        if max_offset is not None and len(max_offset) == 2:
            # (mx, my) is shorthand for the symmetric range [-mx, -my, mx, my]
            mx, my = max_offset
            max_offset = [-mx, -my, mx, my]
        self.max_offset = max_offset
        self.fillcolor = fillcolor

    def __call__(self, img, points):
        if self.fillcolor == 'random':
            fillcolor = tuple(np.random.choice(range(256), size=3))
        else:
            fillcolor = self.fillcolor
        rang = polygon_ops.get_translate_range(points, img.size)
        if self.max_offset:
            def limit_box(box, limits=None):
                # Clip box to limits; limits is (mr, mb) or (ml, mt, mr, mb).
                if limits is None:
                    return box
                if len(limits) == 2:
                    ml, mt = 0, 0
                    mr, mb = limits
                else:
                    assert len(limits) == 4
                    ml, mt, mr, mb = limits
                l, t, r, b = box
                l = max(ml, l)
                t = max(mt, t)
                r = min(mr, r)
                b = min(mb, b)
                if l > r:
                    return None
                if t > b:
                    return None
                return [l, t, r, b]

            rang = limit_box(rang, self.max_offset)
            if rang is None:
                return img, points
        ofx = random.randint(rang[0], rang[2])
        ofy = random.randint(rang[1], rang[3])
        img = pil_ops.translate(img, offset=(ofx, ofy), fillcolor=fillcolor)
        points = [polygon_ops.translate(pnts, (ofx, ofy)) for pnts in points]
        return img, points
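# Usage note (illustrative): RandomTranslate(max_offset=(30, 10)) limits the
# sampled offset to at most 30 px horizontally and 10 px vertically in either
# direction, intersected with the range returned by
# polygon_ops.get_translate_range, which is presumably the offset range that
# keeps every polygon inside the image.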
class RandomRotate(object):
    def __init__(self, degree, expand=True, fillcolor='black'):
        # A scalar degree d means a uniform range [-d, d].
        self.degree = degree if not isinstance(degree, numbers.Number) else [-degree, degree]
        self.expand = expand
        self.fillcolor = fillcolor

    def __call__(self, img, points):
        if self.fillcolor == 'random':
            fillcolor = tuple(np.random.choice(range(256), size=3))
        else:
            fillcolor = self.fillcolor
        degree = random.random() * (self.degree[1] - self.degree[0]) + self.degree[0]
        w, h = img.size
        img = pil_ops.rotate(img, degree, expand=self.expand, fillcolor=fillcolor)
        points = [polygon_ops.rotate(pnts, degree, (w // 2, h // 2), img_size=(w, h), expand=self.expand)
                  for pnts in points]
        return img, points


class RandomShear(object):
    def __init__(self, xdegree, ydegree=None, fillcolor='black'):
        def get_param(param, default=None):
            if param is None:
                return default
            return param if not isinstance(param, numbers.Number) else [-param, param]

        self.xdegree = get_param(xdegree)
        self.ydegree = get_param(ydegree)
        self.fillcolor = fillcolor

    def __call__(self, img, points):
        if self.xdegree:
            if self.fillcolor == 'random':
                fillcolor = tuple(np.random.choice(range(256), size=3))
            else:
                fillcolor = self.fillcolor
            degree = random.random() * (self.xdegree[1] - self.xdegree[0]) + self.xdegree[0]
            w, h = img.size
            img = pil_ops.shear_x(img, degree, fillcolor=fillcolor)
            points = [polygon_ops.shear_x(pnts, degree, img_size=(w, h), expand=True) for pnts in points]
        if self.ydegree:
            if self.fillcolor == 'random':
                fillcolor = tuple(np.random.choice(range(256), size=3))
            else:
                fillcolor = self.fillcolor
            degree = random.random() * (self.ydegree[1] - self.ydegree[0]) + self.ydegree[0]
            w, h = img.size
            img = pil_ops.shear_y(img, degree, fillcolor=fillcolor)
            points = [polygon_ops.shear_y(pnts, degree, img_size=(w, h), expand=True) for pnts in points]
        return img, points
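# Usage note (illustrative): a scalar angle expands to a symmetric range, so
# RandomShear(10) shears along x by a uniform angle in [-10, 10] degrees, and
# RandomShear(10, 5) additionally shears along y in [-5, 5] degrees.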
class RandomShearX(object):
    def __init__(self, degree):
        self.degree = degree if not isinstance(degree, numbers.Number) else [-degree, degree]

    def __call__(self, img, points):
        degree = random.random() * (self.degree[1] - self.degree[0]) + self.degree[0]
        w, h = img.size
        img = pil_ops.shear_x(img, degree)
        points = [polygon_ops.shear_x(pnts, degree, img_size=(w, h), expand=True) for pnts in points]
        return img, points


class RandomShearY(object):
    def __init__(self, degree):
        self.degree = degree if not isinstance(degree, numbers.Number) else [-degree, degree]

    def __call__(self, img, points):
        degree = random.random() * (self.degree[1] - self.degree[0]) + self.degree[0]
        w, h = img.size
        img = pil_ops.shear_y(img, degree)
        points = [polygon_ops.shear_y(pnts, degree, img_size=(w, h), expand=True) for pnts in points]
        return img, points

# class RandomPerspective:
pil_ops, polygon_ops from wpcv.utils.data_aug.base import Compose, Zip from", "1 / r) return img, points class Scale(object): def __init__(self, scales): if isinstance(scales,", "degree = random.random() * (self.xdegree[1] - self.xdegree[0]) + self.xdegree[0] w, h = img.size", "imw) for pnts in points] return img, points def __repr__(self): return self.__class__.__name__ +", "img = pil_ops.scale(img, (scaleX, scaleY)) points = polygon_ops.scale(points, (scaleX, scaleY)) return img, points", "/ w, th / h img = pil_ops.resize(img, self.size) points = polygon_ops.scale(points, (scaleX,", "fillcolor=fillcolor) points = [polygon_ops.shear_x(pnts, degree, img_size=(w, h), expand=True) for pnts in points] if", "Resize(object): def __init__(self, size, keep_ratio=False, fillcolor='black'): self.size = size self.keep_ratio = keep_ratio self.fillcolor", "2: mx, my = max_offset max_offset = [-mx, -my, mx, my] self.max_offset =", "= pil_ops.shear_y(img, degree, fillcolor=fillcolor) points = [polygon_ops.shear_y(pnts, degree, img_size=(w, h), expand=True) for pnts", "self.max_offset) if rang is None: return img, points ofx = random.randint(rang[0], rang[2]) ofy", "= polygon_ops.scale(points, (scaleX, scaleY)) return img, points class Resize(object): def __init__(self, size, keep_ratio=False,", "def __init__(self, size, keep_ratio=False, fillcolor='black'): self.size = size self.keep_ratio = keep_ratio self.fillcolor =", "= h / mh r = max(rw, rh) if r > 1: nw,", "= tw / w, th / h img = pil_ops.resize(img, self.size) points =", "+ self.degree[0] w, h = img.size img = pil_ops.rotate(img, degree, expand=self.expand, fillcolor=fillcolor) points", "param if not isinstance(param, numbers.Number) else [-param, param] self.xdegree = get_param(xdegree) self.ydegree =", "scales else: scaleX = scaleY = scales self.scaleX, self.scaleY = scaleX, scaleY def", "4)) return img, bboxes class Reshape(object): def __init__(self, shape): self.target_shape = shape def", "1 / r) points = polygon_ops.translate(points, (dw, dh)) return img, points class RandomHorizontalFlip(object):", "fillcolor=fillcolor) points = [polygon_ops.rotate(pnts, degree, (w // 2, h // 2), img_size=(w, h),", "/ h img = pil_ops.resize(img, self.size) points = polygon_ops.scale(points, (scaleX, scaleY)) else: if", "= self.fillcolor degree = random.random() * (self.ydegree[1] - self.ydegree[0]) + self.ydegree[0] w, h", "'random': fillcolor = tuple(np.random.choice(range(256), size=3)) else: fillcolor = self.fillcolor degree = random.random() *", "mh) def __call__(self, img, points): mw, mh = self.size w, h = img.size", "for pnts in points] return img, points class RandomRotate(object): def __init__(self, degree, expand=True,", "* (self.degree[1] - self.degree[0]) + self.degree[0] w, h = img.size img = pil_ops.rotate(img,", "is 'random': fillcolor = tuple(np.random.choice(range(256), size=3)) else: fillcolor = self.fillcolor img = pil_ops.resize_keep_ratio(img,", "return [l, t, r, b] rang = limit_box(rang, self.max_offset) if rang is None:", "__call__(self, img, points): w, h = img.size tw, th = self.size if not", "self.xdegree = get_param(xdegree) self.ydegree = get_param(ydegree) self.fillcolor = fillcolor def __call__(self, img, points):", "self.target_shape = shape def __call__(self, x): return np.array(x).reshape(self.target_shape) class Limitsize(object): def __init__(self, maxsize):", "= max(ml, l) t = max(mt, t) r = min(mr, r) b =", "not isinstance(degree, numbers.Number) else [-degree, degree] self.expand = expand self.fillcolor = fillcolor 
def", "= (tw - nw) // 2 dh = (th - nh) // 2", "def __call__(self, img, *args): if len(args): return (self.to(img), *args) else: return self.to(img) class", "limits=None): if limits is None: return box if len(limits) == 2: ml, mt", "2, 2)) return img, points class PointsToBboxes(object): def __call__(self, img, points): bboxes =", "self.scaleY = scaleX, scaleY def __call__(self, img, points): scaleX, scaleY = self.scaleX, self.scaleY", "= scales self.scaleX, self.scaleY = scaleX, scaleY def __call__(self, img, points): scaleX, scaleY", "points = polygon_ops.translate(points, (dw, dh)) return img, points class RandomHorizontalFlip(object): def __init__(self, p=0.5):", "points): imw, imh = img.size if random.random() < self.p: img = pil_ops.vflip(img) points", "h / mh r = max(rw, rh) if r > 1: nw, nh", "self.degree[0]) + self.degree[0] w, h = img.size img = pil_ops.rotate(img, degree, expand=self.expand, fillcolor=fillcolor)", "bboxes = np.array(points).reshape((-1, 4)) return img, bboxes class Reshape(object): def __init__(self, shape): self.target_shape", "1: nw, nh = int(w / r), int(h / r) img = pil_ops.resize(img,", "fillcolor=fillcolor) points = [polygon_ops.shear_y(pnts, degree, img_size=(w, h), expand=True) for pnts in points] return", "fillcolor = self.fillcolor img = pil_ops.resize_keep_ratio(img, self.size, fillcolor=fillcolor) rx = w / tw", "degree = random.random() * (self.degree[1] - self.degree[0]) + self.degree[0] w, h = img.size", "img, points class RandomShear(object): def __init__(self, xdegree, ydegree=None, fillcolor='balck'): def get_param(param, defualt=None): if", "l, t, r, b = box l = max(ml, l) t = max(mt,", "else: return self.to(img) class BboxesToPoints(object): def __call__(self, img, bboxes): points = np.array(bboxes).reshape((-1, 2,", "= tuple(np.random.choice(range(256), size=3)) else: fillcolor = self.fillcolor degree = random.random() * (self.xdegree[1] -", "import img_aug class ToPILImage(object): def __init__(self): self.to = img_aug.ToPILImage() def __call__(self, img, *args):", "= img.size img = pil_ops.shear_x(img, degree, fillcolor=fillcolor) points = [polygon_ops.shear_x(pnts, degree, img_size=(w, h),", "return img, points class RandomRotate(object): def __init__(self, degree, expand=True, fillcolor='black'): self.degree = degree", "is 'random': fillcolor = tuple(np.random.choice(range(256), size=3)) else: fillcolor = self.fillcolor rang = polygon_ops.get_translate_range(points,", "= img.size img = pil_ops.shear_y(img, degree, fillcolor=fillcolor) points = [polygon_ops.shear_y(pnts, degree, img_size=(w, h),", "polygon_ops.scale(points, 1 / r) points = polygon_ops.translate(points, (dw, dh)) return img, points class", "b = box l = max(ml, l) t = max(mt, t) r =", "points): if self.xdegree: if self.fillcolor is 'random': fillcolor = tuple(np.random.choice(range(256), size=3)) else: fillcolor", "/ tw ry = h / th r = max(rx, ry) nw =", "img, points): mw, mh = self.size w, h = img.size rw = w", "img, points class Resize(object): def __init__(self, size, keep_ratio=False, fillcolor='black'): self.size = size self.keep_ratio", "from wpcv.utils.data_aug import img_aug class ToPILImage(object): def __init__(self): self.to = img_aug.ToPILImage() def __call__(self,", "(mw, mh) def __call__(self, img, points): mw, mh = self.size w, h =", "tuple(np.random.choice(range(256), size=3)) else: fillcolor = self.fillcolor img = pil_ops.resize_keep_ratio(img, self.size, fillcolor=fillcolor) rx =", "ml, mt, mr, mb = limits l, t, r, b = box l", "fillcolor = 
tuple(np.random.choice(range(256), size=3)) else: fillcolor = self.fillcolor degree = random.random() * (self.ydegree[1]", "points): imw, imh = img.size if random.random() < self.p: img = pil_ops.hflip(img) points", "def __init__(self, shape): self.target_shape = shape def __call__(self, x): return np.array(x).reshape(self.target_shape) class Limitsize(object):", "pil_ops.translate(img, offset=(ofx, ofy), fillcolor=fillcolor) points = [polygon_ops.translate(pnts, (ofx, ofy)) for pnts in points]", "__call__(self, img, bboxes): points = np.array(bboxes).reshape((-1, 2, 2)) return img, points class PointsToBboxes(object):", "self.__class__.__name__ + '(p={})'.format(self.p) class RandomVerticalFlip(object): def __init__(self, p=0.5): self.p = p def __call__(self,", "class RandomShearX(object): def __init__(self, degree): self.degree = degree if not isinstance(degree, numbers.Number) else", "h), expand=True) for pnts in points] return img, points class RandomShear(object): def __init__(self,", "* (self.ydegree[1] - self.ydegree[0]) + self.ydegree[0] w, h = img.size img = pil_ops.shear_y(img,", "return None if t > b: return None return [l, t, r, b]", "self.xdegree[0]) + self.xdegree[0] w, h = img.size img = pil_ops.shear_x(img, degree, fillcolor=fillcolor) points", "points = [polygon_ops.shear_x(pnts, degree, img_size=(w, h), expand=True) for pnts in points] return img,", "if self.xdegree: if self.fillcolor is 'random': fillcolor = tuple(np.random.choice(range(256), size=3)) else: fillcolor =", "if random.random() < self.p: img = pil_ops.vflip(img) points = [polygon_ops.vflip(pnts, imh) for pnts", "cv2 from PIL import Image import wpcv from wpcv.utils.ops import pil_ops, polygon_ops from", "degree, img_size=(w, h), expand=True) for pnts in points] return img, points class RandomShearY(object):", "h = img.size img = pil_ops.shear_y(img, degree, fillcolor=fillcolor) points = [polygon_ops.shear_y(pnts, degree, img_size=(w,", "if isinstance(limit, (tuple, list, set,)): mw, mh = limit else: mw = mh", "= polygon_ops.scale(points, 1 / r) points = polygon_ops.translate(points, (dw, dh)) return img, points", "isinstance(scales, (tuple, list)): scaleX, scaleY = scales else: scaleX = scaleY = scales", "img = pil_ops.resize(img, (nw, nh)) points = polygon_ops.scale(points, 1 / r) return img,", "self.scaleX, self.scaleY img = pil_ops.scale(img, (scaleX, scaleY)) points = polygon_ops.scale(points, (scaleX, scaleY)) return", "for pnts in points] return img, points def __repr__(self): return self.__class__.__name__ + '(p={})'.format(self.p)", "fillcolor def __call__(self, img, points): if self.xdegree: if self.fillcolor is 'random': fillcolor =", "pil_ops.shear_x(img, degree, fillcolor=fillcolor) points = [polygon_ops.shear_x(pnts, degree, img_size=(w, h), expand=True) for pnts in", "def __call__(self, img, points): if self.xdegree: if self.fillcolor is 'random': fillcolor = tuple(np.random.choice(range(256),", "- self.ydegree[0]) + self.ydegree[0] w, h = img.size img = pil_ops.shear_y(img, degree, fillcolor=fillcolor)", "expand=True) for pnts in points] return img, points class RandomShearY(object): def __init__(self, degree):", "self.p = p def __call__(self, img, points): imw, imh = img.size if random.random()", "else [-degree, degree] self.expand = expand self.fillcolor = fillcolor def __call__(self, img, points):" ]
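# A minimal usage sketch for the transforms above. The file name and box
# coordinates are hypothetical, and it assumes wpcv's Compose threads the
# (img, points) pair through each transform in turn, mirroring how every
# __call__ above accepts and returns such a pair.
from PIL import Image

transform = Compose([
    BboxesToPoints(),                        # [x1, y1, x2, y2] -> 2x2 point pairs
    RandomHorizontalFlip(p=0.5),
    RandomRotate(degree=10, fillcolor='random'),
    Resize((512, 512), keep_ratio=True),
    PointsToBboxes(),                        # back to flat boxes
])

img = Image.open('demo.jpg')                 # hypothetical input image
bboxes = [[10, 20, 110, 220]]
img, bboxes = transform(img, bboxes)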
[ "= \"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text = \"Nema greske\" class ErrorOut(Label):", "from kivy.lang.builder import Builder from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try: tmp =", "error.text = \"Error: Krivi KV kod\" except ParserException: tmp = None error.text =", "outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text = \"Nema greske\" class ErrorOut(Label): pass class ReloadButton(Button):", "not tmp: error.text = \"Nema nista napisano\" except AttributeError: tmp = None error.text", "AttributeError: tmp = None error.text = \"Error: Krivi KV kod\" except ParserException: tmp", "KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\") return", "with your current kivy version ! from kivy.app import App from kivy.uix.label import", "from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.button import Button from", "! from kivy.app import App from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout", "ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text = \"Nema greske\" class ErrorOut(Label): pass class", "Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.button import Button from kivy.uix.textinput import TextInput", "import App from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.button import", "ParserException def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if not tmp: error.text = \"Nema", "from kivy.uix.boxlayout import BoxLayout from kivy.uix.button import Button from kivy.uix.textinput import TextInput from", "App from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.button import Button", "outp.add_widget(tmp) error.text = \"Nema greske\" class ErrorOut(Label): pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite)", "if not tmp: error.text = \"Nema nista napisano\" except AttributeError: tmp = None", "ParserException: tmp = None error.text = \"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text", "kivy kivy.require('1.10.0') # replace with your current kivy version ! 
from kivy.app import", "kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import ObjectProperty from kivy.lang.builder import Builder from kivy.lang.parser", "if tmp: outp.add_widget(tmp) error.text = \"Nema greske\" class ErrorOut(Label): pass class ReloadButton(Button): def", "kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import ObjectProperty from kivy.lang.builder", "kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class KvtestApp(App): def", "def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class", "Krivi KV kod\" except ParserException: tmp = None error.text = \"Error: ParserException\" outp.clear_widgets()", "tmp: error.text = \"Nema nista napisano\" except AttributeError: tmp = None error.text =", "class MainBox(BoxLayout): pass class KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\") return mainbox if", "\"Nema greske\" class ErrorOut(Label): pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass", "TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import ObjectProperty from kivy.lang.builder import Builder", "class ErrorOut(Label): pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout):", "kivy.require('1.10.0') # replace with your current kivy version ! from kivy.app import App", "kivy.lang.builder import Builder from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text)", "None error.text = \"Error: Krivi KV kod\" except ParserException: tmp = None error.text", "import kivy kivy.require('1.10.0') # replace with your current kivy version ! 
from kivy.app", "import ParserException def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if not tmp: error.text =", "from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import ObjectProperty from", "napisano\" except AttributeError: tmp = None error.text = \"Error: Krivi KV kod\" except", "class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout):", "class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class KvtestApp(App): def build(self):", "from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import ObjectProperty from kivy.lang.builder import Builder from", "from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if not tmp:", "Builder.load_string(inp.text) if not tmp: error.text = \"Nema nista napisano\" except AttributeError: tmp =", "tmp: outp.add_widget(tmp) error.text = \"Nema greske\" class ErrorOut(Label): pass class ReloadButton(Button): def on_press(self):", "try: tmp = Builder.load_string(inp.text) if not tmp: error.text = \"Nema nista napisano\" except", "import CodeNavigationBehavior from kivy.properties import ObjectProperty from kivy.lang.builder import Builder from kivy.lang.parser import", "Button from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import ObjectProperty", "current kivy version ! from kivy.app import App from kivy.uix.label import Label from", "\"Error: Krivi KV kod\" except ParserException: tmp = None error.text = \"Error: ParserException\"", "version ! from kivy.app import App from kivy.uix.label import Label from kivy.uix.boxlayout import", "import ObjectProperty from kivy.lang.builder import Builder from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try:", "kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if not tmp: error.text = \"Nema nista napisano\"", "from kivy.uix.button import Button from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from", "kod\" except ParserException: tmp = None error.text = \"Error: ParserException\" outp.clear_widgets() if tmp:", "\"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text = \"Nema greske\" class ErrorOut(Label): pass", "<gh_stars>0 import kivy kivy.require('1.10.0') # replace with your current kivy version ! 
from", "except AttributeError: tmp = None error.text = \"Error: Krivi KV kod\" except ParserException:", "\"Nema nista napisano\" except AttributeError: tmp = None error.text = \"Error: Krivi KV", "error.text = \"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text = \"Nema greske\" class", "KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\") return mainbox if __name__ == '__main__': KvtestApp().run()", "pass class MainBox(BoxLayout): pass class KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\") return mainbox", "KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class KvtestApp(App): def build(self): mainbox", "def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if not tmp: error.text = \"Nema nista", "error.text = \"Nema greske\" class ErrorOut(Label): pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class", "tmp = Builder.load_string(inp.text) if not tmp: error.text = \"Nema nista napisano\" except AttributeError:", "kivy.app import App from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.button", "kivy.uix.boxlayout import BoxLayout from kivy.uix.button import Button from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation", "pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class KvtestApp(App): def build(self): mainbox =", "kivy.uix.button import Button from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties", "= Builder.load_string(inp.text) if not tmp: error.text = \"Nema nista napisano\" except AttributeError: tmp", "= None error.text = \"Error: Krivi KV kod\" except ParserException: tmp = None", "error.text = \"Nema nista napisano\" except AttributeError: tmp = None error.text = \"Error:", "ObjectProperty from kivy.lang.builder import Builder from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try: tmp", "kivy.properties import ObjectProperty from kivy.lang.builder import Builder from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error):", "= \"Nema greske\" class ErrorOut(Label): pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput):", "tmp = None error.text = \"Error: Krivi KV kod\" except ParserException: tmp =", "your current kivy version ! from kivy.app import App from kivy.uix.label import Label", "replace with your current kivy version ! from kivy.app import App from kivy.uix.label", "kivy version ! 
from kivy.app import App from kivy.uix.label import Label from kivy.uix.boxlayout", "BoxLayout from kivy.uix.button import Button from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior", "greske\" class ErrorOut(Label): pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class", "nista napisano\" except AttributeError: tmp = None error.text = \"Error: Krivi KV kod\"", "class KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\") return mainbox if __name__ == '__main__':", "class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\")", "# replace with your current kivy version ! from kivy.app import App from", "kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if not tmp: error.text", "import Button from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import", "tmp = None error.text = \"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text =", "Builder from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if not", "= \"Nema nista napisano\" except AttributeError: tmp = None error.text = \"Error: Krivi", "= \"Error: Krivi KV kod\" except ParserException: tmp = None error.text = \"Error:", "ErrorOut(Label): pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass", "kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.button import Button from kivy.uix.textinput", "import BoxLayout from kivy.uix.button import Button from kivy.uix.textinput import TextInput from kivy.uix.behaviors.codenavigation import", "import Builder from kivy.lang.parser import ParserException def kompajliraj(inp,outp,error): try: tmp = Builder.load_string(inp.text) if", "on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass class KvtestApp(App):", "None error.text = \"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text = \"Nema greske\"", "ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class MainBox(BoxLayout): pass", "from kivy.properties import ObjectProperty from kivy.lang.builder import Builder from kivy.lang.parser import ParserException def", "except ParserException: tmp = None error.text = \"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp)", "from kivy.app import App from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from", "pass class ReloadButton(Button): def on_press(self): kompajliraj(self.textRead,self.textWrite,self.errorWrite) class KVinputText(CodeNavigationBehavior,TextInput): pass class KVoutput(BoxLayout): pass class", "MainBox(BoxLayout): pass class KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\") return mainbox if __name__", "pass class 
KvtestApp(App): def build(self): mainbox = MainBox(orientation=\"vertical\") return mainbox if __name__ ==", "KV kod\" except ParserException: tmp = None error.text = \"Error: ParserException\" outp.clear_widgets() if", "import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.button import Button from kivy.uix.textinput import", "CodeNavigationBehavior from kivy.properties import ObjectProperty from kivy.lang.builder import Builder from kivy.lang.parser import ParserException", "= None error.text = \"Error: ParserException\" outp.clear_widgets() if tmp: outp.add_widget(tmp) error.text = \"Nema", "import TextInput from kivy.uix.behaviors.codenavigation import CodeNavigationBehavior from kivy.properties import ObjectProperty from kivy.lang.builder import" ]
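# A minimal headless sketch of the reload flow above (hypothetical, not part
# of the original module): kompajliraj only needs three widgets with the right
# interface, so it can be exercised without the app's KV layout file.
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput

inp = TextInput(text="Label:\n    text: 'hello'")  # KV source to compile
outp = BoxLayout()                                 # compiled widget lands here
error = Label()                                    # status / error messages
kompajliraj(inp, outp, error)
print(error.text)  # expected: "Nema greske" ("no error")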
[ "= io_subclass @property def protocol(self): \"\"\" Gets the protocol of this IbIocProfile. The", "getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x,", "the device version. :param device_version: The device_version of this IbIocProfile. :type: int \"\"\"", "= RDMA Write Requests to IOCs Bit 5 on = RDMA Write Requests", "# (required parameter) 'subsystem_id': 'subsystemId', # (required parameter) 'io_class': 'ioClass', # (required parameter)", "\"\"\" self._send_message_size = send_message_size @property def rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size of this", "user. :param id_string: The id_string of this IbIocProfile. :type: str \"\"\" self._id_string =", "classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_class of", "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR", "to identify the device version. :return: The device_version of this IbIocProfile. :rtype: int", "= io_device_id @property def device_version(self): \"\"\" Gets the device_version of this IbIocProfile. A", "\"\"\" Gets the subsystem_vendor_id of this IbIocProfile. The ID of the enclosure vendor", "\"\"\" return self._id_string @id_string.setter def id_string(self, id_string): \"\"\" Sets the id_string of this", "= None self._id_string = None @property def ioc_guid(self): \"\"\" Gets the ioc_guid of", "'sendMessageSize', # (required parameter) 'rdma_transfer_size': 'rdmaTransferSize', # (required parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required", "RDMA Read Queue :return: The rdma_read_queue_depth of this IbIocProfile. :rtype: int :required/optional: required", "None self._service_entries = None self._id_string = None @property def ioc_guid(self): \"\"\" Gets the", "Gets the send_message_queue_depth of this IbIocProfile. The maximum depth of the Send Message", "products derived from this software without specific prior written permission. NO EXPRESS OR", "'int', # (required parameter) 'io_subclass': 'int', # (required parameter) 'protocol': 'int', # (required", "\"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask of this", "protocol): \"\"\" Sets the protocol of this IbIocProfile. The I/O protocol of the", "true if both objects are not equal \"\"\" return not self == other", "self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self, rdma_transfer_size): \"\"\" Sets the rdma_transfer_size of this IbIocProfile. The", "vendor to identify the type of I/O controller :param io_device_id: The io_device_id of", "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "Message Queue. :param send_message_queue_depth: The send_message_queue_depth of this IbIocProfile. :type: int \"\"\" self._send_message_queue_depth", "ioc_guid of this IbIocProfile. The EUI-64 GUID used to uniquely identify the I/O", "manually. \"\"\" def __init__(self): \"\"\" IbIocProfile - a model defined in Swagger :param", "self._subsystem_id @subsystem_id.setter def subsystem_id(self, subsystem_id): \"\"\" Sets the subsystem_id of this IbIocProfile. 
A", "x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr] =", "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", "hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item:", "self.to_str() def __eq__(self, other): \"\"\" Returns true if both objects are equal \"\"\"", "IbIocProfile. The I/O class of the controller. 0x0000 -0xFFFE is reserved for I/O", "Sets the subsystem_vendor_id of this IbIocProfile. The ID of the enclosure vendor in", "version. :return: The device_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "self._protocol_version = protocol_version @property def send_message_queue_depth(self): \"\"\" Gets the send_message_queue_depth of this IbIocProfile.", "IbIocProfile. The maximum depth of the Send Message Queue. :return: The send_message_queue_depth of", "service_entries of this IbIocProfile. :type: int \"\"\" self._service_entries = service_entries @property def id_string(self):", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER", "outbound RDMA transfers initiated by the controller. :return: The rdma_transfer_size of this IbIocProfile.", "3 on = RDMA Read Requests from IOCs Bit 4 on = RDMA", "'int', # (required parameter) 'controller_ops_capability_mask': 'int', # (required parameter) 'service_entries': 'int', # (required", "uniquely identify the I/O controller. :param ioc_guid: The ioc_guid of this IbIocProfile. :type:", "IbIocProfile. A number assigned by the vendor to identify the device version. :param", "of this IbIocProfile. The I/O protocol of the controller. 0x0000 -0xFFFE is reserved", "= rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets the send_message_size of this IbIocProfile. The", ")) else: result[attr] = value return result def to_str(self): \"\"\" Returns the string", "def send_message_queue_depth(self): \"\"\" Gets the send_message_queue_depth of this IbIocProfile. The maximum depth of", ":return: The ioc_guid of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._ioc_guid", "NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "IOCs :return: The controller_ops_capability_mask of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "\"\"\" self._protocol_version = protocol_version @property def send_message_queue_depth(self): \"\"\" Gets the send_message_queue_depth of this", "@send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the send_message_queue_depth of this IbIocProfile. The maximum", "this IbIocProfile. The maximum depth of the per-channel RDMA Read Queue :return: The", "(required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize', # (required parameter) 'rdma_transfer_size':", "for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr]", "the limitations in the disclaimer below) provided that the following conditions are met:", "'device_version': 'deviceVersion', # (required parameter) 'subsystem_vendor_id': 'subsystemVendorId', # (required parameter) 'subsystem_id': 'subsystemId', #", "IbIocProfile. 
A number assigned by vendor to identify the type of I/O controller", "parameter) 'protocol': 'int', # (required parameter) 'protocol_version': 'int', # (required parameter) 'send_message_queue_depth': 'int',", "'send_message_queue_depth': 'int', # (required parameter) 'rdma_read_queue_depth': 'int', # (required parameter) 'send_message_size': 'int', #", "required \"\"\" return self._vendor_id @vendor_id.setter def vendor_id(self, vendor_id): \"\"\" Sets the vendor_id of", "list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value,", "\"\"\" Returns the string representation of the model \"\"\" return pformat(self.to_dict()) def __repr__(self):", "(required parameter) 'service_entries': 'serviceEntries', # (required parameter) 'id_string': 'idString' } self._ioc_guid = None", "return None return self.to_str() def __eq__(self, other): \"\"\" Returns true if both objects", "The I/O controller vendor ID in IEEE format. :param vendor_id: The vendor_id of", "def io_device_id(self, io_device_id): \"\"\" Sets the io_device_id of this IbIocProfile. A number assigned", "IEEE format, or else all zeros if there is no vendor ID. :return:", "IbIocProfile. The protocol version (protocol-specific). :return: The protocol_version of this IbIocProfile. :rtype: int", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version):", "by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_subclass of this IbIocProfile.", "= subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets the subsystem_id of this IbIocProfile. A", "None or other is None: return None return self.__dict__ == other.__dict__ def __ne__(self,", "= id_string def to_dict(self): \"\"\" Returns the model properties as a dict \"\"\"", "per-channel RDMA Read Queue :param rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile. :type: int", "reproduce the above copyright notice, this list of conditions and the following disclaimer", "TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS", "The controller_ops_capability_mask of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter", "entries in the service entries table :param service_entries: The service_entries of this IbIocProfile.", "IbIocProfile. The maximum depth of the Send Message Queue. :param send_message_queue_depth: The send_message_queue_depth", "I/O classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :param io_class: The", "of this IbIocProfile. A UTF-8 encoded string for identifying the controller to user.", "The maximum depth of the per-channel RDMA Read Queue :param rdma_read_queue_depth: The rdma_read_queue_depth", "\"\"\" return self._send_message_size @send_message_size.setter def send_message_size(self, send_message_size): \"\"\" Sets the send_message_size of this", "of I/O controller :return: The io_device_id of this IbIocProfile. :rtype: int :required/optional: required", "zeros if there is no vendor ID. :return: The subsystem_vendor_id of this IbIocProfile.", "this IbIocProfile. 
The number of entries in the service entries table :param service_entries:", "OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS", ":rtype: int :required/optional: required \"\"\" return self._protocol @protocol.setter def protocol(self, protocol): \"\"\" Sets", ":required/optional: required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self, rdma_transfer_size): \"\"\" Sets the rdma_transfer_size", "are met: * Redistributions of source code must retain the above copyright notice,", "(required parameter) 'protocol_version': 'int', # (required parameter) 'send_message_queue_depth': 'int', # (required parameter) 'rdma_read_queue_depth':", "@property def rdma_read_queue_depth(self): \"\"\" Gets the rdma_read_queue_depth of this IbIocProfile. The maximum depth", "For `print` and `pprint` \"\"\" if self is None: return None return self.to_str()", "The rdma_transfer_size of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter", "size of Send Messages in bytes. :return: The send_message_size of this IbIocProfile. :rtype:", "of the Send Message Queue. :return: The send_message_queue_depth of this IbIocProfile. :rtype: int", "(required parameter) 'protocol_version': 'protocolVersion', # (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth':", "subsystem_id(self): \"\"\" Gets the subsystem_id of this IbIocProfile. A number identifying the subsystem", "UTF-8 encoded string for identifying the controller to user. :return: The id_string of", "provided with the distribution. * Neither the name of NetApp, Inc. nor the", "Copyright (c) – 2016, NetApp, Inc. All rights reserved. Redistribution and use in", "'protocol_version': 'int', # (required parameter) 'send_message_queue_depth': 'int', # (required parameter) 'rdma_read_queue_depth': 'int', #", "2016, NetApp, Inc. All rights reserved. Redistribution and use in source and binary", "self.swagger_types = { 'ioc_guid': 'str', # (required parameter) 'vendor_id': 'str', # (required parameter)", "the send_message_size of this IbIocProfile. The maximum size of Send Messages in bytes.", "permission. NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED", "def vendor_id(self): \"\"\" Gets the vendor_id of this IbIocProfile. The I/O controller vendor", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES", "send_message_size: The send_message_size of this IbIocProfile. :type: int \"\"\" self._send_message_size = send_message_size @property", "InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_subclass of this IbIocProfile. :rtype: int", "POSSIBILITY OF SUCH DAMAGE. \"\"\" from pprint import pformat from six import iteritems", "in the documentation and/or other materials provided with the distribution. * Neither the", "controller. :return: The rdma_transfer_size of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "'str', # (required parameter) 'vendor_id': 'str', # (required parameter) 'io_device_id': 'int', # (required", "(required parameter) 'subsystem_id': 'subsystemId', # (required parameter) 'io_class': 'ioClass', # (required parameter) 'io_subclass':", "InfiniBand architecture. 0xFFFF is vendor-specific. :param io_class: The io_class of this IbIocProfile. :type:", "architecture. 0xFFFF is vendor-specific. :return: The io_class of this IbIocProfile. 
:rtype: int :required/optional:", "Sets the io_device_id of this IbIocProfile. A number assigned by vendor to identify", ":required/optional: required \"\"\" return self._subsystem_id @subsystem_id.setter def subsystem_id(self, subsystem_id): \"\"\" Sets the subsystem_id", "of this IbIocProfile. :type: int \"\"\" self._io_device_id = io_device_id @property def device_version(self): \"\"\"", "both objects are equal \"\"\" if self is None or other is None:", ":param protocol: The protocol of this IbIocProfile. :type: int \"\"\" self._protocol = protocol", "def io_class(self): \"\"\" Gets the io_class of this IbIocProfile. The I/O class of", "io_subclass of this IbIocProfile. :type: int \"\"\" self._io_subclass = io_subclass @property def protocol(self):", "Messages in bytes. :param send_message_size: The send_message_size of this IbIocProfile. :type: int \"\"\"", "maximum size of outbound RDMA transfers initiated by the controller. :param rdma_transfer_size: The", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN", "vendor ID in IEEE format. :param vendor_id: The vendor_id of this IbIocProfile. :type:", "GUID used to uniquely identify the I/O controller. :param ioc_guid: The ioc_guid of", "(required parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries', # (required parameter) 'id_string':", "the io_class of this IbIocProfile. The I/O class of the controller. 0x0000 -0xFFFE", "the InfiniBand architecture. 0xFFFF is vendor-specific. :param protocol: The protocol of this IbIocProfile.", "I/O controller vendor ID in IEEE format. :return: The vendor_id of this IbIocProfile.", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,", "this IbIocProfile. :type: str \"\"\" self._ioc_guid = ioc_guid @property def vendor_id(self): \"\"\" Gets", "self._vendor_id = None self._io_device_id = None self._device_version = None self._subsystem_vendor_id = None self._subsystem_id", ":param io_device_id: The io_device_id of this IbIocProfile. :type: int \"\"\" self._io_device_id = io_device_id", "Gets the rdma_transfer_size of this IbIocProfile. The maximum size of outbound RDMA transfers", "as a dict \"\"\" result = {} for attr, _ in iteritems(self.swagger_types): value", "None self._protocol_version = None self._send_message_queue_depth = None self._rdma_read_queue_depth = None self._send_message_size = None", "ID of the enclosure vendor in IEEE format, or else all zeros if", "or other is None: return None return self.__dict__ == other.__dict__ def __ne__(self, other):", "or else all zeros if there is no vendor ID. :return: The subsystem_vendor_id", "attributeMap: The key is attribute name and the value is json key in", "return self._send_message_size @send_message_size.setter def send_message_size(self, send_message_size): \"\"\" Sets the send_message_size of this IbIocProfile.", "IbIocProfile. :type: int \"\"\" self._protocol_version = protocol_version @property def send_message_queue_depth(self): \"\"\" Gets the", "str :required/optional: required \"\"\" return self._vendor_id @vendor_id.setter def vendor_id(self, vendor_id): \"\"\" Sets the", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self, rdma_transfer_size):", "encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. 
:return: The protocol of this", "str :required/optional: required \"\"\" return self._ioc_guid @ioc_guid.setter def ioc_guid(self, ioc_guid): \"\"\" Sets the", "@vendor_id.setter def vendor_id(self, vendor_id): \"\"\" Sets the vendor_id of this IbIocProfile. The I/O", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self,", ":return: The protocol_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol_version", ":return: The rdma_read_queue_depth of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_read_queue_depth", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE", "by the controller. :param rdma_transfer_size: The rdma_transfer_size of this IbIocProfile. :type: int \"\"\"", "@protocol.setter def protocol(self, protocol): \"\"\" Sets the protocol of this IbIocProfile. The I/O", "\"\"\" self._io_device_id = io_device_id @property def device_version(self): \"\"\" Gets the device_version of this", "(required parameter) 'io_subclass': 'ioSubclass', # (required parameter) 'protocol': 'protocol', # (required parameter) 'protocol_version':", "'ioClass', # (required parameter) 'io_subclass': 'ioSubclass', # (required parameter) 'protocol': 'protocol', # (required", "IbIocProfile. The EUI-64 GUID used to uniquely identify the I/O controller. :return: The", "dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item,", "IbIocProfile. The maximum size of Send Messages in bytes. :param send_message_size: The send_message_size", "'id_string': 'str' } self.attribute_map = { 'ioc_guid': 'iocGuid', # (required parameter) 'vendor_id': 'vendorId',", "controller. 0x0000 -0xFFFE is reserved for I/O sub-classes encompassed by the InfiniBand architecture.", "IbIocProfile. A number assigned by the vendor to identify the device version. :return:", "the I/O controller. :param ioc_guid: The ioc_guid of this IbIocProfile. :type: str \"\"\"", "by the swagger code generator program. Do not edit the class manually. \"\"\"", "STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "= value return result def to_str(self): \"\"\" Returns the string representation of the", "io_class(self, io_class): \"\"\" Sets the io_class of this IbIocProfile. The I/O class of", "to endorse or promote products derived from this software without specific prior written", "self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\" Gets the controller_ops_capability_mask of this IbIocProfile.", "attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] =", "int :required/optional: required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the", "THE POSSIBILITY OF SUCH DAMAGE. \"\"\" from pprint import pformat from six import", "of this IbIocProfile. :type: str \"\"\" self._ioc_guid = ioc_guid @property def vendor_id(self): \"\"\"", "str :required/optional: required \"\"\" return self._subsystem_vendor_id @subsystem_vendor_id.setter def subsystem_vendor_id(self, subsystem_vendor_id): \"\"\" Sets the", "\"\"\" Gets the io_device_id of this IbIocProfile. 
A number assigned by vendor to", "Returns true if both objects are equal \"\"\" if self is None or", "other is None: return None return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\"", "is no vendor ID. :param subsystem_vendor_id: The subsystem_vendor_id of this IbIocProfile. :type: str", "@send_message_size.setter def send_message_size(self, send_message_size): \"\"\" Sets the send_message_size of this IbIocProfile. The maximum", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._device_version @device_version.setter def device_version(self, device_version):", "protocol of this IbIocProfile. The I/O protocol of the controller. 0x0000 -0xFFFE is", "- a model defined in Swagger :param dict swaggerTypes: The key is attribute", "vendor-specific. :return: The protocol of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "(required parameter) 'controller_ops_capability_mask': 'int', # (required parameter) 'service_entries': 'int', # (required parameter) 'id_string':", "controller. 0x0000 -0xFFFE is reserved for I/O classes encompassed by the InfiniBand architecture.", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._device_version @device_version.setter def device_version(self, device_version): \"\"\"", "by the vendor to identify the device version. :param device_version: The device_version of", "the controller. 0x0000 -0xFFFE is reserved for I/O classes encompassed by the InfiniBand", "self._send_message_size @send_message_size.setter def send_message_size(self, send_message_size): \"\"\" Sets the send_message_size of this IbIocProfile. The", ":return: The subsystem_id of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._subsystem_id", "format. :return: The vendor_id of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return", "IbIocProfile. :type: int \"\"\" self._io_subclass = io_subclass @property def protocol(self): \"\"\" Gets the", "in Swagger :param dict swaggerTypes: The key is attribute name and the value", "\"\"\" self._io_class = io_class @property def io_subclass(self): \"\"\" Gets the io_subclass of this", "of this IbIocProfile. :type: int \"\"\" self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\"", "or without modification, are permitted (subject to the limitations in the disclaimer below)", "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF", "is vendor-specific. :return: The protocol of this IbIocProfile. :rtype: int :required/optional: required \"\"\"", "= None self._vendor_id = None self._io_device_id = None self._device_version = None self._subsystem_vendor_id =", "to identify the type of I/O controller :param io_device_id: The io_device_id of this", "The subsystem_vendor_id of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._subsystem_vendor_id @subsystem_vendor_id.setter", ":return: The device_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._device_version", "are equal \"\"\" if self is None or other is None: return None", "objects are equal \"\"\" if self is None or other is None: return", "send_message_size of this IbIocProfile. The maximum size of Send Messages in bytes. :param", "source and binary forms, with or without modification, are permitted (subject to the", "The Clear BSD License Copyright (c) – 2016, NetApp, Inc. 
All rights reserved.", "int \"\"\" self._subsystem_id = subsystem_id @property def io_class(self): \"\"\" Gets the io_class of", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \"\"\" from pprint import pformat", "resides. :return: The subsystem_id of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE", "Gets the id_string of this IbIocProfile. A UTF-8 encoded string for identifying the", "vendor_id(self): \"\"\" Gets the vendor_id of this IbIocProfile. The I/O controller vendor ID", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self, rdma_read_queue_depth):", "of the controller. 0x0000 -0xFFFE is reserved for I/O classes encompassed by the", "= io_class @property def io_subclass(self): \"\"\" Gets the io_subclass of this IbIocProfile. The", "-0xFFFE is reserved for I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is", "parameter) 'send_message_size': 'int', # (required parameter) 'rdma_transfer_size': 'int', # (required parameter) 'controller_ops_capability_mask': 'int',", "\"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self, io_subclass): \"\"\" Sets the io_subclass of this", "NetApp, Inc. nor the names of its contributors may be used to endorse", ":rtype: int :required/optional: required \"\"\" return self._send_message_queue_depth @send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets", "COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,", "None self._device_version = None self._subsystem_vendor_id = None self._subsystem_id = None self._io_class = None", "this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._id_string @id_string.setter def id_string(self, id_string):", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._vendor_id @vendor_id.setter def vendor_id(self, vendor_id): \"\"\"", "may be used to endorse or promote products derived from this software without", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "required \"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self, io_subclass): \"\"\" Sets the io_subclass of", "def id_string(self): \"\"\" Gets the id_string of this IbIocProfile. A UTF-8 encoded string", "of this IbIocProfile. The ID of the enclosure vendor in IEEE format, or", "Bit 7 on = Atomic operations from IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask of", "def io_device_id(self): \"\"\" Gets the io_device_id of this IbIocProfile. A number assigned by", "of the controller. 0x0000 -0xFFFE is reserved for I/O protocols encompassed by the", "is vendor-specific. :return: The io_subclass of this IbIocProfile. 
:rtype: int :required/optional: required \"\"\"", "# (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter)", "int \"\"\" self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\" Gets the controller_ops_capability_mask of", "Requests from IOCs Bit 6 on = Atomic operations to IOCs Bit 7", "@property def io_class(self): \"\"\" Gets the io_class of this IbIocProfile. The I/O class", "self._vendor_id @vendor_id.setter def vendor_id(self, vendor_id): \"\"\" Sets the vendor_id of this IbIocProfile. The", "int \"\"\" self._send_message_queue_depth = send_message_queue_depth @property def rdma_read_queue_depth(self): \"\"\" Gets the rdma_read_queue_depth of", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT", "PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS", "Bit 4 on = RDMA Write Requests to IOCs Bit 5 on =", "this IbIocProfile. The maximum depth of the Send Message Queue. :return: The send_message_queue_depth", "required \"\"\" return self._send_message_queue_depth @send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the send_message_queue_depth of", "for I/O classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The", "specific prior written permission. NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT", "The I/O class of the controller. 0x0000 -0xFFFE is reserved for I/O classes", "EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS", "operations from IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile. :type: int \"\"\"", "'serviceEntries', # (required parameter) 'id_string': 'idString' } self._ioc_guid = None self._vendor_id = None", "where the I/O controller resides. :param subsystem_id: The subsystem_id of this IbIocProfile. :type:", "int :required/optional: required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\" Sets the", "Supported operation types of this controller.: Bit 0 on = Send Messages to", "7 on = Atomic operations from IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask of this", "six import iteritems class IbIocProfile(object): \"\"\" NOTE: This class is auto generated by", "vendor to identify the device version. :param device_version: The device_version of this IbIocProfile.", "The number of entries in the service entries table :param service_entries: The service_entries", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "@property def io_subclass(self): \"\"\" Gets the io_subclass of this IbIocProfile. The I/O sub-class", "Redistributions in binary form must reproduce the above copyright notice, this list of", "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "IbIocProfile. 
:type: int \"\"\" self._subsystem_id = subsystem_id @property def io_class(self): \"\"\" Gets the", "return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask of this IbIocProfile.", "'io_class': 'ioClass', # (required parameter) 'io_subclass': 'ioSubclass', # (required parameter) 'protocol': 'protocol', #", ":type: int \"\"\" self._send_message_size = send_message_size @property def rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size", "of this IbIocProfile. Supported operation types of this controller.: Bit 0 on =", "of the controller. 0x0000 -0xFFFE is reserved for I/O sub-classes encompassed by the", "HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "key in definition. \"\"\" self.swagger_types = { 'ioc_guid': 'str', # (required parameter) 'vendor_id':", "Requests to IOCs Bit 5 on = RDMA Write Requests from IOCs Bit", "\"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\" Sets the rdma_read_queue_depth of this", "provided that the following conditions are met: * Redistributions of source code must", "IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_class @io_class.setter def io_class(self, io_class): \"\"\"", "\"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets the subsystem_id of this", "io_device_id(self, io_device_id): \"\"\" Sets the io_device_id of this IbIocProfile. A number assigned by", "'service_entries': 'serviceEntries', # (required parameter) 'id_string': 'idString' } self._ioc_guid = None self._vendor_id =", "assigned by the vendor to identify the device version. :param device_version: The device_version", "def protocol_version(self): \"\"\" Gets the protocol_version of this IbIocProfile. The protocol version (protocol-specific).", "IbIocProfile. The I/O controller vendor ID in IEEE format. :param vendor_id: The vendor_id", "A number assigned by the vendor to identify the device version. :return: The", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN", "the above copyright notice, this list of conditions and the following disclaimer. *", "ioc_guid(self, ioc_guid): \"\"\" Sets the ioc_guid of this IbIocProfile. The EUI-64 GUID used", "from IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile. :type: int \"\"\" self._controller_ops_capability_mask", "parameter) 'service_entries': 'int', # (required parameter) 'id_string': 'str' } self.attribute_map = { 'ioc_guid':", "BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "(required parameter) 'rdma_transfer_size': 'int', # (required parameter) 'controller_ops_capability_mask': 'int', # (required parameter) 'service_entries':", "Sets the device_version of this IbIocProfile. A number assigned by the vendor to", "attribute type. :param dict attributeMap: The key is attribute name and the value", "send_message_queue_depth of this IbIocProfile. The maximum depth of the Send Message Queue. :param", "\"\"\" Gets the controller_ops_capability_mask of this IbIocProfile. Supported operation types of this controller.:", "\"\"\" Gets the rdma_read_queue_depth of this IbIocProfile. 
The maximum depth of the per-channel", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self, io_subclass): \"\"\"", "int \"\"\" self._io_subclass = io_subclass @property def protocol(self): \"\"\" Gets the protocol of", "__eq__(self, other): \"\"\" Returns true if both objects are equal \"\"\" if self", "\"\"\" return self._subsystem_id @subsystem_id.setter def subsystem_id(self, subsystem_id): \"\"\" Sets the subsystem_id of this", "this IbIocProfile. The maximum depth of the per-channel RDMA Read Queue :param rdma_read_queue_depth:", "RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "send_message_size): \"\"\" Sets the send_message_size of this IbIocProfile. The maximum size of Send", "other): \"\"\" Returns true if both objects are equal \"\"\" if self is", "def __repr__(self): \"\"\" For `print` and `pprint` \"\"\" if self is None: return", "'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries', # (required parameter) 'id_string': 'idString' }", "this IbIocProfile. The EUI-64 GUID used to uniquely identify the I/O controller. :return:", "OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", "\"\"\" Sets the protocol_version of this IbIocProfile. The protocol version (protocol-specific). :param protocol_version:", "RDMA Write Requests to IOCs Bit 5 on = RDMA Write Requests from", "of this IbIocProfile. A number assigned by vendor to identify the type of", "# (required parameter) 'vendor_id': 'str', # (required parameter) 'io_device_id': 'int', # (required parameter)", "0x0000 -0xFFFE is reserved for I/O classes encompassed by the InfiniBand architecture. 0xFFFF", ":return: The id_string of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._id_string", "controller :param io_device_id: The io_device_id of this IbIocProfile. :type: int \"\"\" self._io_device_id =", "this IbIocProfile. :type: int \"\"\" self._io_class = io_class @property def io_subclass(self): \"\"\" Gets", "number assigned by vendor to identify the type of I/O controller :param io_device_id:", "ioc_guid of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._ioc_guid @ioc_guid.setter def", "identify the I/O controller. :param ioc_guid: The ioc_guid of this IbIocProfile. :type: str", "Send Message Queue. :param send_message_queue_depth: The send_message_queue_depth of this IbIocProfile. :type: int \"\"\"", "controller. :return: The ioc_guid of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return", "self._send_message_queue_depth @send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the send_message_queue_depth of this IbIocProfile. The", "OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "to_dict(self): \"\"\" Returns the model properties as a dict \"\"\" result = {}", "required \"\"\" return self._id_string @id_string.setter def id_string(self, id_string): \"\"\" Sets the id_string of", "\"\"\" Sets the send_message_size of this IbIocProfile. The maximum size of Send Messages", "IbIocProfile. 
:type: int \"\"\" self._controller_ops_capability_mask = controller_ops_capability_mask @property def service_entries(self): \"\"\" Gets the", "attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\")", "architecture. 0xFFFF is vendor-specific. :param io_subclass: The io_subclass of this IbIocProfile. :type: int", "'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize', # (required parameter) 'rdma_transfer_size': 'rdmaTransferSize', # (required", "vendor_id): \"\"\" Sets the vendor_id of this IbIocProfile. The I/O controller vendor ID", "vendor-specific. :return: The io_class of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "required \"\"\" return self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version): \"\"\" Sets the protocol_version of", ":rtype: int :required/optional: required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets", "IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile. :type: int \"\"\" self._controller_ops_capability_mask =", "self._subsystem_vendor_id @subsystem_vendor_id.setter def subsystem_vendor_id(self, subsystem_vendor_id): \"\"\" Sets the subsystem_vendor_id of this IbIocProfile. The", "of its contributors may be used to endorse or promote products derived from", "all zeros if there is no vendor ID. :return: The subsystem_vendor_id of this", "I/O controller vendor ID in IEEE format. :param vendor_id: The vendor_id of this", ":param io_class: The io_class of this IbIocProfile. :type: int \"\"\" self._io_class = io_class", "and the value is attribute type. :param dict attributeMap: The key is attribute", "to IOCs Bit 1 on = Send Messages from IOCs Bit 2 on", "parameter) 'subsystem_id': 'subsystemId', # (required parameter) 'io_class': 'ioClass', # (required parameter) 'io_subclass': 'ioSubclass',", "'rdmaTransferSize', # (required parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries', # (required", "The maximum size of outbound RDMA transfers initiated by the controller. :return: The", "pformat from six import iteritems class IbIocProfile(object): \"\"\" NOTE: This class is auto", ":param controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile. :type: int \"\"\" self._controller_ops_capability_mask = controller_ops_capability_mask", "AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "\"\"\" self._vendor_id = vendor_id @property def io_device_id(self): \"\"\" Gets the io_device_id of this", "send_message_size @property def rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size of this IbIocProfile. The maximum", "str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets the subsystem_id of", "self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\" Returns true if both objects are", "of this IbIocProfile. :type: int \"\"\" self._protocol_version = protocol_version @property def send_message_queue_depth(self): \"\"\"", "rdma_read_queue_depth of this IbIocProfile. The maximum depth of the per-channel RDMA Read Queue", "form must reproduce the above copyright notice, this list of conditions and the", "0xFFFF is vendor-specific. 
:param protocol: The protocol of this IbIocProfile. :type: int \"\"\"", "of the enclosure vendor in IEEE format, or else all zeros if there", "depth of the Send Message Queue. :param send_message_queue_depth: The send_message_queue_depth of this IbIocProfile.", "None self._subsystem_vendor_id = None self._subsystem_id = None self._io_class = None self._io_subclass = None", "swagger code generator program. Do not edit the class manually. \"\"\" def __init__(self):", "this IbIocProfile. :type: int \"\"\" self._send_message_queue_depth = send_message_queue_depth @property def rdma_read_queue_depth(self): \"\"\" Gets", "UTF-8 encoded string for identifying the controller to user. :param id_string: The id_string", "is None: return None return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\" Returns", "subsystem_vendor_id): \"\"\" Sets the subsystem_vendor_id of this IbIocProfile. The ID of the enclosure", ":required/optional: required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask", "copyright notice, this list of conditions and the following disclaimer in the documentation", "IbIocProfile. :type: str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets the", "IbIocProfile. Supported operation types of this controller.: Bit 0 on = Send Messages", ":required/optional: required \"\"\" return self._ioc_guid @ioc_guid.setter def ioc_guid(self, ioc_guid): \"\"\" Sets the ioc_guid", "None: return None return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\" Returns true", "this IbIocProfile. The I/O sub-class of the controller. 0x0000 -0xFFFE is reserved for", "the swagger code generator program. Do not edit the class manually. \"\"\" def", "parameter) 'id_string': 'idString' } self._ioc_guid = None self._vendor_id = None self._io_device_id = None", "parameter) 'protocol_version': 'protocolVersion', # (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth',", "met: * Redistributions of source code must retain the above copyright notice, this", "'protocol': 'int', # (required parameter) 'protocol_version': 'int', # (required parameter) 'send_message_queue_depth': 'int', #", "prior written permission. NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS", "controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask of this IbIocProfile. Supported operation types of this", "this IbIocProfile. The maximum size of outbound RDMA transfers initiated by the controller.", "Bit 1 on = Send Messages from IOCs Bit 2 on = RDMA", "str \"\"\" self._vendor_id = vendor_id @property def io_device_id(self): \"\"\" Gets the io_device_id of", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY", "(required parameter) 'io_device_id': 'ioDeviceId', # (required parameter) 'device_version': 'deviceVersion', # (required parameter) 'subsystem_vendor_id':", "send_message_size(self): \"\"\" Gets the send_message_size of this IbIocProfile. The maximum size of Send", "size of Send Messages in bytes. :param send_message_size: The send_message_size of this IbIocProfile.", "with the distribution. * Neither the name of NetApp, Inc. nor the names", "controller. :param ioc_guid: The ioc_guid of this IbIocProfile. 
:type: str \"\"\" self._ioc_guid =", "encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_class of this", "the controller. 0x0000 -0xFFFE is reserved for I/O sub-classes encompassed by the InfiniBand", ":param send_message_queue_depth: The send_message_queue_depth of this IbIocProfile. :type: int \"\"\" self._send_message_queue_depth = send_message_queue_depth", "Gets the io_device_id of this IbIocProfile. A number assigned by vendor to identify", "\"\"\" Gets the send_message_queue_depth of this IbIocProfile. The maximum depth of the Send", "# (required parameter) 'id_string': 'str' } self.attribute_map = { 'ioc_guid': 'iocGuid', # (required", "DAMAGE. \"\"\" from pprint import pformat from six import iteritems class IbIocProfile(object): \"\"\"", "of this IbIocProfile. The EUI-64 GUID used to uniquely identify the I/O controller.", "hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif", "OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE.", "return self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version): \"\"\" Sets the protocol_version of this IbIocProfile.", "SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", ":return: The send_message_queue_depth of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._send_message_queue_depth", "of this IbIocProfile. The maximum depth of the per-channel RDMA Read Queue :param", "the distribution. * Neither the name of NetApp, Inc. nor the names of", "this IbIocProfile. :type: int \"\"\" self._device_version = device_version @property def subsystem_vendor_id(self): \"\"\" Gets", "self._send_message_size = send_message_size @property def rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size of this IbIocProfile.", "string for identifying the controller to user. :return: The id_string of this IbIocProfile.", "int :required/optional: required \"\"\" return self._send_message_queue_depth @send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the", "parameter) 'rdma_read_queue_depth': 'int', # (required parameter) 'send_message_size': 'int', # (required parameter) 'rdma_transfer_size': 'int',", "def rdma_read_queue_depth(self): \"\"\" Gets the rdma_read_queue_depth of this IbIocProfile. The maximum depth of", "rdma_transfer_size): \"\"\" Sets the rdma_transfer_size of this IbIocProfile. The maximum size of outbound", "elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\")", "Requests to IOCs Bit 3 on = RDMA Read Requests from IOCs Bit", "= { 'ioc_guid': 'iocGuid', # (required parameter) 'vendor_id': 'vendorId', # (required parameter) 'io_device_id':", "io_device_id: The io_device_id of this IbIocProfile. :type: int \"\"\" self._io_device_id = io_device_id @property", "The controller_ops_capability_mask of this IbIocProfile. :type: int \"\"\" self._controller_ops_capability_mask = controller_ops_capability_mask @property def", ":type: int \"\"\" self._send_message_queue_depth = send_message_queue_depth @property def rdma_read_queue_depth(self): \"\"\" Gets the rdma_read_queue_depth", "vendor_id of this IbIocProfile. 
:rtype: str :required/optional: required \"\"\" return self._vendor_id @vendor_id.setter def", "{ 'ioc_guid': 'str', # (required parameter) 'vendor_id': 'str', # (required parameter) 'io_device_id': 'int',", "The service_entries of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._service_entries @service_entries.setter", "self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version): \"\"\" Sets the protocol_version of this IbIocProfile. The", "if hasattr(item[1], \"to_dict\") else item, value.items() )) else: result[attr] = value return result", "of Send Messages in bytes. :param send_message_size: The send_message_size of this IbIocProfile. :type:", "protocol of this IbIocProfile. :type: int \"\"\" self._protocol = protocol @property def protocol_version(self):", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED", "parameter) 'io_class': 'int', # (required parameter) 'io_subclass': 'int', # (required parameter) 'protocol': 'int',", "# (required parameter) 'subsystem_vendor_id': 'subsystemVendorId', # (required parameter) 'subsystem_id': 'subsystemId', # (required parameter)", "self._rdma_transfer_size = None self._controller_ops_capability_mask = None self._service_entries = None self._id_string = None @property", "return None return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\" Returns true if", "int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets the send_message_size of", "# (required parameter) 'vendor_id': 'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId', # (required parameter)", "on = Send Messages from IOCs Bit 2 on = RDMA Read Requests", "None self._io_device_id = None self._device_version = None self._subsystem_vendor_id = None self._subsystem_id = None", "subsystem_vendor_id of this IbIocProfile. The ID of the enclosure vendor in IEEE format,", "of outbound RDMA transfers initiated by the controller. :param rdma_transfer_size: The rdma_transfer_size of", "IOCs Bit 1 on = Send Messages from IOCs Bit 2 on =", ":required/optional: required \"\"\" return self._vendor_id @vendor_id.setter def vendor_id(self, vendor_id): \"\"\" Sets the vendor_id", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN", "type. :param dict attributeMap: The key is attribute name and the value is", "InfiniBand architecture. 0xFFFF is vendor-specific. :param protocol: The protocol of this IbIocProfile. :type:", "for I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :param io_subclass:", "return self._ioc_guid @ioc_guid.setter def ioc_guid(self, ioc_guid): \"\"\" Sets the ioc_guid of this IbIocProfile.", "Sets the service_entries of this IbIocProfile. The number of entries in the service", ":rtype: int :required/optional: required \"\"\" return self._send_message_size @send_message_size.setter def send_message_size(self, send_message_size): \"\"\" Sets", "def io_class(self, io_class): \"\"\" Sets the io_class of this IbIocProfile. The I/O class", "this IbIocProfile. 
:rtype: int :required/optional: required \"\"\" return self._subsystem_id @subsystem_id.setter def subsystem_id(self, subsystem_id):", "= RDMA Read Requests to IOCs Bit 3 on = RDMA Read Requests", "'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize', # (required", "\"\"\" Sets the subsystem_vendor_id of this IbIocProfile. The ID of the enclosure vendor", "self._send_message_queue_depth = None self._rdma_read_queue_depth = None self._send_message_size = None self._rdma_transfer_size = None self._controller_ops_capability_mask", "device_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._device_version @device_version.setter def", "\"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets the send_message_size of this", ":type: str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets the subsystem_id", "I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_subclass", "def rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size of this IbIocProfile. The maximum size of", "The key is attribute name and the value is attribute type. :param dict", "used to uniquely identify the I/O controller. :return: The ioc_guid of this IbIocProfile.", "parameter) 'rdma_transfer_size': 'rdmaTransferSize', # (required parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries',", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._service_entries @service_entries.setter def service_entries(self, service_entries):", "0 on = Send Messages to IOCs Bit 1 on = Send Messages", "self._controller_ops_capability_mask = controller_ops_capability_mask @property def service_entries(self): \"\"\" Gets the service_entries of this IbIocProfile.", "self._io_subclass = None self._protocol = None self._protocol_version = None self._send_message_queue_depth = None self._rdma_read_queue_depth", "@property def rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size of this IbIocProfile. The maximum size", "(required parameter) 'rdma_read_queue_depth': 'int', # (required parameter) 'send_message_size': 'int', # (required parameter) 'rdma_transfer_size':", "'deviceVersion', # (required parameter) 'subsystem_vendor_id': 'subsystemVendorId', # (required parameter) 'subsystem_id': 'subsystemId', # (required", "def subsystem_vendor_id(self, subsystem_vendor_id): \"\"\" Sets the subsystem_vendor_id of this IbIocProfile. The ID of", "@subsystem_id.setter def subsystem_id(self, subsystem_id): \"\"\" Sets the subsystem_id of this IbIocProfile. A number", "result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items()", "# (required parameter) 'protocol': 'int', # (required parameter) 'protocol_version': 'int', # (required parameter)", "TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE", "IOCs Bit 3 on = RDMA Read Requests from IOCs Bit 4 on", "Read Queue :param rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth", "software without specific prior written permission. 
NO EXPRESS OR IMPLIED LICENSES TO ANY", "AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "that the following conditions are met: * Redistributions of source code must retain", "IbIocProfile(object): \"\"\" NOTE: This class is auto generated by the swagger code generator", "service_entries @property def id_string(self): \"\"\" Gets the id_string of this IbIocProfile. A UTF-8", "@property def subsystem_vendor_id(self): \"\"\" Gets the subsystem_vendor_id of this IbIocProfile. The ID of", "(required parameter) 'vendor_id': 'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId', # (required parameter) 'device_version':", "the io_device_id of this IbIocProfile. A number assigned by vendor to identify the", "defined in Swagger :param dict swaggerTypes: The key is attribute name and the", "ioc_guid of this IbIocProfile. :type: str \"\"\" self._ioc_guid = ioc_guid @property def vendor_id(self):", "vendor_id @property def io_device_id(self): \"\"\" Gets the io_device_id of this IbIocProfile. A number", "The vendor_id of this IbIocProfile. :type: str \"\"\" self._vendor_id = vendor_id @property def", "bytes. :return: The send_message_size of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "int :required/optional: required \"\"\" return self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version): \"\"\" Sets the", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "of this IbIocProfile. The I/O class of the controller. 0x0000 -0xFFFE is reserved", "this IbIocProfile. The I/O controller vendor ID in IEEE format. :return: The vendor_id", "else: result[attr] = value return result def to_str(self): \"\"\" Returns the string representation", "return self.to_str() def __eq__(self, other): \"\"\" Returns true if both objects are equal", "the string representation of the model \"\"\" return pformat(self.to_dict()) def __repr__(self): \"\"\" For", "reserved for I/O classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return:", "identifying the controller to user. :return: The id_string of this IbIocProfile. :rtype: str", "(c) – 2016, NetApp, Inc. All rights reserved. Redistribution and use in source", "of this IbIocProfile. The number of entries in the service entries table :param", "@property def protocol(self): \"\"\" Gets the protocol of this IbIocProfile. The I/O protocol", "of this IbIocProfile. The maximum depth of the Send Message Queue. :param send_message_queue_depth:", "id_string: The id_string of this IbIocProfile. :type: str \"\"\" self._id_string = id_string def", "\"\"\" return self._io_device_id @io_device_id.setter def io_device_id(self, io_device_id): \"\"\" Sets the io_device_id of this", ":return: The send_message_size of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._send_message_size", "= rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\" Gets the controller_ops_capability_mask of this IbIocProfile. Supported", ":type: str \"\"\" self._ioc_guid = ioc_guid @property def vendor_id(self): \"\"\" Gets the vendor_id", "I/O class of the controller. 0x0000 -0xFFFE is reserved for I/O classes encompassed", "identify the type of I/O controller :return: The io_device_id of this IbIocProfile. :rtype:", "0x0000 -0xFFFE is reserved for I/O sub-classes encompassed by the InfiniBand architecture. 
0xFFFF", "# (required parameter) 'send_message_size': 'int', # (required parameter) 'rdma_transfer_size': 'int', # (required parameter)", "of entries in the service entries table :param service_entries: The service_entries of this", "Messages in bytes. :return: The send_message_size of this IbIocProfile. :rtype: int :required/optional: required", "is vendor-specific. :param io_class: The io_class of this IbIocProfile. :type: int \"\"\" self._io_class", "{} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list):", "utf-8 \"\"\" IbIocProfile.py The Clear BSD License Copyright (c) – 2016, NetApp, Inc.", "parameter) 'device_version': 'int', # (required parameter) 'subsystem_vendor_id': 'str', # (required parameter) 'subsystem_id': 'int',", "of the model \"\"\" return pformat(self.to_dict()) def __repr__(self): \"\"\" For `print` and `pprint`", "all zeros if there is no vendor ID. :param subsystem_vendor_id: The subsystem_vendor_id of", "io_device_id of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_device_id @io_device_id.setter def", "self._service_entries @service_entries.setter def service_entries(self, service_entries): \"\"\" Sets the service_entries of this IbIocProfile. The", "the protocol of this IbIocProfile. The I/O protocol of the controller. 0x0000 -0xFFFE", ":type: int \"\"\" self._protocol = protocol @property def protocol_version(self): \"\"\" Gets the protocol_version", "to IOCs Bit 3 on = RDMA Read Requests from IOCs Bit 4", "I/O controller :param io_device_id: The io_device_id of this IbIocProfile. :type: int \"\"\" self._io_device_id", "def service_entries(self): \"\"\" Gets the service_entries of this IbIocProfile. The number of entries", "@property def vendor_id(self): \"\"\" Gets the vendor_id of this IbIocProfile. The I/O controller", "model properties as a dict \"\"\" result = {} for attr, _ in", "name and the value is attribute type. :param dict attributeMap: The key is", "(required parameter) 'device_version': 'int', # (required parameter) 'subsystem_vendor_id': 'str', # (required parameter) 'subsystem_id':", "'int', # (required parameter) 'protocol': 'int', # (required parameter) 'protocol_version': 'int', # (required", "RDMA Read Requests to IOCs Bit 3 on = RDMA Read Requests from", "Bit 0 on = Send Messages to IOCs Bit 1 on = Send", "None self._subsystem_id = None self._io_class = None self._io_subclass = None self._protocol = None", "\"\"\" return self._ioc_guid @ioc_guid.setter def ioc_guid(self, ioc_guid): \"\"\" Sets the ioc_guid of this", "\"\"\" return self._vendor_id @vendor_id.setter def vendor_id(self, vendor_id): \"\"\" Sets the vendor_id of this", "def subsystem_vendor_id(self): \"\"\" Gets the subsystem_vendor_id of this IbIocProfile. The ID of the", "'int', # (required parameter) 'send_message_size': 'int', # (required parameter) 'rdma_transfer_size': 'int', # (required", "The service_entries of this IbIocProfile. 
:type: int \"\"\" self._service_entries = service_entries @property def", "parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize',", "in source and binary forms, with or without modification, are permitted (subject to", "parameter) 'device_version': 'deviceVersion', # (required parameter) 'subsystem_vendor_id': 'subsystemVendorId', # (required parameter) 'subsystem_id': 'subsystemId',", "operations to IOCs Bit 7 on = Atomic operations from IOCs :return: The", "to uniquely identify the I/O controller. :param ioc_guid: The ioc_guid of this IbIocProfile.", "the above copyright notice, this list of conditions and the following disclaimer in", "number identifying the subsystem where the I/O controller resides. :return: The subsystem_id of", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "\"\"\" self._device_version = device_version @property def subsystem_vendor_id(self): \"\"\" Gets the subsystem_vendor_id of this", "this IbIocProfile. A number identifying the subsystem where the I/O controller resides. :return:", "version. :param device_version: The device_version of this IbIocProfile. :type: int \"\"\" self._device_version =", "protocol of the controller. 0x0000 -0xFFFE is reserved for I/O protocols encompassed by", "this IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets", "@ioc_guid.setter def ioc_guid(self, ioc_guid): \"\"\" Sets the ioc_guid of this IbIocProfile. The EUI-64", "'int', # (required parameter) 'rdma_transfer_size': 'int', # (required parameter) 'controller_ops_capability_mask': 'int', # (required", "ioc_guid @property def vendor_id(self): \"\"\" Gets the vendor_id of this IbIocProfile. The I/O", "vendor to identify the device version. :return: The device_version of this IbIocProfile. :rtype:", "if self is None: return None return self.to_str() def __eq__(self, other): \"\"\" Returns", "the following disclaimer in the documentation and/or other materials provided with the distribution.", "\"\"\" Sets the send_message_queue_depth of this IbIocProfile. The maximum depth of the Send", "self._device_version = None self._subsystem_vendor_id = None self._subsystem_id = None self._io_class = None self._io_subclass", "of this IbIocProfile. The number of entries in the service entries table :return:", "controller_ops_capability_mask of this IbIocProfile. Supported operation types of this controller.: Bit 0 on", "send_message_queue_depth: The send_message_queue_depth of this IbIocProfile. :type: int \"\"\" self._send_message_queue_depth = send_message_queue_depth @property", "identifying the subsystem where the I/O controller resides. :param subsystem_id: The subsystem_id of", "type of I/O controller :return: The io_device_id of this IbIocProfile. :rtype: int :required/optional:", "def service_entries(self, service_entries): \"\"\" Sets the service_entries of this IbIocProfile. The number of", "the value is attribute type. :param dict attributeMap: The key is attribute name", "# (required parameter) 'protocol_version': 'protocolVersion', # (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter)", "type of I/O controller :param io_device_id: The io_device_id of this IbIocProfile. 
:type: int", "the type of I/O controller :return: The io_device_id of this IbIocProfile. :rtype: int", "'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries', # (required parameter) 'id_string': 'idString' } self._ioc_guid", "this IbIocProfile. A number identifying the subsystem where the I/O controller resides. :param", "= None self._send_message_queue_depth = None self._rdma_read_queue_depth = None self._send_message_size = None self._rdma_transfer_size =", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._device_version @device_version.setter def device_version(self,", "of Send Messages in bytes. :return: The send_message_size of this IbIocProfile. :rtype: int", "by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_class of this IbIocProfile.", "IbIocProfile. The I/O controller vendor ID in IEEE format. :return: The vendor_id of", "import iteritems class IbIocProfile(object): \"\"\" NOTE: This class is auto generated by the", "written permission. NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE", "(required parameter) 'subsystem_id': 'int', # (required parameter) 'io_class': 'int', # (required parameter) 'io_subclass':", ":required/optional: required \"\"\" return self._send_message_queue_depth @send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the send_message_queue_depth", "this software without specific prior written permission. NO EXPRESS OR IMPLIED LICENSES TO", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;", "or else all zeros if there is no vendor ID. :param subsystem_vendor_id: The", "by the controller. :return: The rdma_transfer_size of this IbIocProfile. :rtype: int :required/optional: required", "'rdma_transfer_size': 'int', # (required parameter) 'controller_ops_capability_mask': 'int', # (required parameter) 'service_entries': 'int', #", "io_device_id of this IbIocProfile. :type: int \"\"\" self._io_device_id = io_device_id @property def device_version(self):", "if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else", "in IEEE format, or else all zeros if there is no vendor ID.", "2 on = RDMA Read Requests to IOCs Bit 3 on = RDMA", "sub-classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :param io_subclass: The io_subclass", "the device_version of this IbIocProfile. A number assigned by the vendor to identify", "size of outbound RDMA transfers initiated by the controller. :return: The rdma_transfer_size of", ":rtype: str :required/optional: required \"\"\" return self._id_string @id_string.setter def id_string(self, id_string): \"\"\" Sets", "id_string def to_dict(self): \"\"\" Returns the model properties as a dict \"\"\" result", "identify the device version. :return: The device_version of this IbIocProfile. :rtype: int :required/optional:", "sub-classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_subclass of", "encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :param io_subclass: The io_subclass of", "The key is attribute name and the value is json key in definition.", "= service_entries @property def id_string(self): \"\"\" Gets the id_string of this IbIocProfile. A", "The rdma_transfer_size of this IbIocProfile. :type: int \"\"\" self._rdma_transfer_size = rdma_transfer_size @property def", "protocol_version of this IbIocProfile. 
The protocol version (protocol-specific). :return: The protocol_version of this", "self._subsystem_vendor_id = None self._subsystem_id = None self._io_class = None self._io_subclass = None self._protocol", "\"\"\" Sets the ioc_guid of this IbIocProfile. The EUI-64 GUID used to uniquely", "'device_version': 'int', # (required parameter) 'subsystem_vendor_id': 'str', # (required parameter) 'subsystem_id': 'int', #", "of this IbIocProfile. A number assigned by the vendor to identify the device", "device_version(self, device_version): \"\"\" Sets the device_version of this IbIocProfile. A number assigned by", "send_message_queue_depth): \"\"\" Sets the send_message_queue_depth of this IbIocProfile. The maximum depth of the", "representation of the model \"\"\" return pformat(self.to_dict()) def __repr__(self): \"\"\" For `print` and", "IbIocProfile - a model defined in Swagger :param dict swaggerTypes: The key is", "id_string of this IbIocProfile. A UTF-8 encoded string for identifying the controller to", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._vendor_id @vendor_id.setter def vendor_id(self,", "int :required/optional: required \"\"\" return self._io_class @io_class.setter def io_class(self, io_class): \"\"\" Sets the", "is json key in definition. \"\"\" self.swagger_types = { 'ioc_guid': 'str', # (required", "device_version(self): \"\"\" Gets the device_version of this IbIocProfile. A number assigned by the", "* Redistributions of source code must retain the above copyright notice, this list", "encoded string for identifying the controller to user. :param id_string: The id_string of", "x: x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr]", ":required/optional: required \"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self, io_subclass): \"\"\" Sets the io_subclass", "int \"\"\" self._protocol = protocol @property def protocol_version(self): \"\"\" Gets the protocol_version of", "(required parameter) 'id_string': 'idString' } self._ioc_guid = None self._vendor_id = None self._io_device_id =", "Queue :param rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth =", "'ioDeviceId', # (required parameter) 'device_version': 'deviceVersion', # (required parameter) 'subsystem_vendor_id': 'subsystemVendorId', # (required", "id_string(self): \"\"\" Gets the id_string of this IbIocProfile. A UTF-8 encoded string for", "Send Messages in bytes. :param send_message_size: The send_message_size of this IbIocProfile. :type: int", "self._send_message_size = None self._rdma_transfer_size = None self._controller_ops_capability_mask = None self._service_entries = None self._id_string", "(required parameter) 'id_string': 'str' } self.attribute_map = { 'ioc_guid': 'iocGuid', # (required parameter)", "be used to endorse or promote products derived from this software without specific", "(protocol-specific). :param protocol_version: The protocol_version of this IbIocProfile. 
:type: int \"\"\" self._protocol_version =", "parameter) 'protocol': 'protocol', # (required parameter) 'protocol_version': 'protocolVersion', # (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth',", "return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\" Sets the rdma_read_queue_depth of this IbIocProfile.", ":type: int \"\"\" self._io_class = io_class @property def io_subclass(self): \"\"\" Gets the io_subclass", "None self._send_message_queue_depth = None self._rdma_read_queue_depth = None self._send_message_size = None self._rdma_transfer_size = None", "this IbIocProfile. A number assigned by vendor to identify the type of I/O", "send_message_queue_depth @property def rdma_read_queue_depth(self): \"\"\" Gets the rdma_read_queue_depth of this IbIocProfile. The maximum", "of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._id_string @id_string.setter def id_string(self,", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", ":type: int \"\"\" self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\" Gets the controller_ops_capability_mask", "= getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if", "@property def io_device_id(self): \"\"\" Gets the io_device_id of this IbIocProfile. A number assigned", "rdma_transfer_size: The rdma_transfer_size of this IbIocProfile. :type: int \"\"\" self._rdma_transfer_size = rdma_transfer_size @property", "\"\"\" self._io_subclass = io_subclass @property def protocol(self): \"\"\" Gets the protocol of this", "controller. :param rdma_transfer_size: The rdma_transfer_size of this IbIocProfile. :type: int \"\"\" self._rdma_transfer_size =", "5 on = RDMA Write Requests from IOCs Bit 6 on = Atomic", "Send Messages in bytes. :return: The send_message_size of this IbIocProfile. :rtype: int :required/optional:", "} self._ioc_guid = None self._vendor_id = None self._io_device_id = None self._device_version = None", "the subsystem where the I/O controller resides. :return: The subsystem_id of this IbIocProfile.", "IEEE format. :return: The vendor_id of this IbIocProfile. :rtype: str :required/optional: required \"\"\"", "NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "rdma_transfer_size of this IbIocProfile. The maximum size of outbound RDMA transfers initiated by", "must retain the above copyright notice, this list of conditions and the following", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._send_message_size @send_message_size.setter def send_message_size(self, send_message_size):", "io_device_id @property def device_version(self): \"\"\" Gets the device_version of this IbIocProfile. A number", "Do not edit the class manually. \"\"\" def __init__(self): \"\"\" IbIocProfile - a", "ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED", "protocol of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol @protocol.setter def", "a model defined in Swagger :param dict swaggerTypes: The key is attribute name", "OF SUCH DAMAGE. \"\"\" from pprint import pformat from six import iteritems class", "self is None or other is None: return None return self.__dict__ == other.__dict__", "Gets the io_class of this IbIocProfile. The I/O class of the controller. 
0x0000", "controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile. :type: int \"\"\" self._controller_ops_capability_mask = controller_ops_capability_mask @property", "None self._controller_ops_capability_mask = None self._service_entries = None self._id_string = None @property def ioc_guid(self):", "The EUI-64 GUID used to uniquely identify the I/O controller. :return: The ioc_guid", "parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize', # (required parameter) 'rdma_transfer_size': 'rdmaTransferSize',", "IbIocProfile. A UTF-8 encoded string for identifying the controller to user. :return: The", "\"\"\" return pformat(self.to_dict()) def __repr__(self): \"\"\" For `print` and `pprint` \"\"\" if self", "self._io_device_id @io_device_id.setter def io_device_id(self, io_device_id): \"\"\" Sets the io_device_id of this IbIocProfile. A", "outbound RDMA transfers initiated by the controller. :param rdma_transfer_size: The rdma_transfer_size of this", "\"\"\" return self._device_version @device_version.setter def device_version(self, device_version): \"\"\" Sets the device_version of this", "# (required parameter) 'device_version': 'int', # (required parameter) 'subsystem_vendor_id': 'str', # (required parameter)", "of this IbIocProfile. The protocol version (protocol-specific). :return: The protocol_version of this IbIocProfile.", "def send_message_size(self): \"\"\" Gets the send_message_size of this IbIocProfile. The maximum size of", "The I/O sub-class of the controller. 0x0000 -0xFFFE is reserved for I/O sub-classes", "this list of conditions and the following disclaimer. * Redistributions in binary form", "without modification, are permitted (subject to the limitations in the disclaimer below) provided", "protocol_version: The protocol_version of this IbIocProfile. :type: int \"\"\" self._protocol_version = protocol_version @property", "model defined in Swagger :param dict swaggerTypes: The key is attribute name and", ":param device_version: The device_version of this IbIocProfile. :type: int \"\"\" self._device_version = device_version", "The protocol version (protocol-specific). :param protocol_version: The protocol_version of this IbIocProfile. :type: int", "def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask of this IbIocProfile. Supported operation types", "of this IbIocProfile. The maximum size of Send Messages in bytes. :param send_message_size:", "\"\"\" self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\" Gets the controller_ops_capability_mask of this", "self._id_string = id_string def to_dict(self): \"\"\" Returns the model properties as a dict", "name of NetApp, Inc. nor the names of its contributors may be used", "Returns the model properties as a dict \"\"\" result = {} for attr,", "io_class): \"\"\" Sets the io_class of this IbIocProfile. The I/O class of the", "coding: utf-8 \"\"\" IbIocProfile.py The Clear BSD License Copyright (c) – 2016, NetApp,", "else x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict):", "'controller_ops_capability_mask': 'int', # (required parameter) 'service_entries': 'int', # (required parameter) 'id_string': 'str' }", "controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask of this IbIocProfile. 
Supported operation types of", "(required parameter) 'send_message_size': 'int', # (required parameter) 'rdma_transfer_size': 'int', # (required parameter) 'controller_ops_capability_mask':", "rdma_transfer_size of this IbIocProfile. :type: int \"\"\" self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self):", "parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries', # (required parameter) 'id_string': 'idString'", "of source code must retain the above copyright notice, this list of conditions", "ID in IEEE format. :return: The vendor_id of this IbIocProfile. :rtype: str :required/optional:", "self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets the send_message_size of this IbIocProfile.", "int :required/optional: required \"\"\" return self._service_entries @service_entries.setter def service_entries(self, service_entries): \"\"\" Sets the", "self._id_string = None @property def ioc_guid(self): \"\"\" Gets the ioc_guid of this IbIocProfile.", "\"\"\" NOTE: This class is auto generated by the swagger code generator program.", "A number assigned by vendor to identify the type of I/O controller :param", ":type: str \"\"\" self._id_string = id_string def to_dict(self): \"\"\" Returns the model properties", "uniquely identify the I/O controller. :return: The ioc_guid of this IbIocProfile. :rtype: str", "program. Do not edit the class manually. \"\"\" def __init__(self): \"\"\" IbIocProfile -", "subsystem_vendor_id of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._subsystem_vendor_id @subsystem_vendor_id.setter def", "TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "enclosure vendor in IEEE format, or else all zeros if there is no", "= None self._protocol_version = None self._send_message_queue_depth = None self._rdma_read_queue_depth = None self._send_message_size =", "Sets the subsystem_id of this IbIocProfile. A number identifying the subsystem where the", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol @protocol.setter def protocol(self, protocol): \"\"\"", "on = RDMA Write Requests to IOCs Bit 5 on = RDMA Write", "Bit 6 on = Atomic operations to IOCs Bit 7 on = Atomic", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "send_message_size of this IbIocProfile. :type: int \"\"\" self._send_message_size = send_message_size @property def rdma_transfer_size(self):", "The subsystem_vendor_id of this IbIocProfile. :type: str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property def", "# (required parameter) 'protocol': 'protocol', # (required parameter) 'protocol_version': 'protocolVersion', # (required parameter)", "def ioc_guid(self, ioc_guid): \"\"\" Sets the ioc_guid of this IbIocProfile. The EUI-64 GUID", "parameter) 'vendor_id': 'str', # (required parameter) 'io_device_id': 'int', # (required parameter) 'device_version': 'int',", "'str', # (required parameter) 'subsystem_id': 'int', # (required parameter) 'io_class': 'int', # (required", ":rtype: str :required/optional: required \"\"\" return self._ioc_guid @ioc_guid.setter def ioc_guid(self, ioc_guid): \"\"\" Sets", "this IbIocProfile. The I/O protocol of the controller. 0x0000 -0xFFFE is reserved for", "generated by the swagger code generator program. 
Do not edit the class manually.", "Gets the service_entries of this IbIocProfile. The number of entries in the service", "copyright notice, this list of conditions and the following disclaimer. * Redistributions in", "ID in IEEE format. :param vendor_id: The vendor_id of this IbIocProfile. :type: str", "Sets the send_message_queue_depth of this IbIocProfile. The maximum depth of the Send Message", "of this IbIocProfile. :type: int \"\"\" self._service_entries = service_entries @property def id_string(self): \"\"\"", "\"\"\" self._id_string = id_string def to_dict(self): \"\"\" Returns the model properties as a", "to user. :param id_string: The id_string of this IbIocProfile. :type: str \"\"\" self._id_string", "class manually. \"\"\" def __init__(self): \"\"\" IbIocProfile - a model defined in Swagger", "entries table :return: The service_entries of this IbIocProfile. :rtype: int :required/optional: required \"\"\"", "rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property", "self._service_entries = None self._id_string = None @property def ioc_guid(self): \"\"\" Gets the ioc_guid", "protocol_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol_version @protocol_version.setter def", "The protocol of this IbIocProfile. :type: int \"\"\" self._protocol = protocol @property def", "Gets the subsystem_vendor_id of this IbIocProfile. The ID of the enclosure vendor in", "\"\"\" Sets the io_device_id of this IbIocProfile. A number assigned by vendor to", "Sets the protocol_version of this IbIocProfile. The protocol version (protocol-specific). :param protocol_version: The", "Sets the rdma_read_queue_depth of this IbIocProfile. The maximum depth of the per-channel RDMA", "IbIocProfile. :type: str \"\"\" self._ioc_guid = ioc_guid @property def vendor_id(self): \"\"\" Gets the", "= None self._subsystem_id = None self._io_class = None self._io_subclass = None self._protocol =", "0xFFFF is vendor-specific. :return: The io_class of this IbIocProfile. :rtype: int :required/optional: required", "Sets the ioc_guid of this IbIocProfile. The EUI-64 GUID used to uniquely identify", "operations from IOCs :return: The controller_ops_capability_mask of this IbIocProfile. :rtype: int :required/optional: required", "vendor ID. :param subsystem_vendor_id: The subsystem_vendor_id of this IbIocProfile. :type: str \"\"\" self._subsystem_vendor_id", "_ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map(", "INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self,", "\"\"\" Sets the service_entries of this IbIocProfile. The number of entries in the", "@service_entries.setter def service_entries(self, service_entries): \"\"\" Sets the service_entries of this IbIocProfile. The number", "is reserved for I/O classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific.", "The maximum size of Send Messages in bytes. :return: The send_message_size of this", "the I/O controller. :return: The ioc_guid of this IbIocProfile. :rtype: str :required/optional: required", "identify the device version. :param device_version: The device_version of this IbIocProfile. :type: int", "of this IbIocProfile. 
:rtype: int :required/optional: required \"\"\" return self._protocol @protocol.setter def protocol(self,", "= {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value,", "io_device_id): \"\"\" Sets the io_device_id of this IbIocProfile. A number assigned by vendor", "id_string): \"\"\" Sets the id_string of this IbIocProfile. A UTF-8 encoded string for", "format. :param vendor_id: The vendor_id of this IbIocProfile. :type: str \"\"\" self._vendor_id =", "\"\"\" Gets the service_entries of this IbIocProfile. The number of entries in the", "\"\"\" Sets the protocol of this IbIocProfile. The I/O protocol of the controller.", "the per-channel RDMA Read Queue :param rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile. :type:", ":required/optional: required \"\"\" return self._subsystem_vendor_id @subsystem_vendor_id.setter def subsystem_vendor_id(self, subsystem_vendor_id): \"\"\" Sets the subsystem_vendor_id", "this IbIocProfile. The I/O class of the controller. 0x0000 -0xFFFE is reserved for", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", ":type: int \"\"\" self._subsystem_id = subsystem_id @property def io_class(self): \"\"\" Gets the io_class", "\"\"\" Gets the send_message_size of this IbIocProfile. The maximum size of Send Messages", "this list of conditions and the following disclaimer in the documentation and/or other", "vendor-specific. :param io_class: The io_class of this IbIocProfile. :type: int \"\"\" self._io_class =", "The protocol of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol @protocol.setter", "vendor-specific. :param protocol: The protocol of this IbIocProfile. :type: int \"\"\" self._protocol =", "@property def id_string(self): \"\"\" Gets the id_string of this IbIocProfile. A UTF-8 encoded", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self, io_subclass):", "\"\"\" IbIocProfile - a model defined in Swagger :param dict swaggerTypes: The key", "@io_class.setter def io_class(self, io_class): \"\"\" Sets the io_class of this IbIocProfile. The I/O", "= protocol @property def protocol_version(self): \"\"\" Gets the protocol_version of this IbIocProfile. The", "entries in the service entries table :return: The service_entries of this IbIocProfile. :rtype:", "I/O controller. :return: The ioc_guid of this IbIocProfile. :rtype: str :required/optional: required \"\"\"", "'int', # (required parameter) 'protocol_version': 'int', # (required parameter) 'send_message_queue_depth': 'int', # (required", "'io_subclass': 'int', # (required parameter) 'protocol': 'int', # (required parameter) 'protocol_version': 'int', #", "rdma_read_queue_depth): \"\"\" Sets the rdma_read_queue_depth of this IbIocProfile. The maximum depth of the", "self._io_class = io_class @property def io_subclass(self): \"\"\" Gets the io_subclass of this IbIocProfile.", "maximum size of Send Messages in bytes. 
:return: The send_message_size of this IbIocProfile.", "disclaimer below) provided that the following conditions are met: * Redistributions of source", "binary form must reproduce the above copyright notice, this list of conditions and", "required \"\"\" return self._io_class @io_class.setter def io_class(self, io_class): \"\"\" Sets the io_class of", "int \"\"\" self._controller_ops_capability_mask = controller_ops_capability_mask @property def service_entries(self): \"\"\" Gets the service_entries of", ":type: int \"\"\" self._controller_ops_capability_mask = controller_ops_capability_mask @property def service_entries(self): \"\"\" Gets the service_entries", "subsystem_id @property def io_class(self): \"\"\" Gets the io_class of this IbIocProfile. The I/O", "return self._io_device_id @io_device_id.setter def io_device_id(self, io_device_id): \"\"\" Sets the io_device_id of this IbIocProfile.", "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "by the InfiniBand architecture. 0xFFFF is vendor-specific. :param io_subclass: The io_subclass of this", "self._io_device_id = io_device_id @property def device_version(self): \"\"\" Gets the device_version of this IbIocProfile.", "'ioc_guid': 'iocGuid', # (required parameter) 'vendor_id': 'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId', #", "parameter) 'vendor_id': 'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId', # (required parameter) 'device_version': 'deviceVersion',", ":rtype: int :required/optional: required \"\"\" return self._service_entries @service_entries.setter def service_entries(self, service_entries): \"\"\" Sets", "protocol_version): \"\"\" Sets the protocol_version of this IbIocProfile. The protocol version (protocol-specific). :param", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \"\"\" from pprint import pformat from", "conditions and the following disclaimer. * Redistributions in binary form must reproduce the", "= ioc_guid @property def vendor_id(self): \"\"\" Gets the vendor_id of this IbIocProfile. The", "The ioc_guid of this IbIocProfile. :type: str \"\"\" self._ioc_guid = ioc_guid @property def", "The io_subclass of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_subclass @io_subclass.setter", "Read Queue :return: The rdma_read_queue_depth of this IbIocProfile. :rtype: int :required/optional: required \"\"\"", "ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF", "= subsystem_id @property def io_class(self): \"\"\" Gets the io_class of this IbIocProfile. The", "\"\"\" Sets the io_class of this IbIocProfile. The I/O class of the controller.", "self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets the subsystem_id of this IbIocProfile.", "other materials provided with the distribution. * Neither the name of NetApp, Inc.", "\"\"\" Sets the io_subclass of this IbIocProfile. The I/O sub-class of the controller.", ":return: The controller_ops_capability_mask of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._controller_ops_capability_mask", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol @protocol.setter def protocol(self, protocol):", "to IOCs Bit 7 on = Atomic operations from IOCs :return: The controller_ops_capability_mask", "* Neither the name of NetApp, Inc. nor the names of its contributors", "value is json key in definition. 
\"\"\" self.swagger_types = { 'ioc_guid': 'str', #", "vendor_id of this IbIocProfile. The I/O controller vendor ID in IEEE format. :param", "model \"\"\" return pformat(self.to_dict()) def __repr__(self): \"\"\" For `print` and `pprint` \"\"\" if", "send_message_queue_depth(self): \"\"\" Gets the send_message_queue_depth of this IbIocProfile. The maximum depth of the", "LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "number of entries in the service entries table :param service_entries: The service_entries of", ":required/optional: required \"\"\" return self._io_device_id @io_device_id.setter def io_device_id(self, io_device_id): \"\"\" Sets the io_device_id", "in the disclaimer below) provided that the following conditions are met: * Redistributions", "of this IbIocProfile. The maximum depth of the per-channel RDMA Read Queue :return:", "in binary form must reproduce the above copyright notice, this list of conditions", "(item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) else: result[attr] = value", "# (required parameter) 'io_class': 'int', # (required parameter) 'io_subclass': 'int', # (required parameter)", "assigned by the vendor to identify the device version. :return: The device_version of", "isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\"", "\"\"\" if self is None: return None return self.to_str() def __eq__(self, other): \"\"\"", "IOCs Bit 5 on = RDMA Write Requests from IOCs Bit 6 on", "0xFFFF is vendor-specific. :param io_subclass: The io_subclass of this IbIocProfile. :type: int \"\"\"", ":return: The io_subclass of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_subclass", "source code must retain the above copyright notice, this list of conditions and", "Read Requests to IOCs Bit 3 on = RDMA Read Requests from IOCs", "= Send Messages from IOCs Bit 2 on = RDMA Read Requests to", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_class @io_class.setter def io_class(self, io_class):", "import pformat from six import iteritems class IbIocProfile(object): \"\"\" NOTE: This class is", "Atomic operations from IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile. :type: int", "A UTF-8 encoded string for identifying the controller to user. :return: The id_string", "'protocol', # (required parameter) 'protocol_version': 'protocolVersion', # (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required", ":param dict swaggerTypes: The key is attribute name and the value is attribute", "the enclosure vendor in IEEE format, or else all zeros if there is", ":rtype: int :required/optional: required \"\"\" return self._io_device_id @io_device_id.setter def io_device_id(self, io_device_id): \"\"\" Sets", "send_message_size of this IbIocProfile. The maximum size of Send Messages in bytes. :return:", "'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId', # (required parameter) 'device_version': 'deviceVersion', # (required", "identifying the subsystem where the I/O controller resides. :return: The subsystem_id of this", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\"\"\" from pprint", "self._ioc_guid = ioc_guid @property def vendor_id(self): \"\"\" Gets the vendor_id of this IbIocProfile.", "ioc_guid): \"\"\" Sets the ioc_guid of this IbIocProfile. The EUI-64 GUID used to", "device_version of this IbIocProfile. :type: int \"\"\" self._device_version = device_version @property def subsystem_vendor_id(self):", "the following disclaimer. * Redistributions in binary form must reproduce the above copyright", "(subject to the limitations in the disclaimer below) provided that the following conditions", "depth of the Send Message Queue. :return: The send_message_queue_depth of this IbIocProfile. :rtype:", "# (required parameter) 'rdma_transfer_size': 'rdmaTransferSize', # (required parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter)", "format, or else all zeros if there is no vendor ID. :return: The", "protocol: The protocol of this IbIocProfile. :type: int \"\"\" self._protocol = protocol @property", "\"\"\" Returns true if both objects are equal \"\"\" if self is None", "attribute name and the value is json key in definition. \"\"\" self.swagger_types =", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "subsystem where the I/O controller resides. :param subsystem_id: The subsystem_id of this IbIocProfile.", "None self._send_message_size = None self._rdma_transfer_size = None self._controller_ops_capability_mask = None self._service_entries = None", "the type of I/O controller :param io_device_id: The io_device_id of this IbIocProfile. :type:", "send_message_size of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._send_message_size @send_message_size.setter def", "code generator program. Do not edit the class manually. \"\"\" def __init__(self): \"\"\"", "\"\"\" Gets the protocol_version of this IbIocProfile. The protocol version (protocol-specific). :return: The", "IEEE format, or else all zeros if there is no vendor ID. :param", "this IbIocProfile. :type: int \"\"\" self._protocol_version = protocol_version @property def send_message_queue_depth(self): \"\"\" Gets", "'subsystem_vendor_id': 'str', # (required parameter) 'subsystem_id': 'int', # (required parameter) 'io_class': 'int', #", "int :required/optional: required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self, rdma_transfer_size): \"\"\" Sets the", "subsystem_id of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._subsystem_id @subsystem_id.setter def", "above copyright notice, this list of conditions and the following disclaimer in the", "zeros if there is no vendor ID. :param subsystem_vendor_id: The subsystem_vendor_id of this", "io_subclass of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._io_subclass @io_subclass.setter def", "# (required parameter) 'send_message_queue_depth': 'int', # (required parameter) 'rdma_read_queue_depth': 'int', # (required parameter)", "The EUI-64 GUID used to uniquely identify the I/O controller. 
:param ioc_guid: The", "# (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize', # (required parameter)", "\"\"\" return self._send_message_queue_depth @send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the send_message_queue_depth of this", "result[attr] = value return result def to_str(self): \"\"\" Returns the string representation of", "return self._device_version @device_version.setter def device_version(self, device_version): \"\"\" Sets the device_version of this IbIocProfile.", "int \"\"\" self._service_entries = service_entries @property def id_string(self): \"\"\" Gets the id_string of", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS", "this IbIocProfile. The protocol version (protocol-specific). :param protocol_version: The protocol_version of this IbIocProfile.", "result def to_str(self): \"\"\" Returns the string representation of the model \"\"\" return", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR", "the names of its contributors may be used to endorse or promote products", "`pprint` \"\"\" if self is None: return None return self.to_str() def __eq__(self, other):", "OF THE POSSIBILITY OF SUCH DAMAGE. \"\"\" from pprint import pformat from six", "edit the class manually. \"\"\" def __init__(self): \"\"\" IbIocProfile - a model defined", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._subsystem_id @subsystem_id.setter def subsystem_id(self,", "parameter) 'io_device_id': 'int', # (required parameter) 'device_version': 'int', # (required parameter) 'subsystem_vendor_id': 'str',", "def vendor_id(self, vendor_id): \"\"\" Sets the vendor_id of this IbIocProfile. The I/O controller", "The rdma_read_queue_depth of this IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property def", "this IbIocProfile. The maximum size of Send Messages in bytes. :param send_message_size: The", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "controller.: Bit 0 on = Send Messages to IOCs Bit 1 on =", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._send_message_size @send_message_size.setter def send_message_size(self, send_message_size): \"\"\"", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO", "ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "@controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask of this IbIocProfile. Supported operation", "iteritems class IbIocProfile(object): \"\"\" NOTE: This class is auto generated by the swagger", "in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)", "of I/O controller :param io_device_id: The io_device_id of this IbIocProfile. :type: int \"\"\"", "following conditions are met: * Redistributions of source code must retain the above", "the InfiniBand architecture. 0xFFFF is vendor-specific. 
:param io_subclass: The io_subclass of this IbIocProfile.", ":type: str \"\"\" self._vendor_id = vendor_id @property def io_device_id(self): \"\"\" Gets the io_device_id", "vendor ID in IEEE format. :return: The vendor_id of this IbIocProfile. :rtype: str", "The maximum depth of the Send Message Queue. :param send_message_queue_depth: The send_message_queue_depth of", "(required parameter) 'protocol': 'int', # (required parameter) 'protocol_version': 'int', # (required parameter) 'send_message_queue_depth':", "'protocol_version': 'protocolVersion', # (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', #", "the following conditions are met: * Redistributions of source code must retain the", "int :required/optional: required \"\"\" return self._protocol @protocol.setter def protocol(self, protocol): \"\"\" Sets the", "this IbIocProfile. The maximum size of Send Messages in bytes. :return: The send_message_size", "'subsystem_id': 'subsystemId', # (required parameter) 'io_class': 'ioClass', # (required parameter) 'io_subclass': 'ioSubclass', #", "Queue :return: The rdma_read_queue_depth of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "IbIocProfile. The maximum size of outbound RDMA transfers initiated by the controller. :return:", "= send_message_size @property def rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size of this IbIocProfile. The", "# (required parameter) 'io_device_id': 'ioDeviceId', # (required parameter) 'device_version': 'deviceVersion', # (required parameter)", "IbIocProfile. A number identifying the subsystem where the I/O controller resides. :param subsystem_id:", "\"\"\" Sets the rdma_transfer_size of this IbIocProfile. The maximum size of outbound RDMA", "this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask):", "Bit 7 on = Atomic operations from IOCs :return: The controller_ops_capability_mask of this", "the I/O controller resides. :return: The subsystem_id of this IbIocProfile. :rtype: int :required/optional:", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self,", "int \"\"\" self._io_class = io_class @property def io_subclass(self): \"\"\" Gets the io_subclass of", "rdma_read_queue_depth of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def", "IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets the", "self._id_string @id_string.setter def id_string(self, id_string): \"\"\" Sets the id_string of this IbIocProfile. A", "Sets the id_string of this IbIocProfile. A UTF-8 encoded string for identifying the", "'int', # (required parameter) 'send_message_queue_depth': 'int', # (required parameter) 'rdma_read_queue_depth': 'int', # (required", "IbIocProfile. 
        :type: str
        """
        self._vendor_id = vendor_id

    @property
    def io_device_id(self):
        """
        Gets the
        :type: int
        """
        self._io_device_id = io_device_id

    @property
    def device_version(self):
        """
        Gets
:param", "= None self._io_device_id = None self._device_version = None self._subsystem_vendor_id = None self._subsystem_id =", "is None or other is None: return None return self.__dict__ == other.__dict__ def", "Atomic operations to IOCs Bit 7 on = Atomic operations from IOCs :return:", "Bit 5 on = RDMA Write Requests from IOCs Bit 6 on =", "parameter) 'rdma_transfer_size': 'int', # (required parameter) 'controller_ops_capability_mask': 'int', # (required parameter) 'service_entries': 'int',", "dict attributeMap: The key is attribute name and the value is json key", "must reproduce the above copyright notice, this list of conditions and the following", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "parameter) 'io_subclass': 'int', # (required parameter) 'protocol': 'int', # (required parameter) 'protocol_version': 'int',", "controller vendor ID in IEEE format. :return: The vendor_id of this IbIocProfile. :rtype:", "self._protocol = protocol @property def protocol_version(self): \"\"\" Gets the protocol_version of this IbIocProfile.", "IbIocProfile. A number identifying the subsystem where the I/O controller resides. :return: The", "binary forms, with or without modification, are permitted (subject to the limitations in", "service_entries): \"\"\" Sets the service_entries of this IbIocProfile. The number of entries in", "of this IbIocProfile. :type: int \"\"\" self._io_subclass = io_subclass @property def protocol(self): \"\"\"", "protocol_version(self, protocol_version): \"\"\" Sets the protocol_version of this IbIocProfile. The protocol version (protocol-specific).", "A number assigned by vendor to identify the type of I/O controller :return:", "I/O controller resides. :param subsystem_id: The subsystem_id of this IbIocProfile. :type: int \"\"\"", "@property def send_message_queue_depth(self): \"\"\" Gets the send_message_queue_depth of this IbIocProfile. The maximum depth", "of this IbIocProfile. :type: str \"\"\" self._id_string = id_string def to_dict(self): \"\"\" Returns", "GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "modification, are permitted (subject to the limitations in the disclaimer below) provided that", "architecture. 0xFFFF is vendor-specific. :param io_class: The io_class of this IbIocProfile. :type: int", "the service entries table :return: The service_entries of this IbIocProfile. :rtype: int :required/optional:", "Sets the protocol of this IbIocProfile. The I/O protocol of the controller. 0x0000", "Atomic operations from IOCs :return: The controller_ops_capability_mask of this IbIocProfile. :rtype: int :required/optional:", "self._protocol = None self._protocol_version = None self._send_message_queue_depth = None self._rdma_read_queue_depth = None self._send_message_size", "None self._protocol = None self._protocol_version = None self._send_message_queue_depth = None self._rdma_read_queue_depth = None", "parameter) 'controller_ops_capability_mask': 'int', # (required parameter) 'service_entries': 'int', # (required parameter) 'id_string': 'str'", "def device_version(self, device_version): \"\"\" Sets the device_version of this IbIocProfile. A number assigned", "self.attribute_map = { 'ioc_guid': 'iocGuid', # (required parameter) 'vendor_id': 'vendorId', # (required parameter)", "of this IbIocProfile. 
        :rtype: int
        :required/optional: required
        """
        return self._io_device_id

    @io_device_id.setter
    def io_device_id(self,
The number of entries", "reserved for I/O classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :param", "@io_subclass.setter def io_subclass(self, io_subclass): \"\"\" Sets the io_subclass of this IbIocProfile. The I/O", "parameter) 'subsystem_vendor_id': 'str', # (required parameter) 'subsystem_id': 'int', # (required parameter) 'io_class': 'int',", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING", "\"\"\" Gets the ioc_guid of this IbIocProfile. The EUI-64 GUID used to uniquely", "of this IbIocProfile. The protocol version (protocol-specific). :param protocol_version: The protocol_version of this", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF", "I/O sub-class of the controller. 0x0000 -0xFFFE is reserved for I/O sub-classes encompassed", "= dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() ))", "this IbIocProfile. :type: str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets", "self._controller_ops_capability_mask = None self._service_entries = None self._id_string = None @property def ioc_guid(self): \"\"\"", "The number of entries in the service entries table :return: The service_entries of", "number identifying the subsystem where the I/O controller resides. :param subsystem_id: The subsystem_id", "\"\"\" Gets the io_subclass of this IbIocProfile. The I/O sub-class of the controller.", "InfiniBand architecture. 0xFFFF is vendor-specific. :param io_subclass: The io_subclass of this IbIocProfile. :type:", "class IbIocProfile(object): \"\"\" NOTE: This class is auto generated by the swagger code", "value.items() )) else: result[attr] = value return result def to_str(self): \"\"\" Returns the", "conditions and the following disclaimer in the documentation and/or other materials provided with", "= Atomic operations from IOCs :return: The controller_ops_capability_mask of this IbIocProfile. :rtype: int", "int :required/optional: required \"\"\" return self._device_version @device_version.setter def device_version(self, device_version): \"\"\" Sets the", "def rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\" Sets the rdma_read_queue_depth of this IbIocProfile. The maximum depth", "\"\"\" Sets the vendor_id of this IbIocProfile. The I/O controller vendor ID in", "IOCs Bit 4 on = RDMA Write Requests to IOCs Bit 5 on", "table :return: The service_entries of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "def ioc_guid(self): \"\"\" Gets the ioc_guid of this IbIocProfile. The EUI-64 GUID used", "= device_version @property def subsystem_vendor_id(self): \"\"\" Gets the subsystem_vendor_id of this IbIocProfile. The", "required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\" Sets the controller_ops_capability_mask of", "string representation of the model \"\"\" return pformat(self.to_dict()) def __repr__(self): \"\"\" For `print`", "return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\" Returns true if both objects", "of entries in the service entries table :return: The service_entries of this IbIocProfile.", "device_version of this IbIocProfile. A number assigned by the vendor to identify the", "service entries table :param service_entries: The service_entries of this IbIocProfile. 
        :type: int
        """
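        # Generated setter: stores the value as-is; no validation of the
        # service entries count is performed here.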
A number identifying the", "parameter) 'subsystem_vendor_id': 'subsystemVendorId', # (required parameter) 'subsystem_id': 'subsystemId', # (required parameter) 'io_class': 'ioClass',", ":rtype: int :required/optional: required \"\"\" return self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version): \"\"\" Sets", "IOCs Bit 6 on = Atomic operations to IOCs Bit 7 on =", "number assigned by the vendor to identify the device version. :param device_version: The", "on = Atomic operations to IOCs Bit 7 on = Atomic operations from", "rdma_read_queue_depth(self): \"\"\" Gets the rdma_read_queue_depth of this IbIocProfile. The maximum depth of the", "Gets the device_version of this IbIocProfile. A number assigned by the vendor to", "item, value.items() )) else: result[attr] = value return result def to_str(self): \"\"\" Returns", "'vendor_id': 'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId', # (required parameter) 'device_version': 'deviceVersion', #", "Send Message Queue. :return: The send_message_queue_depth of this IbIocProfile. :rtype: int :required/optional: required", "item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) else: result[attr] =", "initiated by the controller. :return: The rdma_transfer_size of this IbIocProfile. :rtype: int :required/optional:", "item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) else: result[attr] = value return", "vendor_id of this IbIocProfile. The I/O controller vendor ID in IEEE format. :return:", "is None: return None return self.to_str() def __eq__(self, other): \"\"\" Returns true if", "The maximum depth of the Send Message Queue. :return: The send_message_queue_depth of this", "number assigned by vendor to identify the type of I/O controller :return: The", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "this IbIocProfile. :type: int \"\"\" self._io_subclass = io_subclass @property def protocol(self): \"\"\" Gets", "EUI-64 GUID used to uniquely identify the I/O controller. :param ioc_guid: The ioc_guid", "def protocol(self, protocol): \"\"\" Sets the protocol of this IbIocProfile. The I/O protocol", "str \"\"\" self._ioc_guid = ioc_guid @property def vendor_id(self): \"\"\" Gets the vendor_id of", "vendor-specific. :return: The io_subclass of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "the device version. :return: The device_version of this IbIocProfile. :rtype: int :required/optional: required", "'subsystemId', # (required parameter) 'io_class': 'ioClass', # (required parameter) 'io_subclass': 'ioSubclass', # (required", "IOCs Bit 2 on = RDMA Read Requests to IOCs Bit 3 on", "bytes. :param send_message_size: The send_message_size of this IbIocProfile. :type: int \"\"\" self._send_message_size =", "encoded string for identifying the controller to user. :return: The id_string of this", "I/O controller :return: The io_device_id of this IbIocProfile. :rtype: int :required/optional: required \"\"\"", ":type: int \"\"\" self._io_subclass = io_subclass @property def protocol(self): \"\"\" Gets the protocol", "to IOCs Bit 7 on = Atomic operations from IOCs :param controller_ops_capability_mask: The", "Messages from IOCs Bit 2 on = RDMA Read Requests to IOCs Bit", "result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict())", "reserved for I/O sub-classes encompassed by the InfiniBand architecture. 
        0xFFFF is vendor-specific.
        :return:
        Supported operation types of this controller: Bit 0
        :rtype: int
        :required/optional: required
        """
        return self._io_device_id
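    # Note: the attribute_map built in __init__ pairs this snake_case
    # attribute with its camelCase JSON key ('io_device_id' -> 'ioDeviceId');
    # to_dict() itself emits the snake_case names from swagger_types.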
\"\"\" def __init__(self): \"\"\" IbIocProfile", "\"\"\" Gets the device_version of this IbIocProfile. A number assigned by the vendor", "rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\" Sets the rdma_read_queue_depth of this IbIocProfile. The maximum depth of", "0xFFFF is vendor-specific. :return: The protocol of this IbIocProfile. :rtype: int :required/optional: required", "used to uniquely identify the I/O controller. :param ioc_guid: The ioc_guid of this", "vendor in IEEE format, or else all zeros if there is no vendor", "Gets the protocol_version of this IbIocProfile. The protocol version (protocol-specific). :return: The protocol_version", "def __ne__(self, other): \"\"\" Returns true if both objects are not equal \"\"\"", "the disclaimer below) provided that the following conditions are met: * Redistributions of", "IbIocProfile. The maximum size of outbound RDMA transfers initiated by the controller. :param", "Gets the controller_ops_capability_mask of this IbIocProfile. Supported operation types of this controller.: Bit", "\"\"\" self.swagger_types = { 'ioc_guid': 'str', # (required parameter) 'vendor_id': 'str', # (required", "The protocol_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol_version @protocol_version.setter", "rdma_transfer_size(self): \"\"\" Gets the rdma_transfer_size of this IbIocProfile. The maximum size of outbound", "from IOCs Bit 4 on = RDMA Write Requests to IOCs Bit 5", "The ioc_guid of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._ioc_guid @ioc_guid.setter", "return self._send_message_queue_depth @send_message_queue_depth.setter def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the send_message_queue_depth of this IbIocProfile.", "distribution. * Neither the name of NetApp, Inc. nor the names of its", "int :required/optional: required \"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self, io_subclass): \"\"\" Sets the", "self._protocol @protocol.setter def protocol(self, protocol): \"\"\" Sets the protocol of this IbIocProfile. The", "= Send Messages to IOCs Bit 1 on = Send Messages from IOCs", "parameter) 'id_string': 'str' } self.attribute_map = { 'ioc_guid': 'iocGuid', # (required parameter) 'vendor_id':", "def rdma_transfer_size(self, rdma_transfer_size): \"\"\" Sets the rdma_transfer_size of this IbIocProfile. The maximum size", "depth of the per-channel RDMA Read Queue :return: The rdma_read_queue_depth of this IbIocProfile.", "protocol_version of this IbIocProfile. :type: int \"\"\" self._protocol_version = protocol_version @property def send_message_queue_depth(self):", "of this IbIocProfile. :type: int \"\"\" self._protocol = protocol @property def protocol_version(self): \"\"\"", "(required parameter) 'send_message_queue_depth': 'int', # (required parameter) 'rdma_read_queue_depth': 'int', # (required parameter) 'send_message_size':", "a dict \"\"\" result = {} for attr, _ in iteritems(self.swagger_types): value =", "of conditions and the following disclaimer in the documentation and/or other materials provided", "subsystem_vendor_id of this IbIocProfile. :type: str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self):", "on = Atomic operations from IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile.", "IbIocProfile. The EUI-64 GUID used to uniquely identify the I/O controller. 
:param ioc_guid:", "None return self.__dict__ == other.__dict__ def __ne__(self, other): \"\"\" Returns true if both", "IOCs Bit 7 on = Atomic operations from IOCs :param controller_ops_capability_mask: The controller_ops_capability_mask", "the rdma_read_queue_depth of this IbIocProfile. The maximum depth of the per-channel RDMA Read", "nor the names of its contributors may be used to endorse or promote", "code must retain the above copyright notice, this list of conditions and the", "IbIocProfile. :type: int \"\"\" self._protocol = protocol @property def protocol_version(self): \"\"\" Gets the", "Gets the ioc_guid of this IbIocProfile. The EUI-64 GUID used to uniquely identify", ":param rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth", "def subsystem_id(self, subsystem_id): \"\"\" Sets the subsystem_id of this IbIocProfile. A number identifying", ":type: int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets the send_message_size", "pformat(self.to_dict()) def __repr__(self): \"\"\" For `print` and `pprint` \"\"\" if self is None:", "RDMA Write Requests from IOCs Bit 6 on = Atomic operations to IOCs", "NOTE: This class is auto generated by the swagger code generator program. Do", ":required/optional: required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\" Sets the rdma_read_queue_depth", "io_class @property def io_subclass(self): \"\"\" Gets the io_subclass of this IbIocProfile. The I/O", "'service_entries': 'int', # (required parameter) 'id_string': 'str' } self.attribute_map = { 'ioc_guid': 'iocGuid',", "of this IbIocProfile. The I/O controller vendor ID in IEEE format. :param vendor_id:", "def __eq__(self, other): \"\"\" Returns true if both objects are equal \"\"\" if", "PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE", "OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x:", "self._io_class = None self._io_subclass = None self._protocol = None self._protocol_version = None self._send_message_queue_depth", "format, or else all zeros if there is no vendor ID. :param subsystem_vendor_id:", "this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._vendor_id @vendor_id.setter def vendor_id(self, vendor_id):", "of this IbIocProfile. :type: int \"\"\" self._io_class = io_class @property def io_subclass(self): \"\"\"", "= RDMA Read Requests from IOCs Bit 4 on = RDMA Write Requests", "License Copyright (c) – 2016, NetApp, Inc. All rights reserved. Redistribution and use", "def protocol(self): \"\"\" Gets the protocol of this IbIocProfile. The I/O protocol of", "the id_string of this IbIocProfile. A UTF-8 encoded string for identifying the controller", "the send_message_queue_depth of this IbIocProfile. The maximum depth of the Send Message Queue.", "# (required parameter) 'io_device_id': 'int', # (required parameter) 'device_version': 'int', # (required parameter)", "FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "rdma_read_queue_depth @property def send_message_size(self): \"\"\" Gets the send_message_size of this IbIocProfile. 
The maximum", "controller_ops_capability_mask @property def service_entries(self): \"\"\" Gets the service_entries of this IbIocProfile. The number", "BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_class of this IbIocProfile. :rtype:", "for I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The", "The id_string of this IbIocProfile. :type: str \"\"\" self._id_string = id_string def to_dict(self):", "InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_class of this IbIocProfile. :rtype: int", "this IbIocProfile. :type: int \"\"\" self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\" Gets", "or promote products derived from this software without specific prior written permission. NO", "\"\"\" Sets the rdma_read_queue_depth of this IbIocProfile. The maximum depth of the per-channel", "'iocGuid', # (required parameter) 'vendor_id': 'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId', # (required", "Messages to IOCs Bit 1 on = Send Messages from IOCs Bit 2", ":rtype: int :required/optional: required \"\"\" return self._io_subclass @io_subclass.setter def io_subclass(self, io_subclass): \"\"\" Sets", "return pformat(self.to_dict()) def __repr__(self): \"\"\" For `print` and `pprint` \"\"\" if self is", "id_string(self, id_string): \"\"\" Sets the id_string of this IbIocProfile. A UTF-8 encoded string", "in bytes. :return: The send_message_size of this IbIocProfile. :rtype: int :required/optional: required \"\"\"", "'int', # (required parameter) 'rdma_read_queue_depth': 'int', # (required parameter) 'send_message_size': 'int', # (required", "1 on = Send Messages from IOCs Bit 2 on = RDMA Read", "assigned by vendor to identify the type of I/O controller :return: The io_device_id", "@property def device_version(self): \"\"\" Gets the device_version of this IbIocProfile. A number assigned", "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "Sets the io_subclass of this IbIocProfile. The I/O sub-class of the controller. 0x0000", "ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "string for identifying the controller to user. :param id_string: The id_string of this", "IbIocProfile. The I/O sub-class of the controller. 0x0000 -0xFFFE is reserved for I/O", "\"\"\" return self._protocol @protocol.setter def protocol(self, protocol): \"\"\" Sets the protocol of this", "I/O protocol of the controller. 0x0000 -0xFFFE is reserved for I/O protocols encompassed", "protocol(self): \"\"\" Gets the protocol of this IbIocProfile. The I/O protocol of the", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)", "the class manually. \"\"\" def __init__(self): \"\"\" IbIocProfile - a model defined in", "0x0000 -0xFFFE is reserved for I/O protocols encompassed by the InfiniBand architecture. 0xFFFF", "io_device_id(self): \"\"\" Gets the io_device_id of this IbIocProfile. 
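Both size attributes are plain byte counts; for example (illustrative values only, not defaults from the API)::

    profile.send_message_size = 1024        # largest Send Message, in bytes
    profile.rdma_transfer_size = 1048576    # largest outbound RDMA transfer, in bytes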
A number assigned by the vendor to identify the type of I/O controller. :return: The io_device_id of this IbIocProfile.
:rtype: int :required/optional: required \"\"\" return self._io_device_id @io_device_id.setter def io_device_id(self, io_device_id): \"\"\"", "json key in definition. \"\"\" self.swagger_types = { 'ioc_guid': 'str', # (required parameter)", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version): \"\"\"", "'protocolVersion', # (required parameter) 'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required", "'subsystemVendorId', # (required parameter) 'subsystem_id': 'subsystemId', # (required parameter) 'io_class': 'ioClass', # (required", "A number assigned by the vendor to identify the device version. :param device_version:", "None self._io_class = None self._io_subclass = None self._protocol = None self._protocol_version = None", "size of outbound RDMA transfers initiated by the controller. :param rdma_transfer_size: The rdma_transfer_size", "= None self._protocol = None self._protocol_version = None self._send_message_queue_depth = None self._rdma_read_queue_depth =", "self._vendor_id = vendor_id @property def io_device_id(self): \"\"\" Gets the io_device_id of this IbIocProfile.", "def send_message_queue_depth(self, send_message_queue_depth): \"\"\" Sets the send_message_queue_depth of this IbIocProfile. The maximum depth", "= Atomic operations to IOCs Bit 7 on = Atomic operations from IOCs", "def subsystem_id(self): \"\"\" Gets the subsystem_id of this IbIocProfile. A number identifying the", "parameter) 'protocol_version': 'int', # (required parameter) 'send_message_queue_depth': 'int', # (required parameter) 'rdma_read_queue_depth': 'int',", "is reserved for I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific.", ":rtype: int :required/optional: required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self, rdma_transfer_size): \"\"\" Sets", "(required parameter) 'send_message_size': 'sendMessageSize', # (required parameter) 'rdma_transfer_size': 'rdmaTransferSize', # (required parameter) 'controller_ops_capability_mask':", "return self._io_class @io_class.setter def io_class(self, io_class): \"\"\" Sets the io_class of this IbIocProfile.", "of the Send Message Queue. :param send_message_queue_depth: The send_message_queue_depth of this IbIocProfile. :type:", "the rdma_transfer_size of this IbIocProfile. The maximum size of outbound RDMA transfers initiated", "def protocol_version(self, protocol_version): \"\"\" Sets the protocol_version of this IbIocProfile. The protocol version", "is attribute name and the value is attribute type. :param dict attributeMap: The", "\"to_dict\") else item, value.items() )) else: result[attr] = value return result def to_str(self):", "@property def send_message_size(self): \"\"\" Gets the send_message_size of this IbIocProfile. The maximum size", ":return: The protocol of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol", "else all zeros if there is no vendor ID. :param subsystem_vendor_id: The subsystem_vendor_id", ":return: The subsystem_vendor_id of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._subsystem_vendor_id", "number assigned by the vendor to identify the device version. 
:return: The device_version", "other): \"\"\" Returns true if both objects are not equal \"\"\" return not", "required \"\"\" return self._send_message_size @send_message_size.setter def send_message_size(self, send_message_size): \"\"\" Sets the send_message_size of", "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \"\"\" from", "names of its contributors may be used to endorse or promote products derived", "IbIocProfile. :type: int \"\"\" self._device_version = device_version @property def subsystem_vendor_id(self): \"\"\" Gets the", "I/O classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The io_class", "__init__(self): \"\"\" IbIocProfile - a model defined in Swagger :param dict swaggerTypes: The", "classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :param io_class: The io_class", "operation types of this controller.: Bit 0 on = Send Messages to IOCs", "of this IbIocProfile. The I/O controller vendor ID in IEEE format. :return: The", "use in source and binary forms, with or without modification, are permitted (subject", "Inc. nor the names of its contributors may be used to endorse or", "following disclaimer in the documentation and/or other materials provided with the distribution. *", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED", "the service entries table :param service_entries: The service_entries of this IbIocProfile. :type: int", "THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "RDMA transfers initiated by the controller. :param rdma_transfer_size: The rdma_transfer_size of this IbIocProfile.", "None return self.to_str() def __eq__(self, other): \"\"\" Returns true if both objects are", "OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "Gets the protocol of this IbIocProfile. The I/O protocol of the controller. 0x0000", "controller resides. :param subsystem_id: The subsystem_id of this IbIocProfile. :type: int \"\"\" self._subsystem_id", "maximum depth of the per-channel RDMA Read Queue :return: The rdma_read_queue_depth of this", "IbIocProfile. The protocol version (protocol-specific). :param protocol_version: The protocol_version of this IbIocProfile. :type:", "assigned by vendor to identify the type of I/O controller :param io_device_id: The", "@protocol_version.setter def protocol_version(self, protocol_version): \"\"\" Sets the protocol_version of this IbIocProfile. The protocol", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self, controller_ops_capability_mask): \"\"\"", "this IbIocProfile. :type: int \"\"\" self._send_message_size = send_message_size @property def rdma_transfer_size(self): \"\"\" Gets", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._controller_ops_capability_mask @controller_ops_capability_mask.setter def controller_ops_capability_mask(self,", "in IEEE format. :return: The vendor_id of this IbIocProfile. :rtype: str :required/optional: required", "above copyright notice, this list of conditions and the following disclaimer. * Redistributions", "io_device_id of this IbIocProfile. A number assigned by vendor to identify the type", "All rights reserved. Redistribution and use in source and binary forms, with or", "def id_string(self, id_string): \"\"\" Sets the id_string of this IbIocProfile. 
A UTF-8 encoded string for identifying the controller to the user. :param id_string: The id_string of this IbIocProfile. :type: str """ self._id_string = id_string def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if other is None: return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
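# Example usage (an illustrative sketch, not emitted by the swagger code
# generator; the attribute values below are fabricated):
if __name__ == "__main__":
    profile = IbIocProfile()
    profile.ioc_guid = "0002c9020023a146"   # hypothetical EUI-64 GUID
    profile.send_message_queue_depth = 16
    # to_dict() walks swagger_types, so attributes never assigned serialize as None.
    print(profile.to_str())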
:return: The protocol of this IbIocProfile. :rtype: int :required/optional: required """ return self._protocol @protocol.setter def protocol(self, protocol): """ Sets the protocol of this IbIocProfile. The I/O protocol of the controller. 0x0000-0xFFFE is reserved for I/O protocols encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific.
:param protocol: The", "'int', # (required parameter) 'service_entries': 'int', # (required parameter) 'id_string': 'str' } self.attribute_map", "of NetApp, Inc. nor the names of its contributors may be used to", "self._service_entries = service_entries @property def id_string(self): \"\"\" Gets the id_string of this IbIocProfile.", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,", "the name of NetApp, Inc. nor the names of its contributors may be", "Redistribution and use in source and binary forms, with or without modification, are", "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "__ne__(self, other): \"\"\" Returns true if both objects are not equal \"\"\" return", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR", "= None self._controller_ops_capability_mask = None self._service_entries = None self._id_string = None @property def", "USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "Returns true if both objects are not equal \"\"\" return not self ==", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", ":type: int \"\"\" self._device_version = device_version @property def subsystem_vendor_id(self): \"\"\" Gets the subsystem_vendor_id", "required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self, rdma_transfer_size): \"\"\" Sets the rdma_transfer_size of", "derived from this software without specific prior written permission. NO EXPRESS OR IMPLIED", "0xFFFF is vendor-specific. :return: The io_subclass of this IbIocProfile. :rtype: int :required/optional: required", "are permitted (subject to the limitations in the disclaimer below) provided that the", "\"\"\" self._protocol = protocol @property def protocol_version(self): \"\"\" Gets the protocol_version of this", "parameter) 'subsystem_id': 'int', # (required parameter) 'io_class': 'int', # (required parameter) 'io_subclass': 'int',", "of this IbIocProfile. The maximum size of Send Messages in bytes. :return: The", "the model properties as a dict \"\"\" result = {} for attr, _", ":param subsystem_vendor_id: The subsystem_vendor_id of this IbIocProfile. :type: str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id", "elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda", "(required parameter) 'vendor_id': 'str', # (required parameter) 'io_device_id': 'int', # (required parameter) 'device_version':", "IbIocProfile. The I/O protocol of the controller. 0x0000 -0xFFFE is reserved for I/O", "by vendor to identify the type of I/O controller :param io_device_id: The io_device_id", "result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value ))", "service entries table :return: The service_entries of this IbIocProfile. :rtype: int :required/optional: required", "identify the type of I/O controller :param io_device_id: The io_device_id of this IbIocProfile.", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "Sets the rdma_transfer_size of this IbIocProfile. 
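For example, setting a vendor-specific protocol together with its version (a sketch; the values are made up)::

    profile.protocol = 0xFFFF        # vendor-specific, per the reserved ranges above
    profile.protocol_version = 1     # meaning is defined by that protocol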
The maximum size of outbound RDMA transfers initiated by the controller. :param rdma_transfer_size: The rdma_transfer_size of this IbIocProfile. :type: int """ self._rdma_transfer_size = rdma_transfer_size @property def controller_ops_capability_mask(self): """ Gets the controller_ops_capability_mask of this IbIocProfile. Supported operation types of this controller: Bit 0 on = Send Messages to IOCs; Bit 1 on = Send Messages from IOCs; Bit 2 on = RDMA Read Requests to IOCs; Bit 3 on = RDMA Read Requests from IOCs; Bit 4 on = RDMA Write Requests to IOCs; Bit 5 on = RDMA Write Requests from IOCs; Bit 6 on = Atomic operations to IOCs; Bit 7 on = Atomic operations from IOCs. :return: The controller_ops_capability_mask of this IbIocProfile.
:type: int \"\"\" self._controller_ops_capability_mask = controller_ops_capability_mask @property def service_entries(self):", "from pprint import pformat from six import iteritems class IbIocProfile(object): \"\"\" NOTE: This", "io_subclass: The io_subclass of this IbIocProfile. :type: int \"\"\" self._io_subclass = io_subclass @property", "the controller. :param rdma_transfer_size: The rdma_transfer_size of this IbIocProfile. :type: int \"\"\" self._rdma_transfer_size", "this IbIocProfile. A number assigned by the vendor to identify the device version.", "'int', # (required parameter) 'io_class': 'int', # (required parameter) 'io_subclass': 'int', # (required", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY", "THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "The maximum size of outbound RDMA transfers initiated by the controller. :param rdma_transfer_size:", "class of the controller. 0x0000 -0xFFFE is reserved for I/O classes encompassed by", "IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._subsystem_vendor_id @subsystem_vendor_id.setter def subsystem_vendor_id(self, subsystem_vendor_id): \"\"\"", "of conditions and the following disclaimer. * Redistributions in binary form must reproduce", "@subsystem_vendor_id.setter def subsystem_vendor_id(self, subsystem_vendor_id): \"\"\" Sets the subsystem_vendor_id of this IbIocProfile. The ID", "OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "of this IbIocProfile. :type: int \"\"\" self._subsystem_id = subsystem_id @property def io_class(self): \"\"\"", "NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY", "\"\"\" IbIocProfile.py The Clear BSD License Copyright (c) – 2016, NetApp, Inc. All", "subsystem_id: The subsystem_id of this IbIocProfile. :type: int \"\"\" self._subsystem_id = subsystem_id @property", "disclaimer in the documentation and/or other materials provided with the distribution. * Neither", "vendor_id of this IbIocProfile. :type: str \"\"\" self._vendor_id = vendor_id @property def io_device_id(self):", "the I/O controller resides. :param subsystem_id: The subsystem_id of this IbIocProfile. :type: int", "= None self._device_version = None self._subsystem_vendor_id = None self._subsystem_id = None self._io_class =", "for identifying the controller to user. :param id_string: The id_string of this IbIocProfile.", "# (required parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries', # (required parameter)", "GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "of this IbIocProfile. :type: str \"\"\" self._vendor_id = vendor_id @property def io_device_id(self): \"\"\"", "to IOCs Bit 5 on = RDMA Write Requests from IOCs Bit 6", "The protocol version (protocol-specific). :return: The protocol_version of this IbIocProfile. :rtype: int :required/optional:", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE", "(required parameter) 'io_device_id': 'int', # (required parameter) 'device_version': 'int', # (required parameter) 'subsystem_vendor_id':", "'rdma_transfer_size': 'rdmaTransferSize', # (required parameter) 'controller_ops_capability_mask': 'controllerOpsCapabilityMask', # (required parameter) 'service_entries': 'serviceEntries', #", "protocol version (protocol-specific). :param protocol_version: The protocol_version of this IbIocProfile. 
:type: int \"\"\"", ":param subsystem_id: The subsystem_id of this IbIocProfile. :type: int \"\"\" self._subsystem_id = subsystem_id", "to identify the type of I/O controller :return: The io_device_id of this IbIocProfile.", "protocols encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The protocol of", ":required/optional: required \"\"\" return self._io_class @io_class.setter def io_class(self, io_class): \"\"\" Sets the io_class", "disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this", "the vendor to identify the device version. :param device_version: The device_version of this", "NetApp, Inc. All rights reserved. Redistribution and use in source and binary forms,", "subsystem_vendor_id @property def subsystem_id(self): \"\"\" Gets the subsystem_id of this IbIocProfile. A number", "(required parameter) 'io_subclass': 'int', # (required parameter) 'protocol': 'int', # (required parameter) 'protocol_version':", "IbIocProfile. :type: int \"\"\" self._send_message_queue_depth = send_message_queue_depth @property def rdma_read_queue_depth(self): \"\"\" Gets the", "= None self._service_entries = None self._id_string = None @property def ioc_guid(self): \"\"\" Gets", "(required parameter) 'subsystem_vendor_id': 'str', # (required parameter) 'subsystem_id': 'int', # (required parameter) 'io_class':", "required \"\"\" return self._io_device_id @io_device_id.setter def io_device_id(self, io_device_id): \"\"\" Sets the io_device_id of", "'send_message_queue_depth': 'sendMessageQueueDepth', # (required parameter) 'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize', #", "'io_class': 'int', # (required parameter) 'io_subclass': 'int', # (required parameter) 'protocol': 'int', #", "in definition. \"\"\" self.swagger_types = { 'ioc_guid': 'str', # (required parameter) 'vendor_id': 'str',", "the value is json key in definition. \"\"\" self.swagger_types = { 'ioc_guid': 'str',", "return self._subsystem_vendor_id @subsystem_vendor_id.setter def subsystem_vendor_id(self, subsystem_vendor_id): \"\"\" Sets the subsystem_vendor_id of this IbIocProfile.", "The vendor_id of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._vendor_id @vendor_id.setter", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.", "@id_string.setter def id_string(self, id_string): \"\"\" Sets the id_string of this IbIocProfile. A UTF-8", "on = RDMA Write Requests from IOCs Bit 6 on = Atomic operations", "def device_version(self): \"\"\" Gets the device_version of this IbIocProfile. A number assigned by", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._service_entries @service_entries.setter def service_entries(self,", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "I/O controller. :param ioc_guid: The ioc_guid of this IbIocProfile. :type: str \"\"\" self._ioc_guid", "io_subclass @property def protocol(self): \"\"\" Gets the protocol of this IbIocProfile. The I/O", "definition. \"\"\" self.swagger_types = { 'ioc_guid': 'str', # (required parameter) 'vendor_id': 'str', #", "value return result def to_str(self): \"\"\" Returns the string representation of the model", "I/O protocols encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :return: The protocol", "the protocol_version of this IbIocProfile. The protocol version (protocol-specific). 
:return: The protocol_version of", "required \"\"\" return self._device_version @device_version.setter def device_version(self, device_version): \"\"\" Sets the device_version of", "== other.__dict__ def __ne__(self, other): \"\"\" Returns true if both objects are not", "OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL", "Inc. All rights reserved. Redistribution and use in source and binary forms, with", "# (required parameter) 'io_subclass': 'ioSubclass', # (required parameter) 'protocol': 'protocol', # (required parameter)", "parameter) 'io_subclass': 'ioSubclass', # (required parameter) 'protocol': 'protocol', # (required parameter) 'protocol_version': 'protocolVersion',", "the controller. 0x0000 -0xFFFE is reserved for I/O protocols encompassed by the InfiniBand", "rdma_transfer_size of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def", "The rdma_read_queue_depth of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter", "IbIocProfile. :type: int \"\"\" self._io_device_id = io_device_id @property def device_version(self): \"\"\" Gets the", "io_class(self): \"\"\" Gets the io_class of this IbIocProfile. The I/O class of the", "there is no vendor ID. :param subsystem_vendor_id: The subsystem_vendor_id of this IbIocProfile. :type:", "# (required parameter) 'io_subclass': 'int', # (required parameter) 'protocol': 'int', # (required parameter)", "depth of the per-channel RDMA Read Queue :param rdma_read_queue_depth: The rdma_read_queue_depth of this", "architecture. 0xFFFF is vendor-specific. :param protocol: The protocol of this IbIocProfile. :type: int", "\"\"\" result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr)", "subsystem_vendor_id: The subsystem_vendor_id of this IbIocProfile. :type: str \"\"\" self._subsystem_vendor_id = subsystem_vendor_id @property", "InfiniBand architecture. 0xFFFF is vendor-specific. :return: The protocol of this IbIocProfile. :rtype: int", ")) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map(", "IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._service_entries @service_entries.setter def service_entries(self, service_entries): \"\"\"", "send_message_queue_depth of this IbIocProfile. :type: int \"\"\" self._send_message_queue_depth = send_message_queue_depth @property def rdma_read_queue_depth(self):", ":param id_string: The id_string of this IbIocProfile. :type: str \"\"\" self._id_string = id_string", "'rdma_read_queue_depth': 'rdmaReadQueueDepth', # (required parameter) 'send_message_size': 'sendMessageSize', # (required parameter) 'rdma_transfer_size': 'rdmaTransferSize', #", "(protocol-specific). :return: The protocol_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return", "identify the I/O controller. :return: The ioc_guid of this IbIocProfile. :rtype: str :required/optional:", "'ioc_guid': 'str', # (required parameter) 'vendor_id': 'str', # (required parameter) 'io_device_id': 'int', #", "on = Send Messages to IOCs Bit 1 on = Send Messages from", "protocol_version of this IbIocProfile. The protocol version (protocol-specific). :param protocol_version: The protocol_version of", "return result def to_str(self): \"\"\" Returns the string representation of the model \"\"\"", "IbIocProfile. 
The maximum depth of the per-channel RDMA Read Queue. :param rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile. :type: int """ self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): """ Gets the send_message_size of this IbIocProfile.
:type: int \"\"\" self._send_message_queue_depth = send_message_queue_depth @property def", "of this IbIocProfile. :type: int \"\"\" self._rdma_read_queue_depth = rdma_read_queue_depth @property def send_message_size(self): \"\"\"", "\"\"\" from pprint import pformat from six import iteritems class IbIocProfile(object): \"\"\" NOTE:", "* Redistributions in binary form must reproduce the above copyright notice, this list", "class is auto generated by the swagger code generator program. Do not edit", "of this controller.: Bit 0 on = Send Messages to IOCs Bit 1", "self._ioc_guid @ioc_guid.setter def ioc_guid(self, ioc_guid): \"\"\" Sets the ioc_guid of this IbIocProfile. The", "dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) else:", "controller_ops_capability_mask(self): \"\"\" Gets the controller_ops_capability_mask of this IbIocProfile. Supported operation types of this", "version (protocol-specific). :return: The protocol_version of this IbIocProfile. :rtype: int :required/optional: required \"\"\"", "# (required parameter) 'io_class': 'ioClass', # (required parameter) 'io_subclass': 'ioSubclass', # (required parameter)", "@property def ioc_guid(self): \"\"\" Gets the ioc_guid of this IbIocProfile. The EUI-64 GUID", "rdma_transfer_size @property def controller_ops_capability_mask(self): \"\"\" Gets the controller_ops_capability_mask of this IbIocProfile. Supported operation", "per-channel RDMA Read Queue :return: The rdma_read_queue_depth of this IbIocProfile. :rtype: int :required/optional:", "__repr__(self): \"\"\" For `print` and `pprint` \"\"\" if self is None: return None", "\"\"\" return self._rdma_transfer_size @rdma_transfer_size.setter def rdma_transfer_size(self, rdma_transfer_size): \"\"\" Sets the rdma_transfer_size of this", "of this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._subsystem_vendor_id @subsystem_vendor_id.setter def subsystem_vendor_id(self,", "The io_device_id of this IbIocProfile. :type: int \"\"\" self._io_device_id = io_device_id @property def", "subsystem_id of this IbIocProfile. :type: int \"\"\" self._subsystem_id = subsystem_id @property def io_class(self):", "this IbIocProfile. :rtype: str :required/optional: required \"\"\" return self._ioc_guid @ioc_guid.setter def ioc_guid(self, ioc_guid):", "subsystem where the I/O controller resides. :return: The subsystem_id of this IbIocProfile. :rtype:", "of this IbIocProfile. :rtype: int :required/optional: required \"\"\" return self._protocol_version @protocol_version.setter def protocol_version(self,", "isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else", "IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "vendor_id: The vendor_id of this IbIocProfile. :type: str \"\"\" self._vendor_id = vendor_id @property", "swaggerTypes: The key is attribute name and the value is attribute type. :param", "{ 'ioc_guid': 'iocGuid', # (required parameter) 'vendor_id': 'vendorId', # (required parameter) 'io_device_id': 'ioDeviceId',", "(required parameter) 'protocol': 'protocol', # (required parameter) 'protocol_version': 'protocolVersion', # (required parameter) 'send_message_queue_depth':", "Sets the io_class of this IbIocProfile. The I/O class of the controller. 0x0000", "value is attribute type. 
:param dict attributeMap: The key is attribute name and", "required \"\"\" return self._rdma_read_queue_depth @rdma_read_queue_depth.setter def rdma_read_queue_depth(self, rdma_read_queue_depth): \"\"\" Sets the rdma_read_queue_depth of", "if there is no vendor ID. :return: The subsystem_vendor_id of this IbIocProfile. :rtype:", "IbIocProfile. :type: str \"\"\" self._id_string = id_string def to_dict(self): \"\"\" Returns the model", "pprint import pformat from six import iteritems class IbIocProfile(object): \"\"\" NOTE: This class", "dict swaggerTypes: The key is attribute name and the value is attribute type.", "this IbIocProfile. The number of entries in the service entries table :return: The", "The ID of the enclosure vendor in IEEE format, or else all zeros", "retain the above copyright notice, this list of conditions and the following disclaimer.", "by the InfiniBand architecture. 0xFFFF is vendor-specific. :param io_class: The io_class of this", "\"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value,", "'int', # (required parameter) 'device_version': 'int', # (required parameter) 'subsystem_vendor_id': 'str', # (required", "self is None: return None return self.to_str() def __eq__(self, other): \"\"\" Returns true", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT", "by vendor to identify the type of I/O controller :return: The io_device_id of", "RDMA Read Requests from IOCs Bit 4 on = RDMA Write Requests to", "and/or other materials provided with the distribution. * Neither the name of NetApp,", "# (required parameter) 'subsystem_vendor_id': 'str', # (required parameter) 'subsystem_id': 'int', # (required parameter)", ":param dict attributeMap: The key is attribute name and the value is json", "'idString' } self._ioc_guid = None self._vendor_id = None self._io_device_id = None self._device_version =", "where the I/O controller resides. :return: The subsystem_id of this IbIocProfile. :rtype: int", "version (protocol-specific). :param protocol_version: The protocol_version of this IbIocProfile. :type: int \"\"\" self._protocol_version", "and `pprint` \"\"\" if self is None: return None return self.to_str() def __eq__(self,", "reserved. Redistribution and use in source and binary forms, with or without modification,", "service_entries: The service_entries of this IbIocProfile. :type: int \"\"\" self._service_entries = service_entries @property", "= protocol_version @property def send_message_queue_depth(self): \"\"\" Gets the send_message_queue_depth of this IbIocProfile. The", ":required/optional: required \"\"\" return self._protocol @protocol.setter def protocol(self, protocol): \"\"\" Sets the protocol", "int \"\"\" self._device_version = device_version @property def subsystem_vendor_id(self): \"\"\" Gets the subsystem_vendor_id of", "\"\"\" return self._protocol_version @protocol_version.setter def protocol_version(self, protocol_version): \"\"\" Sets the protocol_version of this", "service_entries(self): \"\"\" Gets the service_entries of this IbIocProfile. The number of entries in", "'int', # (required parameter) 'id_string': 'str' } self.attribute_map = { 'ioc_guid': 'iocGuid', #", ":param service_entries: The service_entries of this IbIocProfile. :type: int \"\"\" self._service_entries = service_entries", "reserved for I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is vendor-specific. :param", "\"\"\" Sets the device_version of this IbIocProfile. 
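For example (the GUID below is fabricated; an EUI-64 value is 8 bytes, i.e. 16 hex digits)::

    profile.ioc_guid = '0002c9020023a146'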
A number assigned by the vendor to identify the device version. :param device_version: The device_version of this IbIocProfile. :type: int """ self._device_version = device_version @property def subsystem_vendor_id(self): """ Gets the subsystem_vendor_id of this IbIocProfile. The ID of the enclosure vendor in IEEE format, or else all zeros if there is no vendor ID. :return: The subsystem_vendor_id of this IbIocProfile. :rtype: str :required/optional: required """ return self._subsystem_vendor_id @subsystem_vendor_id.setter def subsystem_vendor_id(self, subsystem_vendor_id): """ Sets the subsystem_vendor_id of this IbIocProfile. The ID of the enclosure vendor in IEEE format, or else all zeros if there is no vendor ID. :param subsystem_vendor_id: The subsystem_vendor_id of this IbIocProfile. :type: str """ self._subsystem_vendor_id = subsystem_vendor_id @property def subsystem_id(self): """ Gets the subsystem_id of this IbIocProfile. A number identifying the subsystem where the I/O controller resides. :return: The subsystem_id of this IbIocProfile. :rtype: int :required/optional: required """ return self._subsystem_id @subsystem_id.setter def subsystem_id(self, subsystem_id): """ Sets the subsystem_id of this IbIocProfile. A number identifying the subsystem where the I/O controller resides. :param subsystem_id: The subsystem_id of this IbIocProfile. :type: int """ self._subsystem_id = subsystem_id @property def io_class(self): """ Gets the io_class of this IbIocProfile. The I/O class of the controller. 0x0000-0xFFFE is reserved for I/O classes encompassed by the InfiniBand architecture.
# coding: utf-8

"""
IbIocProfile.py

The Clear BSD License

Copyright (c) – 2016, NetApp, Inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the disclaimer
below) provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* Neither the name of NetApp, Inc. nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.

NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""

from pprint import pformat
from six import iteritems


class IbIocProfile(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        IbIocProfile - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name and the value is
                                  attribute type.
        :param dict attributeMap: The key is attribute name and the value is
                                  json key in definition.
        """
        self.swagger_types = {
            'ioc_guid': 'str',  # (required parameter)
            'vendor_id': 'str',  # (required parameter)
            'io_device_id': 'int',  # (required parameter)
            'device_version': 'int',  # (required parameter)
            'subsystem_vendor_id': 'str',  # (required parameter)
            'subsystem_id': 'int',  # (required parameter)
            'io_class': 'int',  # (required parameter)
            'io_subclass': 'int',  # (required parameter)
            'protocol': 'int',  # (required parameter)
            'send_message_queue_depth': 'int',  # (required parameter)
            'rdma_read_queue_depth': 'int',  # (required parameter)
            'send_message_size': 'int',  # (required parameter)
            'rdma_transfer_size': 'int',  # (required parameter)
            'controller_ops_capability_mask': 'int',  # (required parameter)
            'service_entries': 'int',  # (required parameter)
            'id_string': 'str'
        }

        self.attribute_map = {
            'ioc_guid': 'iocGuid',  # (required parameter)
            'vendor_id': 'vendorId',  # (required parameter)
            'io_device_id': 'ioDeviceId',  # (required parameter)
            'device_version': 'deviceVersion',  # (required parameter)
            'subsystem_vendor_id': 'subsystemVendorId',  # (required parameter)
            'subsystem_id': 'subsystemId',  # (required parameter)
            'io_class': 'ioClass',  # (required parameter)
            'io_subclass': 'ioSubclass',  # (required parameter)
            'protocol': 'protocol',  # (required parameter)
            'send_message_queue_depth': 'sendMessageQueueDepth',  # (required parameter)
            'rdma_read_queue_depth': 'rdmaReadQueueDepth',  # (required parameter)
            'send_message_size': 'sendMessageSize',  # (required parameter)
            'rdma_transfer_size': 'rdmaTransferSize',  # (required parameter)
            'controller_ops_capability_mask': 'controllerOpsCapabilityMask',  # (required parameter)
            'service_entries': 'serviceEntries',  # (required parameter)
            'id_string': 'idString'
        }

        self._ioc_guid = None
        self._vendor_id = None
        self._io_device_id = None
        self._device_version = None
        self._subsystem_vendor_id = None
        self._subsystem_id = None
        self._io_class = None
        self._io_subclass = None
        self._protocol = None
        self._send_message_queue_depth = None
        self._rdma_read_queue_depth = None
        self._send_message_size = None
        self._rdma_transfer_size = None
        self._controller_ops_capability_mask = None
        self._service_entries = None
        self._id_string = None

    @property
    def ioc_guid(self):
        """
        Gets the ioc_guid of this IbIocProfile.

        :return: The ioc_guid of this IbIocProfile.
        :rtype: str
        :required/optional: required
        """
        return self._ioc_guid

    @ioc_guid.setter
    def ioc_guid(self, ioc_guid):
        """
        Sets the ioc_guid of this IbIocProfile.

        :param ioc_guid: The ioc_guid of this IbIocProfile.
        :type: str
        """
        self._ioc_guid = ioc_guid

    @property
    def vendor_id(self):
        """
        Gets the vendor_id of this IbIocProfile.
        The I/O controller vendor ID in IEEE format.

        :return: The vendor_id of this IbIocProfile.
        :rtype: str
        :required/optional: required
        """
        return self._vendor_id

    @vendor_id.setter
    def vendor_id(self, vendor_id):
        """
        Sets the vendor_id of this IbIocProfile.
        The I/O controller vendor ID in IEEE format.

        :param vendor_id: The vendor_id of this IbIocProfile.
        :type: str
        """
        self._vendor_id = vendor_id

    @property
    def io_device_id(self):
        """
        Gets the io_device_id of this IbIocProfile.
        A number assigned by the vendor to identify the device.

        :return: The io_device_id of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._io_device_id

    @io_device_id.setter
    def io_device_id(self, io_device_id):
        """
        Sets the io_device_id of this IbIocProfile.
        A number assigned by the vendor to identify the device.

        :param io_device_id: The io_device_id of this IbIocProfile.
        :type: int
        """
        self._io_device_id = io_device_id

    @property
    def device_version(self):
        """
        Gets the device_version of this IbIocProfile.
        A number assigned by the vendor to identify the device version.

        :return: The device_version of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._device_version

    @device_version.setter
    def device_version(self, device_version):
        """
        Sets the device_version of this IbIocProfile.
        A number assigned by the vendor to identify the device version.

        :param device_version: The device_version of this IbIocProfile.
        :type: int
        """
        self._device_version = device_version

    @property
    def subsystem_vendor_id(self):
        """
        Gets the subsystem_vendor_id of this IbIocProfile.
        The subsystem vendor ID in IEEE format. All zeros if there is no
        vendor ID.

        :return: The subsystem_vendor_id of this IbIocProfile.
        :rtype: str
        :required/optional: required
        """
        return self._subsystem_vendor_id

    @subsystem_vendor_id.setter
    def subsystem_vendor_id(self, subsystem_vendor_id):
        """
        Sets the subsystem_vendor_id of this IbIocProfile.
        The subsystem vendor ID in IEEE format. All zeros if there is no
        vendor ID.

        :param subsystem_vendor_id: The subsystem_vendor_id of this IbIocProfile.
        :type: str
        """
        self._subsystem_vendor_id = subsystem_vendor_id

    @property
    def subsystem_id(self):
        """
        Gets the subsystem_id of this IbIocProfile.
        A number identifying the subsystem where the I/O controller resides.

        :return: The subsystem_id of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._subsystem_id

    @subsystem_id.setter
    def subsystem_id(self, subsystem_id):
        """
        Sets the subsystem_id of this IbIocProfile.
        A number identifying the subsystem where the I/O controller resides.

        :param subsystem_id: The subsystem_id of this IbIocProfile.
        :type: int
        """
        self._subsystem_id = subsystem_id

    @property
    def io_class(self):
        """
        Gets the io_class of this IbIocProfile.
        The I/O class of the controller. 0x0000 -0xFFFE is reserved for I/O
        classes encompassed by the InfiniBand architecture. 0xFFFF is
        vendor-specific.

        :return: The io_class of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._io_class

    @io_class.setter
    def io_class(self, io_class):
        """
        Sets the io_class of this IbIocProfile.
        The I/O class of the controller. 0x0000 -0xFFFE is reserved for I/O
        classes encompassed by the InfiniBand architecture. 0xFFFF is
        vendor-specific.

        :param io_class: The io_class of this IbIocProfile.
        :type: int
        """
        self._io_class = io_class

    @property
    def io_subclass(self):
        """
        Gets the io_subclass of this IbIocProfile.
        The I/O sub-class of the controller. 0x0000 -0xFFFE is reserved for
        I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is
        vendor-specific.

        :return: The io_subclass of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._io_subclass

    @io_subclass.setter
    def io_subclass(self, io_subclass):
        """
        Sets the io_subclass of this IbIocProfile.
        The I/O sub-class of the controller. 0x0000 -0xFFFE is reserved for
        I/O sub-classes encompassed by the InfiniBand architecture. 0xFFFF is
        vendor-specific.

        :param io_subclass: The io_subclass of this IbIocProfile.
        :type: int
        """
        self._io_subclass = io_subclass

    @property
    def protocol(self):
        """
        Gets the protocol of this IbIocProfile.
        The I/O protocol of the controller. 0x0000 -0xFFFE is reserved for
        I/O protocols encompassed by the InfiniBand architecture. 0xFFFF is
        vendor-specific.

        :return: The protocol of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._protocol

    @protocol.setter
    def protocol(self, protocol):
        """
        Sets the protocol of this IbIocProfile.
        The I/O protocol of the controller. 0x0000 -0xFFFE is reserved for
        I/O protocols encompassed by the InfiniBand architecture. 0xFFFF is
        vendor-specific.

        :param protocol: The protocol of this IbIocProfile.
        :type: int
        """
        self._protocol = protocol

    @property
    def send_message_queue_depth(self):
        """
        Gets the send_message_queue_depth of this IbIocProfile.
        The maximum depth of the Send Message Queue.

        :return: The send_message_queue_depth of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._send_message_queue_depth

    @send_message_queue_depth.setter
    def send_message_queue_depth(self, send_message_queue_depth):
        """
        Sets the send_message_queue_depth of this IbIocProfile.
        The maximum depth of the Send Message Queue.

        :param send_message_queue_depth: The send_message_queue_depth of this IbIocProfile.
        :type: int
        """
        self._send_message_queue_depth = send_message_queue_depth

    @property
    def rdma_read_queue_depth(self):
        """
        Gets the rdma_read_queue_depth of this IbIocProfile.
        The maximum depth of the per-channel RDMA Read Queue

        :return: The rdma_read_queue_depth of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._rdma_read_queue_depth

    @rdma_read_queue_depth.setter
    def rdma_read_queue_depth(self, rdma_read_queue_depth):
        """
        Sets the rdma_read_queue_depth of this IbIocProfile.
        The maximum depth of the per-channel RDMA Read Queue

        :param rdma_read_queue_depth: The rdma_read_queue_depth of this IbIocProfile.
        :type: int
        """
        self._rdma_read_queue_depth = rdma_read_queue_depth

    @property
    def send_message_size(self):
        """
        Gets the send_message_size of this IbIocProfile.
        The maximum size of Send Messages in bytes.

        :return: The send_message_size of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._send_message_size

    @send_message_size.setter
    def send_message_size(self, send_message_size):
        """
        Sets the send_message_size of this IbIocProfile.
        The maximum size of Send Messages in bytes.

        :param send_message_size: The send_message_size of this IbIocProfile.
        :type: int
        """
        self._send_message_size = send_message_size

    @property
    def rdma_transfer_size(self):
        """
        Gets the rdma_transfer_size of this IbIocProfile.
        The maximum size of outbound RDMA transfers initiated by the
        controller.

        :return: The rdma_transfer_size of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._rdma_transfer_size

    @rdma_transfer_size.setter
    def rdma_transfer_size(self, rdma_transfer_size):
        """
        Sets the rdma_transfer_size of this IbIocProfile.
        The maximum size of outbound RDMA transfers initiated by the
        controller.

        :param rdma_transfer_size: The rdma_transfer_size of this IbIocProfile.
        :type: int
        """
        self._rdma_transfer_size = rdma_transfer_size

    @property
    def controller_ops_capability_mask(self):
        """
        Gets the controller_ops_capability_mask of this IbIocProfile.
        Supported operation types of this controller. Bit 0 on = Send
        Messages to IOCs. Bit 1 on = Send Messages from IOCs. Bit 2 on =
        RDMA Read Requests to IOCs. Bit 3 on = RDMA Read Requests from IOCs.
        Bit 4 on = RDMA Write Requests to IOCs. Bit 5 on = RDMA Write
        Requests from IOCs.

        :return: The controller_ops_capability_mask of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._controller_ops_capability_mask

    @controller_ops_capability_mask.setter
    def controller_ops_capability_mask(self, controller_ops_capability_mask):
        """
        Sets the controller_ops_capability_mask of this IbIocProfile.

        :param controller_ops_capability_mask: The controller_ops_capability_mask of this IbIocProfile.
        :type: int
        """
        self._controller_ops_capability_mask = controller_ops_capability_mask

    @property
    def service_entries(self):
        """
        Gets the service_entries of this IbIocProfile.
        The number of entries in the service entries table

        :return: The service_entries of this IbIocProfile.
        :rtype: int
        :required/optional: required
        """
        return self._service_entries

    @service_entries.setter
    def service_entries(self, service_entries):
        """
        Sets the service_entries of this IbIocProfile.
        The number of entries in the service entries table

        :param service_entries: The service_entries of this IbIocProfile.
        :type: int
        """
        self._service_entries = service_entries

    @property
    def id_string(self):
        """
        Gets the id_string of this IbIocProfile.
        A UTF-8 encoded string for identifying the controller to user.

        :return: The id_string of this IbIocProfile.
        :rtype: str
        :required/optional: required
        """
        return self._id_string

    @id_string.setter
    def id_string(self, id_string):
        """
        Sets the id_string of this IbIocProfile.
        A UTF-8 encoded string for identifying the controller to user.

        :param id_string: The id_string of this IbIocProfile.
        :type: str
        """
        self._id_string = id_string

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if self is None or other is None:
            return None
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
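# Illustrative usage sketch for the generated model above. The field values
# are made-up examples, not real controller data: attributes go through the
# property setters, and to_dict() returns them keyed by attribute name.
profile = IbIocProfile()
profile.ioc_guid = '0002:c903:0001:2345'
profile.vendor_id = '0x0002C9'
profile.send_message_queue_depth = 256
print(profile.to_dict()['send_message_queue_depth'])  # 256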
[ "uri.strip('/').split('/') pattern_parts = [] for part in parts: is_variable = VARIABLE.search(part) if is_variable:", "= [] def add(self, uri, resource, priority=0): parts = uri.strip('/').split('/') pattern_parts = []", "it: it[0], reverse=True) # ok for our N < 20 def match(self, uri):", "re VARIABLE = re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns = [] def add(self,", "<filename>yandex_tracker_client/uriutils.py<gh_stars>10-100 # coding: utf-8 import re VARIABLE = re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self):", "ok for our N < 20 def match(self, uri): path = uri.strip('/') for", "VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern =", "= re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns = [] def add(self, uri, resource,", "r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource", "add(self, uri, resource, priority=0): parts = uri.strip('/').split('/') pattern_parts = [] for part in", "else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource )) #sort by priority", "utf-8 import re VARIABLE = re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns = []", "__init__(self): self._patterns = [] def add(self, uri, resource, priority=0): parts = uri.strip('/').split('/') pattern_parts", "priority self._patterns.sort(key=lambda it: it[0], reverse=True) # ok for our N < 20 def", "our N < 20 def match(self, uri): path = uri.strip('/') for _, pattern,", "_, pattern, value in self._patterns: match = pattern.match(path) if match: return value return", "parts = uri.strip('/').split('/') pattern_parts = [] for part in parts: is_variable = VARIABLE.search(part)", "uri, resource, priority=0): parts = uri.strip('/').split('/') pattern_parts = [] for part in parts:", "[] def add(self, uri, resource, priority=0): parts = uri.strip('/').split('/') pattern_parts = [] for", "by priority self._patterns.sort(key=lambda it: it[0], reverse=True) # ok for our N < 20", "priority=0): parts = uri.strip('/').split('/') pattern_parts = [] for part in parts: is_variable =", "is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append((", "self._patterns.sort(key=lambda it: it[0], reverse=True) # ok for our N < 20 def match(self,", "= [] for part in parts: is_variable = VARIABLE.search(part) if is_variable: pattern_part =", "match(self, uri): path = uri.strip('/') for _, pattern, value in self._patterns: match =", "= uri.strip('/') for _, pattern, value in self._patterns: match = pattern.match(path) if match:", "uri.strip('/') for _, pattern, value in self._patterns: match = pattern.match(path) if match: return", "re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource )) #sort by priority self._patterns.sort(key=lambda it: it[0], reverse=True)", "= r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = 
re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern,", "= VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern", "< 20 def match(self, uri): path = uri.strip('/') for _, pattern, value in", "parts: is_variable = VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else:", "# ok for our N < 20 def match(self, uri): path = uri.strip('/')", "class Matcher(object): def __init__(self): self._patterns = [] def add(self, uri, resource, priority=0): parts", "def add(self, uri, resource, priority=0): parts = uri.strip('/').split('/') pattern_parts = [] for part", "self._patterns.append(( priority, pattern, resource )) #sort by priority self._patterns.sort(key=lambda it: it[0], reverse=True) #", "resource, priority=0): parts = uri.strip('/').split('/') pattern_parts = [] for part in parts: is_variable", ") pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource )) #sort", "resource )) #sort by priority self._patterns.sort(key=lambda it: it[0], reverse=True) # ok for our", "20 def match(self, uri): path = uri.strip('/') for _, pattern, value in self._patterns:", "part in parts: is_variable = VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) )", "pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority,", "coding: utf-8 import re VARIABLE = re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns =", "it[0], reverse=True) # ok for our N < 20 def match(self, uri): path", "uri): path = uri.strip('/') for _, pattern, value in self._patterns: match = pattern.match(path)", "pattern, value in self._patterns: match = pattern.match(path) if match: return value return None", "is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource ))", "N < 20 def match(self, uri): path = uri.strip('/') for _, pattern, value", "path = uri.strip('/') for _, pattern, value in self._patterns: match = pattern.match(path) if", "pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource )) #sort by priority self._patterns.sort(key=lambda it:", "Matcher(object): def __init__(self): self._patterns = [] def add(self, uri, resource, priority=0): parts =", ")) #sort by priority self._patterns.sort(key=lambda it: it[0], reverse=True) # ok for our N", "pattern, resource )) #sort by priority self._patterns.sort(key=lambda it: it[0], reverse=True) # ok for", "if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts))", "for our N < 20 def match(self, uri): path = uri.strip('/') for _,", "[] for part in parts: is_variable = VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format(", "def __init__(self): self._patterns = [] def add(self, uri, resource, priority=0): parts 
= uri.strip('/').split('/')", "= uri.strip('/').split('/') pattern_parts = [] for part in parts: is_variable = VARIABLE.search(part) if", "import re VARIABLE = re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns = [] def", "= re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource )) #sort by priority self._patterns.sort(key=lambda it: it[0],", "VARIABLE = re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns = [] def add(self, uri,", "in parts: is_variable = VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part)", "def match(self, uri): path = uri.strip('/') for _, pattern, value in self._patterns: match", "is_variable = VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1) ) pattern_parts.append(pattern_part) else: pattern_parts.append(part)", "for _, pattern, value in self._patterns: match = pattern.match(path) if match: return value", "reverse=True) # ok for our N < 20 def match(self, uri): path =", "# coding: utf-8 import re VARIABLE = re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns", "re.compile(r'{([\\w\\d\\-_\\.]+)}') class Matcher(object): def __init__(self): self._patterns = [] def add(self, uri, resource, priority=0):", "pattern_parts.append(pattern_part) else: pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource )) #sort by", "#sort by priority self._patterns.sort(key=lambda it: it[0], reverse=True) # ok for our N <", "self._patterns = [] def add(self, uri, resource, priority=0): parts = uri.strip('/').split('/') pattern_parts =", "pattern_parts = [] for part in parts: is_variable = VARIABLE.search(part) if is_variable: pattern_part", "priority, pattern, resource )) #sort by priority self._patterns.sort(key=lambda it: it[0], reverse=True) # ok", "pattern_parts.append(part) pattern = re.compile('/'.join(pattern_parts)) self._patterns.append(( priority, pattern, resource )) #sort by priority self._patterns.sort(key=lambda", "for part in parts: is_variable = VARIABLE.search(part) if is_variable: pattern_part = r'(?P<{0}>[\\w\\d\\-\\_\\.]+)'.format( is_variable.group(1)" ]
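# Illustrative usage sketch (not part of uriutils.py): '{name}' placeholders
# in a URI template become named regex groups matching [\w\d\-_\.]+, and
# registered patterns are tried in descending priority order.
m = Matcher()
m.add('/issues/{issue}', 'issue resource')
m.add('/issues/_search', 'search resource', priority=1)
print(m.match('/issues/_search'))  # 'search resource' (higher priority wins)
print(m.match('/issues/TEST-1'))   # 'issue resource'
print(m.match('/unrelated'))       # None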
[ "response_body['status'] self.error = response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at']", "self.description = response_body['description'] self.status = response_body['status'] self.error = response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at", "= response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self): return self.id def", "= response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self): return self.id def request_id(self): return self.request_id", "response_body['description'] self.status = response_body['status'] self.error = response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at']", "from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response, response_body) self.id", "response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self): return", "= response_body['id'] self.request_id = response_body['request_id'] self.name = response_body['name'] self.description = response_body['description'] self.status =", "self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self): return self.id def request_id(self): return", "BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response, response_body) self.id = response_body['id'] self.request_id = response_body['request_id']", "response_body['name'] self.description = response_body['description'] self.status = response_body['status'] self.error = response_body['error'] self.error_lineno = response_body['error_lineno']", "by code generator. 
from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response, response_body):", "response_body) self.id = response_body['id'] self.request_id = response_body['request_id'] self.name = response_body['name'] self.description = response_body['description']", "= response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self):", "self.status = response_body['status'] self.error = response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at", "pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response, response_body) self.id =", "def request_id(self): return self.request_id def name(self): return self.name def description(self): return self.description def", "self.status def error(self): return self.error def error_lineno(self): return self.error_lineno def submitted_at(self): return self.submitted_at", "self.error = response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at'] def", "return self.name def description(self): return self.description def status(self): return self.status def error(self): return", "status(self): return self.status def error(self): return self.error def error_lineno(self): return self.error_lineno def submitted_at(self):", "return self.error def error_lineno(self): return self.error_lineno def submitted_at(self): return self.submitted_at def updated_at(self): return", "def description(self): return self.description def status(self): return self.status def error(self): return self.error def", "# DO NOT EDIT: File is generated by code generator. from pokepay_partner_python_sdk.pokepay.response.response import", "def name(self): return self.name def description(self): return self.description def status(self): return self.status def", "return self.request_id def name(self): return self.name def description(self): return self.description def status(self): return", "response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self): return self.id def request_id(self): return self.request_id def", "return self.description def status(self): return self.status def error(self): return self.error def error_lineno(self): return", "response_body['id'] self.request_id = response_body['request_id'] self.name = response_body['name'] self.description = response_body['description'] self.status = response_body['status']", "response_body): super().__init__(response, response_body) self.id = response_body['id'] self.request_id = response_body['request_id'] self.name = response_body['name'] self.description", "class BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response, response_body) self.id = response_body['id'] self.request_id =", "self.name def description(self): return self.description def status(self): return self.status def error(self): return self.error", "self.id def request_id(self): return self.request_id def name(self): return self.name def description(self): return self.description", "EDIT: File is generated by code generator. 
from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse):", "response_body['request_id'] self.name = response_body['name'] self.description = response_body['description'] self.status = response_body['status'] self.error = response_body['error']", "= response_body['request_id'] self.name = response_body['name'] self.description = response_body['description'] self.status = response_body['status'] self.error =", "File is generated by code generator. from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def", "return self.status def error(self): return self.error def error_lineno(self): return self.error_lineno def submitted_at(self): return", "= response_body['status'] self.error = response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at =", "error(self): return self.error def error_lineno(self): return self.error_lineno def submitted_at(self): return self.submitted_at def updated_at(self):", "self.request_id def name(self): return self.name def description(self): return self.description def status(self): return self.status", "= response_body['updated_at'] def id(self): return self.id def request_id(self): return self.request_id def name(self): return", "request_id(self): return self.request_id def name(self): return self.name def description(self): return self.description def status(self):", "generated by code generator. from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response,", "return self.id def request_id(self): return self.request_id def name(self): return self.name def description(self): return", "response_body['updated_at'] def id(self): return self.id def request_id(self): return self.request_id def name(self): return self.name", "name(self): return self.name def description(self): return self.description def status(self): return self.status def error(self):", "= response_body['name'] self.description = response_body['description'] self.status = response_body['status'] self.error = response_body['error'] self.error_lineno =", "is generated by code generator. from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self,", "self.error_lineno = response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self): return self.id", "generator. 
from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response, response_body)", "def status(self): return self.status def error(self): return self.error def error_lineno(self): return self.error_lineno def", "def __init__(self, response, response_body): super().__init__(response, response_body) self.id = response_body['id'] self.request_id = response_body['request_id'] self.name", "import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response, response_body) self.id = response_body['id']", "response, response_body): super().__init__(response, response_body) self.id = response_body['id'] self.request_id = response_body['request_id'] self.name = response_body['name']", "def id(self): return self.id def request_id(self): return self.request_id def name(self): return self.name def", "self.error def error_lineno(self): return self.error_lineno def submitted_at(self): return self.submitted_at def updated_at(self): return self.updated_at", "NOT EDIT: File is generated by code generator. from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class", "super().__init__(response, response_body) self.id = response_body['id'] self.request_id = response_body['request_id'] self.name = response_body['name'] self.description =", "DO NOT EDIT: File is generated by code generator. from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse", "self.request_id = response_body['request_id'] self.name = response_body['name'] self.description = response_body['description'] self.status = response_body['status'] self.error", "self.name = response_body['name'] self.description = response_body['description'] self.status = response_body['status'] self.error = response_body['error'] self.error_lineno", "self.updated_at = response_body['updated_at'] def id(self): return self.id def request_id(self): return self.request_id def name(self):", "code generator. 
from pokepay_partner_python_sdk.pokepay.response.response import PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response,", "id(self): return self.id def request_id(self): return self.request_id def name(self): return self.name def description(self):", "= response_body['description'] self.status = response_body['status'] self.error = response_body['error'] self.error_lineno = response_body['error_lineno'] self.submitted_at =", "def error(self): return self.error def error_lineno(self): return self.error_lineno def submitted_at(self): return self.submitted_at def", "PokepayResponse class BulkTransaction(PokepayResponse): def __init__(self, response, response_body): super().__init__(response, response_body) self.id = response_body['id'] self.request_id", "__init__(self, response, response_body): super().__init__(response, response_body) self.id = response_body['id'] self.request_id = response_body['request_id'] self.name =", "self.description def status(self): return self.status def error(self): return self.error def error_lineno(self): return self.error_lineno", "description(self): return self.description def status(self): return self.status def error(self): return self.error def error_lineno(self):", "self.id = response_body['id'] self.request_id = response_body['request_id'] self.name = response_body['name'] self.description = response_body['description'] self.status", "response_body['error_lineno'] self.submitted_at = response_body['submitted_at'] self.updated_at = response_body['updated_at'] def id(self): return self.id def request_id(self):" ]
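# Illustrative sketch, not from the Pokepay SDK docs. Assumption: the real SDK
# passes the raw HTTP response as the first argument and PokepayResponse's
# __init__ simply records its two arguments, so None stands in here. Note a
# quirk of this generated class: the attributes assigned in __init__ shadow
# the accessor methods of the same name, so bulk.status is the value itself
# and bulk.status() would fail.
body = {'id': 'bt-1', 'request_id': 'req-1', 'name': 'bulk import',
        'description': '', 'status': 'processing', 'error': None,
        'error_lineno': None, 'submitted_at': '2021-01-01T00:00:00Z',
        'updated_at': '2021-01-01T00:00:00Z'}
bulk = BulkTransaction(None, body)
print(bulk.status)  # 'processing'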
[ "body = request.data s = 'You want path: {} and data: {}'.format(path, body)", "app = Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/',", "char == '&'][0] data = dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data) r", "def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__ == '__main__': ip = '0.0.0.0' httpPort", "the POST req body = request.data s = 'You want path: {} and", "jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return", "multiprocessing import Process app = Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization')", "char in enumerate(dataString) if char == '&'][0] data = dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr)", "def catch_all(path): try: # Get the body of the POST req body =", "of '&' char ANDlndex = [pos for pos, char in enumerate(dataString) if char", "the first index of '&' char ANDlndex = [pos for pos, char in", "import Process app = Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return", "jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except: return", "{'ContentType': 'application/json'} except: return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip,", "'You want path: {} and data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString =", "= dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'],", "Get the body of the POST req body = request.data s = 'You", "body) print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") # Get the first index of", "dataString = body.decode(\"utf-8\") # Get the first index of '&' char ANDlndex =", "= 5150 httpsPort = 5151 Process(target=http_app, args=(ip, httpPort), daemon=True).start() app.run(host=ip, port=httpsPort, ssl_context=('cert.pem', 'key.pem'))", "import sys, requests, json from multiprocessing import Process app = Flask(__name__) @app.after_request def", "port=httpsPort) if __name__ == '__main__': ip = '0.0.0.0' httpPort = 5150 httpsPort =", "flask import Flask, request import sys, requests, json from multiprocessing import Process app", "''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try: # Get the body of the", "data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") # Get the first", "path: {} and data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") #", "file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except: return json.dumps({'rip': True}), 404, {'ContentType':", "sys, 
requests, json from multiprocessing import Process app = Flask(__name__) @app.after_request def add_headers(response):", "and data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") # Get the", "= Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path':", "print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") # Get the first index of '&'", "[pos for pos, char in enumerate(dataString) if char == '&'][0] data = dataString[(ANDlndex+1):]", "httpPort = 5150 httpsPort = 5151 Process(target=http_app, args=(ip, httpPort), daemon=True).start() app.run(host=ip, port=httpsPort, ssl_context=('cert.pem',", "from multiprocessing import Process app = Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers',", "= request.data s = 'You want path: {} and data: {}'.format(path, body) print(\"All:", "return response @app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try: # Get", "= requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType':", "'application/json'} except: return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort)", "return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__", "print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']})", "ip = '0.0.0.0' httpPort = 5150 httpsPort = 5151 Process(target=http_app, args=(ip, httpPort), daemon=True).start()", "pos, char in enumerate(dataString) if char == '&'][0] data = dataString[(ANDlndex+1):] print(\"striped: \"+data,", "= '0.0.0.0' httpPort = 5150 httpsPort = 5151 Process(target=http_app, args=(ip, httpPort), daemon=True).start() app.run(host=ip,", "# Get the body of the POST req body = request.data s =", "<PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except: return json.dumps({'rip': True}),", "dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\":", "methods=['POST']) def catch_all(path): try: # Get the body of the POST req body", "= json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success':", "# Get the first index of '&' char ANDlndex = [pos for pos,", "file=sys.stderr) jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr)", "of the POST req body = request.data s = 'You want path: {}", 
"enumerate(dataString) if char == '&'][0] data = dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData =", "== '__main__': ip = '0.0.0.0' httpPort = 5150 httpsPort = 5151 Process(target=http_app, args=(ip,", "try: # Get the body of the POST req body = request.data s", "'0.0.0.0' httpPort = 5150 httpsPort = 5151 Process(target=http_app, args=(ip, httpPort), daemon=True).start() app.run(host=ip, port=httpsPort,", "app.run(host=ip, port=httpsPort) if __name__ == '__main__': ip = '0.0.0.0' httpPort = 5150 httpsPort", "from flask import Flask, request import sys, requests, json from multiprocessing import Process", "ANDlndex = [pos for pos, char in enumerate(dataString) if char == '&'][0] data", "\"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") # Get the first index of '&' char", "True}), 200, {'ContentType': 'application/json'} except: return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def http_app(ip,", "for pos, char in enumerate(dataString) if char == '&'][0] data = dataString[(ANDlndex+1):] print(\"striped:", "request.data s = 'You want path: {} and data: {}'.format(path, body) print(\"All: \"+s,", "'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__ == '__main__': ip = '0.0.0.0'", "'&'][0] data = dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path,", "json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except:", "char ANDlndex = [pos for pos, char in enumerate(dataString) if char == '&'][0]", "defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try: # Get the body of", "\"+data, file=sys.stderr) jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code,", "body.decode(\"utf-8\") # Get the first index of '&' char ANDlndex = [pos for", "Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''},", "response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def", "'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try: #", "req body = request.data s = 'You want path: {} and data: {}'.format(path,", "data = dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\":", "@app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''}, methods=['POST'])", "response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''}, 
methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try:", "json from multiprocessing import Process app = Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*')", "= 'You want path: {} and data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString", "Get the first index of '&' char ANDlndex = [pos for pos, char", "'*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path):", "== '&'][0] data = dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data) r =", "add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST'])", "\"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except: return json.dumps({'rip':", "except: return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if", "json.loads(data) r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}),", "requests, json from multiprocessing import Process app = Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin',", "__name__ == '__main__': ip = '0.0.0.0' httpPort = 5150 httpsPort = 5151 Process(target=http_app,", "@app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try: # Get the body", "{}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") # Get the first index", "'&' char ANDlndex = [pos for pos, char in enumerate(dataString) if char ==", "404, {'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__ == '__main__': ip", "import Flask, request import sys, requests, json from multiprocessing import Process app =", "the body of the POST req body = request.data s = 'You want", "200, {'ContentType': 'application/json'} except: return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def http_app(ip, httpsPort):", "'__main__': ip = '0.0.0.0' httpPort = 5150 httpsPort = 5151 Process(target=http_app, args=(ip, httpPort),", "Flask, request import sys, requests, json from multiprocessing import Process app = Flask(__name__)", "response @app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try: # Get the", "want path: {} and data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\")", "POST req body = request.data s = 'You want path: {} and data:", "requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType': 'application/json'}", "in enumerate(dataString) if char == '&'][0] data = 
dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData", "index of '&' char ANDlndex = [pos for pos, char in enumerate(dataString) if", "first index of '&' char ANDlndex = [pos for pos, char in enumerate(dataString)", "= [pos for pos, char in enumerate(dataString) if char == '&'][0] data =", "print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except: return json.dumps({'rip': True}), 404,", "@app.route('/<path:path>', methods=['POST']) def catch_all(path): try: # Get the body of the POST req", "request import sys, requests, json from multiprocessing import Process app = Flask(__name__) @app.after_request", "file=sys.stderr) dataString = body.decode(\"utf-8\") # Get the first index of '&' char ANDlndex", "methods=['POST']) @app.route('/<path:path>', methods=['POST']) def catch_all(path): try: # Get the body of the POST", "return json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except: return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'}", "s = 'You want path: {} and data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr)", "if char == '&'][0] data = dataString[(ANDlndex+1):] print(\"striped: \"+data, file=sys.stderr) jsonData = json.loads(data)", "httpsPort): app.run(host=ip, port=httpsPort) if __name__ == '__main__': ip = '0.0.0.0' httpPort = 5150", "= body.decode(\"utf-8\") # Get the first index of '&' char ANDlndex = [pos", "def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response @app.route('/', defaults={'path': ''}, methods=['POST']) @app.route('/<path:path>',", "if __name__ == '__main__': ip = '0.0.0.0' httpPort = 5150 httpsPort = 5151", "r = requests.post('http://10.10.1.140:9200/'+path, json={\"username\": jsonData['username'], \"password\": <PASSWORD>['password']}) print(r.status_code, file=sys.stderr) return json.dumps({'success': True}), 200,", "http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__ == '__main__': ip = '0.0.0.0' httpPort =", "{'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__ == '__main__': ip =", "{} and data: {}'.format(path, body) print(\"All: \"+s, file=sys.stderr) dataString = body.decode(\"utf-8\") # Get", "body of the POST req body = request.data s = 'You want path:", "Process app = Flask(__name__) @app.after_request def add_headers(response): response.headers.add('Access-Control-Allow-Origin', '*') response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') return response", "json.dumps({'success': True}), 200, {'ContentType': 'application/json'} except: return json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def", "json.dumps({'rip': True}), 404, {'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__ ==", "catch_all(path): try: # Get the body of the POST req body = request.data", "True}), 404, {'ContentType': 'application/json'} def http_app(ip, httpsPort): app.run(host=ip, port=httpsPort) if __name__ == '__main__':" ]
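# Illustrative client-side sketch (not part of the server file above): the
# catch-all route accepts a POST to any path, and the handler keeps everything
# after the first '&' in the body and parses it as JSON. Host/port and payload
# below are made-up; the upstream forward will fail (so the handler returns
# the 404/'rip' branch) unless the hard-coded 10.10.1.140 target is reachable.
import requests

resp = requests.post('http://localhost:5150/login/submit',
                     data='junk=1&{"username": "u", "password": "p"}')
print(resp.status_code, resp.text)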
[ "<filename>setup.py import setuptools import os with open(\"README.md\", \"r\", encoding=\"utf-8\") as fh: long_description =", "}, classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI Approved ::", "requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support", "fid): if line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if version is None: version =", "setuptools import os with open(\"README.md\", \"r\", encoding=\"utf-8\") as fh: long_description = fh.read() version", "import os with open(\"README.md\", \"r\", encoding=\"utf-8\") as fh: long_description = fh.read() version =", "line.split('=')[1].strip().strip(\"'\") break if version is None: version = \"0.0.1\" with open(\"requirements.txt\") as f:", "\"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', }, classifiers=[ \"Programming Language :: Python ::", "break if version is None: version = \"0.0.1\" with open(\"requirements.txt\") as f: requires", "import setuptools import os with open(\"README.md\", \"r\", encoding=\"utf-8\") as fh: long_description = fh.read()", "for line in (line.strip() for line in fid): if line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\")", "with open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for line in (line.strip() for line in", "open(\"requirements.txt\") as f: requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\",", "in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL", "\"Operating System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\", include_package_data=True, install_requires=requires, keywords=\"EEG MEG MNE", "os with open(\"README.md\", \"r\", encoding=\"utf-8\") as fh: long_description = fh.read() version = None", "in fid): if line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if version is None: version", "with open(\"README.md\", \"r\", encoding=\"utf-8\") as fh: long_description = fh.read() version = None with", "for line in fid): if line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if version is", "with open(\"requirements.txt\") as f: requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version,", ":: Python :: 3\", \"License :: OSI Approved :: BSD License\", \"Operating System", "open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for line in (line.strip() for line in fid):", "(3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', },", "= None with open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for line in (line.strip() for", "'/issues', }, classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI Approved", "line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if version is None: version = \"0.0.1\" with", "if 
line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if version is None: version = \"0.0.1\"", "description=\"I/O support for EEGLAB files in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL,", "= f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for", "line in fid): if line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if version is None:", "version = \"0.0.1\" with open(\"requirements.txt\") as f: requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\"", "is None: version = \"0.0.1\" with open(\"requirements.txt\") as f: requires = f.read().splitlines() GITHUB_URL", "3\", \"License :: OSI Approved :: BSD License\", \"Operating System :: OS Independent\",", "fh.read() version = None with open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for line in", "OSI Approved :: BSD License\", \"Operating System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\",", "Python :: 3\", \"License :: OSI Approved :: BSD License\", \"Operating System ::", "\"Programming Language :: Python :: 3\", \"License :: OSI Approved :: BSD License\",", "Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL +", "encoding=\"utf-8\") as fh: long_description = fh.read() version = None with open(os.path.join('eeglabio', '_version.py'), 'r')", "version is None: version = \"0.0.1\" with open(\"requirements.txt\") as f: requires = f.read().splitlines()", "support for EEGLAB files in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={", "\"Tracker\": GITHUB_URL + '/issues', }, classifiers=[ \"Programming Language :: Python :: 3\", \"License", "version = line.split('=')[1].strip().strip(\"'\") break if version is None: version = \"0.0.1\" with open(\"requirements.txt\")", "if version is None: version = \"0.0.1\" with open(\"requirements.txt\") as f: requires =", "license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues',", "project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', }, classifiers=[ \"Programming Language :: Python", "\"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files in Python\",", "long_description = fh.read() version = None with open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for", "author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL,", "EEGLAB files in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, 
project_urls={ \"Source\": GITHUB_URL,", "None with open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for line in (line.strip() for line", "open(\"README.md\", \"r\", encoding=\"utf-8\") as fh: long_description = fh.read() version = None with open(os.path.join('eeglabio',", "fh: long_description = fh.read() version = None with open(os.path.join('eeglabio', '_version.py'), 'r') as fid:", "'r') as fid: for line in (line.strip() for line in fid): if line.startswith('__version__'):", "as f: requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\",", "long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', }, classifiers=[ \"Programming", "version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files in Python\", license=\"BSD (3-clause)\", long_description=long_description,", "= \"0.0.1\" with open(\"requirements.txt\") as f: requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup(", "as fid: for line in (line.strip() for line in fid): if line.startswith('__version__'): version", "= line.split('=')[1].strip().strip(\"'\") break if version is None: version = \"0.0.1\" with open(\"requirements.txt\") as", "classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI Approved :: BSD", "in (line.strip() for line in fid): if line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if", "(line.strip() for line in fid): if line.startswith('__version__'): version = line.split('=')[1].strip().strip(\"'\") break if version", "as fh: long_description = fh.read() version = None with open(os.path.join('eeglabio', '_version.py'), 'r') as", ":: OSI Approved :: BSD License\", \"Operating System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)),", "name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files in Python\", license=\"BSD (3-clause)\",", "None: version = \"0.0.1\" with open(\"requirements.txt\") as f: requires = f.read().splitlines() GITHUB_URL =", "System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\", include_package_data=True, install_requires=requires, keywords=\"EEG MEG MNE EEGLAB\",", "+ '/issues', }, classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI", "License\", \"Operating System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\", include_package_data=True, install_requires=requires, keywords=\"EEG MEG", "BSD License\", \"Operating System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\", include_package_data=True, install_requires=requires, keywords=\"EEG", "fid: for line in (line.strip() for line in fid): if line.startswith('__version__'): version =", "GITHUB_URL + '/issues', }, classifiers=[ \"Programming Language :: Python :: 3\", \"License ::", "Language :: Python :: 3\", \"License :: OSI Approved :: BSD License\", \"Operating", "GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', }, classifiers=[ \"Programming Language :: Python :: 3\",", "version = None with open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for line 
in (line.strip()", "GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files", ":: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\", include_package_data=True, install_requires=requires, keywords=\"EEG MEG MNE EEGLAB\", )", "'_version.py'), 'r') as fid: for line in (line.strip() for line in fid): if", "f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB", "\"r\", encoding=\"utf-8\") as fh: long_description = fh.read() version = None with open(os.path.join('eeglabio', '_version.py'),", "url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', }, classifiers=[ \"Programming Language", "f: requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O", "files in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\":", ":: 3\", \"License :: OSI Approved :: BSD License\", \"Operating System :: OS", "\"License :: OSI Approved :: BSD License\", \"Operating System :: OS Independent\", ],", "= \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files in", "= fh.read() version = None with open(os.path.join('eeglabio', '_version.py'), 'r') as fid: for line", "long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', }, classifiers=[", "download_url=GITHUB_URL, project_urls={ \"Source\": GITHUB_URL, \"Tracker\": GITHUB_URL + '/issues', }, classifiers=[ \"Programming Language ::", "for EEGLAB files in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\", url=GITHUB_URL, download_url=GITHUB_URL, project_urls={ \"Source\":", "author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files in Python\", license=\"BSD (3-clause)\", long_description=long_description, long_description_content_type=\"text/markdown\",", ":: BSD License\", \"Operating System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\", include_package_data=True, install_requires=requires,", "setuptools.setup( name=\"eeglabio\", version=version, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"I/O support for EEGLAB files in Python\", license=\"BSD", "\"0.0.1\" with open(\"requirements.txt\") as f: requires = f.read().splitlines() GITHUB_URL = \"https://github.com/jackz314/eeglabio\" setuptools.setup( name=\"eeglabio\",", "Approved :: BSD License\", \"Operating System :: OS Independent\", ], packages=setuptools.find_packages(exclude=(\"*tests\",)), python_requires=\">=3.6\", include_package_data=True,", "line in (line.strip() for line in fid): if line.startswith('__version__'): version = 
with open("requirements.txt") as f:
    requires = f.read().splitlines()

GITHUB_URL = "https://github.com/jackz314/eeglabio"

setuptools.setup(
    name="eeglabio",
    version=version,
    author="<NAME>",
    author_email="<EMAIL>",
    description="I/O support for EEGLAB files in Python",
    license="BSD (3-clause)",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url=GITHUB_URL,
    download_url=GITHUB_URL,
    project_urls={
        "Source": GITHUB_URL,
        "Tracker": GITHUB_URL + '/issues',
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
    ],
    packages=setuptools.find_packages(exclude=("*tests",)),
    python_requires=">=3.6",
    include_package_data=True,
    install_requires=requires,
    keywords="EEG MEG MNE EEGLAB",
)
[ "line search \"\"\" x_i, y_i = x0 all_x_i = list() all_y_i = list()", "r[1][:10]) plt.plot(r[0][:10], r[1][:10]) plt.title('exact line search') plt.savefig(\"steepestDescentDemo.png\", dpi = 300) plt.show() if __name__", "= g_xx H[0][1] = g_xy H[1][0] = g_xy H[1][1] = g_yy return H", "\"\"\" f = 0.5 * np.square(np.square(x[0]) - x[1]) + 0.5 * np.square(x[0] -", "aoki, aoki_gd, hessian = aoki_hess, adaptative = True) plt.scatter(r[0][:10], r[1][:10]) plt.plot(r[0][:10], r[1][:10]) plt.title('exact", "z, 50) plt.plot(1, 1, 'go', MarkerSize=10) r = gradient_descent(np.array((0.0, 0.0)), aoki, aoki_gd, hessian", "<reponame>nappaillav/pyprobml<filename>scripts/steepestDescentDemo.py # Author: <NAME> import numpy as np import matplotlib.pyplot as plt from", "f def aoki(x): \"\"\" F(x,y) = 0.5 x (x^2 - y)^2 + 0.5", "derivative - Hessian Matrix of aoki function(Nabia - 2) \"\"\" g_xx = 6", "= -2 * x[0] g_yy = 1 H = np.diag((2,2)) H[0][0] = g_xx", "conditions step = line_search(f, f_prime, np.r_[x_i, y_i], -np.r_[dx_i, dy_i], np.r_[dx_i, dy_i], c2=.05) step", "np.r_[x_i, y_i], -np.r_[dx_i, dy_i], np.r_[dx_i, dy_i], c2=.05) step = step[0] if step is", "0.5 x (x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[:][0]) - x[:][1]) + 0.5", "1 H = np.diag((2,2)) H[0][0] = g_xx H[0][1] = g_xy H[1][0] = g_xy", "x = np.meshgrid(x1, x2) z = aokiFn(np.array(x)) plt.contour(x1, x2, z, 50) plt.plot(1, 1,", "Author: <NAME> import numpy as np import matplotlib.pyplot as plt from scipy.optimize import", "= 2 * np.dot((np.square(x[0]) - x[1]), x[0]) + x[0] - 1 g_y =", "step = 1 x_i += - step*dx_i y_i += - step*dy_i if np.abs(all_f_i[-1])", "if adaptative: # Compute a step size using a line_search to satisfy the", "# Author: <NAME> import numpy as np import matplotlib.pyplot as plt from scipy.optimize", "1) return f def aoki_gd(x): \"\"\" First-Order derivative of aoki function(Nabia - 1)", "= 0.5 * np.square(np.square(x[:][0]) - x[:][1]) + 0.5 * np.square(x[:][0] - 1) return", "= 0.5 * np.square(np.square(x[0]) - x[1]) + 0.5 * np.square(x[0] - 1) return", "H[1][1] = g_yy return H def gradient_descent(x0, f, f_prime, hessian=None, adaptative=False): \"\"\" Steepest-Descent", "2 * np.dot((np.square(x[0]) - x[1]), x[0]) + x[0] - 1 g_y = -1", "= list() all_f_i = list() for i in range(1, 100): all_x_i.append(x_i) all_y_i.append(y_i) all_f_i.append(f([x_i,", "2) \"\"\" g_xx = 6 * np.square(x[0]) - 2*x[1] + 1 g_xy =", "(x^2 - y)^2 + 0.5 x (x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[:][0])", "(x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[:][0]) - x[:][1]) + 0.5 * np.square(x[:][0]", "as plt from scipy.optimize import minimize, line_search def aokiFn(x): \"\"\" F(x,y) = 0.5", "all_x_i, all_y_i, all_f_i def main(): x1 = np.arange(0, 2, 0.1) x2 = np.arange(-0.5,", "y_i])) if adaptative: # Compute a step size using a line_search to satisfy", "* np.square(np.square(x[:][0]) - x[:][1]) + 0.5 * np.square(x[:][0] - 1) return f def", "np.square(np.square(x[0]) - x[1]) + 0.5 * np.square(x[0] - 1) return f def aoki_gd(x):", "+ x[0] - 1 g_y = -1 * (np.square(x[0]) - x[1]) return np.array((g_x,", "H = np.diag((2,2)) H[0][0] = g_xx H[0][1] = g_xy H[1][0] = g_xy H[1][1]", "None: step = 0 else: step = 1 x_i += - step*dx_i y_i", "0 else: step = 1 x_i += - step*dx_i y_i += - step*dy_i", "aoki function(Nabia - 2) \"\"\" g_xx = 6 * np.square(x[0]) - 2*x[1] +", "is None: step = 0 else: step = 1 x_i += - step*dx_i", "hessian = aoki_hess, adaptative = True) plt.scatter(r[0][:10], r[1][:10]) plt.plot(r[0][:10], r[1][:10]) plt.title('exact line 
search')", "* np.dot((np.square(x[0]) - x[1]), x[0]) + x[0] - 1 g_y = -1 *", "f_prime(np.asarray([x_i, y_i])) if adaptative: # Compute a step size using a line_search to", "\"\"\" Second-Order derivative - Hessian Matrix of aoki function(Nabia - 2) \"\"\" g_xx", "f, f_prime, hessian=None, adaptative=False): \"\"\" Steepest-Descent algorithm with option for line search \"\"\"", "- x[:][1]) + 0.5 * np.square(x[:][0] - 1) return f def aoki(x): \"\"\"", "= g_xy H[1][0] = g_xy H[1][1] = g_yy return H def gradient_descent(x0, f,", "adaptative: # Compute a step size using a line_search to satisfy the Wolf", "True) plt.scatter(r[0][:10], r[1][:10]) plt.plot(r[0][:10], r[1][:10]) plt.title('exact line search') plt.savefig(\"steepestDescentDemo.png\", dpi = 300) plt.show()", "(np.square(x[0]) - x[1]) return np.array((g_x, g_y)) def aoki_hess(x): \"\"\" Second-Order derivative - Hessian", "gradient_descent(np.array((0.0, 0.0)), aoki, aoki_gd, hessian = aoki_hess, adaptative = True) plt.scatter(r[0][:10], r[1][:10]) plt.plot(r[0][:10],", "(x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[0]) - x[1]) + 0.5 * np.square(x[0]", "r[1][:10]) plt.title('exact line search') plt.savefig(\"steepestDescentDemo.png\", dpi = 300) plt.show() if __name__ == \"__main__\":", "import matplotlib.pyplot as plt from scipy.optimize import minimize, line_search def aokiFn(x): \"\"\" F(x,y)", "adaptative=False): \"\"\" Steepest-Descent algorithm with option for line search \"\"\" x_i, y_i =", "f def aoki_gd(x): \"\"\" First-Order derivative of aoki function(Nabia - 1) \"\"\" g_x", "* np.square(x[0] - 1) return f def aoki_gd(x): \"\"\" First-Order derivative of aoki", "= list() for i in range(1, 100): all_x_i.append(x_i) all_y_i.append(y_i) all_f_i.append(f([x_i, y_i])) dx_i, dy_i", "0.1) x = np.meshgrid(x1, x2) z = aokiFn(np.array(x)) plt.contour(x1, x2, z, 50) plt.plot(1,", "- y)^2 + 0.5 x (x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[:][0]) -", "0.5 * np.square(np.square(x[:][0]) - x[:][1]) + 0.5 * np.square(x[:][0] - 1) return f", "c2=.05) step = step[0] if step is None: step = 0 else: step", "1 g_y = -1 * (np.square(x[0]) - x[1]) return np.array((g_x, g_y)) def aoki_hess(x):", "list() all_f_i = list() for i in range(1, 100): all_x_i.append(x_i) all_y_i.append(y_i) all_f_i.append(f([x_i, y_i]))", "1e-16: break return all_x_i, all_y_i, all_f_i def main(): x1 = np.arange(0, 2, 0.1)", "0.5 x (x^2 - y)^2 + 0.5 x (x-1)^2 \"\"\" f = 0.5", "aokiFn(x): \"\"\" F(x,y) = 0.5 x (x^2 - y)^2 + 0.5 x (x-1)^2", "- y)^2 + 0.5 x (x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[0]) -", "np.square(x[0]) - 2*x[1] + 1 g_xy = -2 * x[0] g_yy = 1", "+ 0.5 x (x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[0]) - x[1]) +", "in range(1, 100): all_x_i.append(x_i) all_y_i.append(y_i) all_f_i.append(f([x_i, y_i])) dx_i, dy_i = f_prime(np.asarray([x_i, y_i])) if", "y_i += - step*dy_i if np.abs(all_f_i[-1]) < 1e-16: break return all_x_i, all_y_i, all_f_i", "- step*dy_i if np.abs(all_f_i[-1]) < 1e-16: break return all_x_i, all_y_i, all_f_i def main():", "np.abs(all_f_i[-1]) < 1e-16: break return all_x_i, all_y_i, all_f_i def main(): x1 = np.arange(0,", "plt.contour(x1, x2, z, 50) plt.plot(1, 1, 'go', MarkerSize=10) r = gradient_descent(np.array((0.0, 0.0)), aoki,", "np.square(x[:][0] - 1) return f def aoki(x): \"\"\" F(x,y) = 0.5 x (x^2", "x[0]) + x[0] - 1 g_y = -1 * (np.square(x[0]) - x[1]) return", "6 * np.square(x[0]) - 2*x[1] + 1 g_xy = -2 * x[0] g_yy", "dy_i], np.r_[dx_i, dy_i], c2=.05) step = step[0] if step is None: step =", "x2 = np.arange(-0.5, 3, 0.1) 
def aoki_gd(x):
    """First-order derivative (gradient, nabla f) of Aoki's function."""
    g_x = 2 * (np.square(x[0]) - x[1]) * x[0] + x[0] - 1
    g_y = -1 * (np.square(x[0]) - x[1])
    return np.array((g_x, g_y))


def aoki_hess(x):
    """Second-order derivative (Hessian matrix, nabla^2 f) of Aoki's function."""
    g_xx = 6 * np.square(x[0]) - 2 * x[1] + 1
    g_xy = -2 * x[0]
    g_yy = 1
    # Allocate an explicitly float array: np.diag((2, 2)) would create an
    # integer array and silently truncate the float entries assigned below.
    H = np.empty((2, 2))
    H[0][0] = g_xx
    H[0][1] = g_xy
    H[1][0] = g_xy
    H[1][1] = g_yy
    return H
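# Sanity-check sketch (added for illustration; check_grad_at is hypothetical
# and not part of the original demo): compare the analytic gradient above with
# a central finite difference at a test point. The result should be ~1e-9 or
# smaller for a correct gradient.
def check_grad_at(pt, eps=1e-6):
    """Max abs difference between aoki_gd(pt) and a numerical gradient."""
    num = np.zeros(2)
    for k in range(2):
        step = np.zeros(2)
        step[k] = eps
        num[k] = (aoki(pt + step) - aoki(pt - step)) / (2 * eps)
    return np.max(np.abs(num - aoki_gd(pt)))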
def gradient_descent(x0, f, f_prime, hessian=None, adaptive=False):
    """Steepest-descent algorithm with an optional exact line search."""
    x_i, y_i = x0
    all_x_i = list()
    all_y_i = list()
    all_f_i = list()
    for i in range(1, 100):
        all_x_i.append(x_i)
        all_y_i.append(y_i)
        all_f_i.append(f([x_i, y_i]))
        dx_i, dy_i = f_prime(np.asarray([x_i, y_i]))
        if adaptive:
            # Compute a step size with scipy's line_search so that the
            # (strong) Wolfe conditions are satisfied.
            step = line_search(f, f_prime,
                               np.r_[x_i, y_i], -np.r_[dx_i, dy_i],
                               np.r_[dx_i, dy_i], c2=.05)
            step = step[0]
            if step is None:
                # The line search can fail to converge; fall back to a
                # zero step rather than take an unsafe move.
                step = 0
        else:
            step = 1
        x_i -= step * dx_i
        y_i -= step * dy_i
        if np.abs(all_f_i[-1]) < 1e-16:
            break
    return all_x_i, all_y_i, all_f_i
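# Usage sketch (illustrative, not in the original script): run the fixed-step
# variant from the same start point and inspect the recorded objective values.
#
#   xs, ys, fs = gradient_descent(np.array((0.0, 0.0)), aoki, aoki_gd)
#   print(fs[:5])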
"* np.square(x[0]) - 2*x[1] + 1 g_xy = -2 * x[0] g_yy =", "+ 1 g_xy = -2 * x[0] g_yy = 1 H = np.diag((2,2))", "- 1) return f def aoki_gd(x): \"\"\" First-Order derivative of aoki function(Nabia -", "np import matplotlib.pyplot as plt from scipy.optimize import minimize, line_search def aokiFn(x): \"\"\"", "y_i])) dx_i, dy_i = f_prime(np.asarray([x_i, y_i])) if adaptative: # Compute a step size", "return f def aoki(x): \"\"\" F(x,y) = 0.5 x (x^2 - y)^2 +", "line_search to satisfy the Wolf # conditions step = line_search(f, f_prime, np.r_[x_i, y_i],", "+= - step*dy_i if np.abs(all_f_i[-1]) < 1e-16: break return all_x_i, all_y_i, all_f_i def", "g_yy = 1 H = np.diag((2,2)) H[0][0] = g_xx H[0][1] = g_xy H[1][0]", "g_yy return H def gradient_descent(x0, f, f_prime, hessian=None, adaptative=False): \"\"\" Steepest-Descent algorithm with", "* x[0] g_yy = 1 H = np.diag((2,2)) H[0][0] = g_xx H[0][1] =", "range(1, 100): all_x_i.append(x_i) all_y_i.append(y_i) all_f_i.append(f([x_i, y_i])) dx_i, dy_i = f_prime(np.asarray([x_i, y_i])) if adaptative:", "f_prime, hessian=None, adaptative=False): \"\"\" Steepest-Descent algorithm with option for line search \"\"\" x_i,", "the Wolf # conditions step = line_search(f, f_prime, np.r_[x_i, y_i], -np.r_[dx_i, dy_i], np.r_[dx_i,", "def aoki(x): \"\"\" F(x,y) = 0.5 x (x^2 - y)^2 + 0.5 x", "# Compute a step size using a line_search to satisfy the Wolf #", "np.array((g_x, g_y)) def aoki_hess(x): \"\"\" Second-Order derivative - Hessian Matrix of aoki function(Nabia", "Wolf # conditions step = line_search(f, f_prime, np.r_[x_i, y_i], -np.r_[dx_i, dy_i], np.r_[dx_i, dy_i],", "aoki_hess(x): \"\"\" Second-Order derivative - Hessian Matrix of aoki function(Nabia - 2) \"\"\"", "0.5 * np.square(x[0] - 1) return f def aoki_gd(x): \"\"\" First-Order derivative of", "x (x^2 - y)^2 + 0.5 x (x-1)^2 \"\"\" f = 0.5 *", "\"\"\" g_xx = 6 * np.square(x[0]) - 2*x[1] + 1 g_xy = -2", "x0 all_x_i = list() all_y_i = list() all_f_i = list() for i in", "with option for line search \"\"\" x_i, y_i = x0 all_x_i = list()", "step[0] if step is None: step = 0 else: step = 1 x_i", "- 1) return f def aoki(x): \"\"\" F(x,y) = 0.5 x (x^2 -", "satisfy the Wolf # conditions step = line_search(f, f_prime, np.r_[x_i, y_i], -np.r_[dx_i, dy_i],", "x_i += - step*dx_i y_i += - step*dy_i if np.abs(all_f_i[-1]) < 1e-16: break", "* np.square(x[:][0] - 1) return f def aoki(x): \"\"\" F(x,y) = 0.5 x", "a step size using a line_search to satisfy the Wolf # conditions step", "return np.array((g_x, g_y)) def aoki_hess(x): \"\"\" Second-Order derivative - Hessian Matrix of aoki", "- 2*x[1] + 1 g_xy = -2 * x[0] g_yy = 1 H", "x[:][1]) + 0.5 * np.square(x[:][0] - 1) return f def aoki(x): \"\"\" F(x,y)", "g_xy H[1][1] = g_yy return H def gradient_descent(x0, f, f_prime, hessian=None, adaptative=False): \"\"\"", "x[0] - 1 g_y = -1 * (np.square(x[0]) - x[1]) return np.array((g_x, g_y))", "main(): x1 = np.arange(0, 2, 0.1) x2 = np.arange(-0.5, 3, 0.1) x =", "= g_yy return H def gradient_descent(x0, f, f_prime, hessian=None, adaptative=False): \"\"\" Steepest-Descent algorithm", "np.arange(-0.5, 3, 0.1) x = np.meshgrid(x1, x2) z = aokiFn(np.array(x)) plt.contour(x1, x2, z,", "g_y = -1 * (np.square(x[0]) - x[1]) return np.array((g_x, g_y)) def aoki_hess(x): \"\"\"", "y)^2 + 0.5 x (x-1)^2 \"\"\" f = 0.5 * np.square(np.square(x[0]) - x[1])", "np.square(x[0] - 1) return f def aoki_gd(x): \"\"\" First-Order derivative of aoki function(Nabia", "function(Nabia - 1) \"\"\" g_x = 2 * np.dot((np.square(x[0]) - x[1]), x[0]) +", "= 
True) plt.scatter(r[0][:10], r[1][:10]) plt.plot(r[0][:10], r[1][:10]) plt.title('exact line search') plt.savefig(\"steepestDescentDemo.png\", dpi = 300)", "+ 0.5 * np.square(x[:][0] - 1) return f def aoki(x): \"\"\" F(x,y) =", "all_f_i def main(): x1 = np.arange(0, 2, 0.1) x2 = np.arange(-0.5, 3, 0.1)", "= np.arange(-0.5, 3, 0.1) x = np.meshgrid(x1, x2) z = aokiFn(np.array(x)) plt.contour(x1, x2,", "Compute a step size using a line_search to satisfy the Wolf # conditions", "'go', MarkerSize=10) r = gradient_descent(np.array((0.0, 0.0)), aoki, aoki_gd, hessian = aoki_hess, adaptative =", "1) return f def aoki(x): \"\"\" F(x,y) = 0.5 x (x^2 - y)^2", "2*x[1] + 1 g_xy = -2 * x[0] g_yy = 1 H =", "aokiFn(np.array(x)) plt.contour(x1, x2, z, 50) plt.plot(1, 1, 'go', MarkerSize=10) r = gradient_descent(np.array((0.0, 0.0)),", "1, 'go', MarkerSize=10) r = gradient_descent(np.array((0.0, 0.0)), aoki, aoki_gd, hessian = aoki_hess, adaptative", "for i in range(1, 100): all_x_i.append(x_i) all_y_i.append(y_i) all_f_i.append(f([x_i, y_i])) dx_i, dy_i = f_prime(np.asarray([x_i,", "aoki(x): \"\"\" F(x,y) = 0.5 x (x^2 - y)^2 + 0.5 x (x-1)^2", "using a line_search to satisfy the Wolf # conditions step = line_search(f, f_prime,", "- x[1]) return np.array((g_x, g_y)) def aoki_hess(x): \"\"\" Second-Order derivative - Hessian Matrix", "step*dy_i if np.abs(all_f_i[-1]) < 1e-16: break return all_x_i, all_y_i, all_f_i def main(): x1", "step size using a line_search to satisfy the Wolf # conditions step =" ]
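# Note: the hessian argument is accepted by gradient_descent but never used.
# If one wanted a Newton step instead of steepest descent, it would look like
# this sketch (hypothetical, not part of the original demo):
#
#   g = aoki_gd(np.array([0.5, 1.5]))
#   H = aoki_hess(np.array([0.5, 1.5]))
#   newton_direction = np.linalg.solve(H, g)  # update is x <- x - newton_direction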