ngram
listlengths
0
82k
[ "(num_bits - int(signed))) - 1 return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int),", "-int(signed) * (2 ** (num_bits - int(signed))) max_int = (2", "2.0 (the \"License\"); # you may not use this file", "value for clipping min_val: minimum value for clipping (defaults to", "that manifests the bit shift the weight due to gptq", "tensor symmetrically with maximum LSBs shift. Args: input_tensor: Tensor to", "the threshold should be constrained or not. max_lsbs_change: maximum number", "power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor: \"\"\" Compute the power of two", "def log2(x: tf.Tensor) -> tf.Tensor: \"\"\" Compute log2 of a", "* tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable, max_tensor:", "float variable to an integer of values [-1, 0 ,1]", "to gradually quantize a float variable to an integer of", "Sony Semiconductors Israel, Inc. All rights reserved. # # Licensed", "Returns: clipped variable \"\"\" min_val = -max_val if min_val is", "Args: x: input float variable t: temperature to control quantization", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "tf.Variable, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool, max_lsbs_change:", "\"\"\" error = tf.stop_gradient(tf.math.round(x) - x) return error + x", "float variable t: temperature to control quantization Returns: semi-quantized variable", "min_val = -max_val if min_val is None else min_val return", "error = tf.stop_gradient(tf.math.round(x) - x) return error + x def", "power_of_two: max_tensor = power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q", "tf.Tensor: \"\"\" clip a variable between fixed values such that", "gradually quantize a float variable to an integer of values", "Semiconductors Israel, Inc. All rights reserved. # # Licensed under", "threshold should be constrained or not. 
max_lsbs_change: maximum number of", "tf.Variable], max_val=1, min_val=None) -> tf.Tensor: \"\"\" clip a variable between", "/ delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int = -int(signed) * (2", "of two threshold for a tensor. \"\"\" return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor,", "input variable max_val: maximum value for clipping min_val: minimum value", "-> tf.Tensor: \"\"\" Compute the step size for the quantization.", "use this file except in compliance with the License. #", "a tensor. \"\"\" return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor,", "tf.Tensor) -> tf.Tensor: \"\"\" Return the ceil values of a", "reserved. # # Licensed under the Apache License, Version 2.0", "max_tensor / (2 ** (num_bits - int(signed))) def adjustable_steps(x: tf.Variable,", "power_of_two: bool) -> tf.Tensor: \"\"\" Quantize a tensor symmetrically. Args:", "number of LSBs that the auxvar is allowed to change", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"\"\" Compute the power of two threshold for a tensor.", "auxvar_tensor: Tensor that manifests the bit shift the weight due", "License. # You may obtain a copy of the License", "and # limitations under the License. # ============================================================================== import tensorflow", "\"\"\" return max_tensor / (2 ** (num_bits - int(signed))) def", "(num_bits - int(signed))) max_int = (2 ** (num_bits - int(signed)))", "symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable, max_tensor: tf.Tensor, num_bits: int, signed: bool,", "x: input variable max_val: maximum value for clipping min_val: minimum", "tensor. 
\"\"\" if power_of_two: max_tensor = power_of_two_max(max_tensor) delta = calculate_delta(max_tensor,", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "x def log2(x: tf.Tensor) -> tf.Tensor: \"\"\" Compute log2 of", "due to gptq max_tensor: Tensor with max values to compute", "return error + x def log2(x: tf.Tensor) -> tf.Tensor: \"\"\"", "of a tensor. \"\"\" error = tf.stop_gradient(tf.math.ceil(x) - x) return", "Return the ceil values of a tensor. \"\"\" error =", "\"\"\" Return the ceil values of a tensor. \"\"\" error", "governing permissions and # limitations under the License. # ==============================================================================", "-> tf.Tensor: \"\"\" Return the ceil values of a tensor.", "the bit shift the weight due to gptq max_tensor: Tensor", "int(signed))) - 1 return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def", "values of this tensor are not changed during gptq. auxvar_tensor:", "License. # ============================================================================== import tensorflow as tf from model_compression_toolkit.common.constants import", "= tf.stop_gradient(tf.math.round(x) - x) return error + x def log2(x:", "in compliance with the License. # You may obtain a", "tensorflow as tf from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD def ste_ceil(x:", "maximum value for clipping min_val: minimum value for clipping (defaults", "the ceil values of a tensor. \"\"\" error = tf.stop_gradient(tf.math.ceil(x)", "software # distributed under the License is distributed on an", "the power of two threshold for a tensor. 
\"\"\" return", "delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable,", "max_int = (2 ** (num_bits - int(signed))) - 1 return", "1 def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None) -> tf.Tensor: \"\"\"", "or not. max_lsbs_change: maximum number of LSBs that the auxvar", "to control quantization Returns: semi-quantized variable \"\"\" return tf.sigmoid(tf.add(x, 1)", "-> tf.Tensor: \"\"\" Quantize a tensor symmetrically with maximum LSBs", "max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable, max_tensor: tf.Tensor, num_bits: int,", "under the License. # ============================================================================== import tensorflow as tf from", "error + x def log2(x: tf.Tensor) -> tf.Tensor: \"\"\" Compute", "int(signed))) def adjustable_steps(x: tf.Variable, t: float) -> tf.Tensor: \"\"\" A", "- x) return error + x def log2(x: tf.Tensor) ->", "t) - 1 def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None) ->", "+ x def log2(x: tf.Tensor) -> tf.Tensor: \"\"\" Compute log2", "min_val), max_val) - x) + x def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor:", "0 ,1] Args: x: input float variable t: temperature to", "values of a tensor. \"\"\" error = tf.stop_gradient(tf.math.ceil(x) - x)", "value for clipping (defaults to -max_val) Returns: clipped variable \"\"\"", "Args: input_tensor: Tensor to quantize. max_tensor: Tensor with max values", "signed: bool, power_of_two: bool) -> tf.Tensor: \"\"\" Quantize a tensor", "gptq max_tensor: Tensor with max values to compute the threshold.", "t) + tf.sigmoid(tf.add(x, -1) / t) - 1 def ste_clip(x:", "semi-quantized variable \"\"\" return tf.sigmoid(tf.add(x, 1) / t) + tf.sigmoid(tf.add(x,", "-> tf.Tensor: \"\"\" Compute log2 of a tensor. \"\"\" return", "Return the rounded values of a tensor. 
\"\"\" error =", "return tf.math.log(x) / tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor: \"\"\"", "calculate_delta(max_tensor: tf.Tensor, num_bits: int, signed: bool) -> tf.Tensor: \"\"\" Compute", "min_int), max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable, max_tensor: tf.Tensor, num_bits:", "that min_val<=output<=max_val Args: x: input variable max_val: maximum value for", "power_of_two: Whether the threshold should be constrained or not. max_lsbs_change:", "x) + x def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor, num_bits: int,", "to change Returns: A quantized tensor. \"\"\" if power_of_two: max_tensor", "Num of bits to use. signed: Signedness of the quantization", "ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor, num_bits: int, signed: bool) ->", "fixed values such that min_val<=output<=max_val Args: x: input variable max_val:", "+ x def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor, num_bits: int, signed:", "(num_bits - int(signed))) - 1 return delta * ste_clip(tensor_q, max_val=max_int,", "else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x) + x", "+ tf.sigmoid(tf.add(x, -1) / t) - 1 def ste_clip(x: [tf.Tensor,", "ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None) -> tf.Tensor: \"\"\" clip a", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "int = 1) -> tf.Tensor: \"\"\" Quantize a tensor symmetrically", "min_val: minimum value for clipping (defaults to -max_val) Returns: clipped", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "max_lsbs_change: maximum number of LSBs that the auxvar is allowed", "is allowed to change Returns: A quantized tensor. 
\"\"\" if", "values to compute the threshold. num_bits: Num of bits to", "t: float) -> tf.Tensor: \"\"\" A function to gradually quantize", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "Quantize a tensor symmetrically with maximum LSBs shift. Args: input_tensor:", "\"\"\" A function to gradually quantize a float variable to", "to in writing, software # distributed under the License is", "the quantization. \"\"\" return max_tensor / (2 ** (num_bits -", "# See the License for the specific language governing permissions", "tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor: \"\"\" Compute the power", "language governing permissions and # limitations under the License. #", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. # You may obtain a copy of", "temperature to control quantization Returns: semi-quantized variable \"\"\" return tf.sigmoid(tf.add(x,", "symmetrically with maximum LSBs shift. Args: input_tensor: Tensor to quantize.", "bool) -> tf.Tensor: \"\"\" Quantize a tensor symmetrically. Args: input_tensor:", "\"\"\" clip a variable between fixed values such that min_val<=output<=max_val", "of a tensor. \"\"\" return tf.math.log(x) / tf.math.log(2.0) def power_of_two_max(max_tensor:", "Israel, Inc. All rights reserved. # # Licensed under the", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "should be constrained or not. Returns: A quantized tensor. \"\"\"", "quantize. max_tensor: Tensor with max values to compute the threshold.", "values of a tensor. 
\"\"\" error = tf.stop_gradient(tf.math.round(x) - x)", "shift the weight due to gptq max_tensor: Tensor with max", "distributed under the License is distributed on an \"AS IS\"", "t: temperature to control quantization Returns: semi-quantized variable \"\"\" return", "def adjustable_steps(x: tf.Variable, t: float) -> tf.Tensor: \"\"\" A function", "= ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int = -int(signed)", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "variable between fixed values such that min_val<=output<=max_val Args: x: input", "a tensor. \"\"\" error = tf.stop_gradient(tf.math.round(x) - x) return error", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "such that min_val<=output<=max_val Args: x: input variable max_val: maximum value", "\"\"\" return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor, num_bits: int,", "compute the threshold. num_bits: Num of bits to use. signed:", "step size for the quantization. 
\"\"\" return max_tensor / (2", "[tf.Tensor, tf.Variable], max_val=1, min_val=None) -> tf.Tensor: \"\"\" clip a variable", "writing, software # distributed under the License is distributed on", "return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x) + x def symmetric_quantizer(input_tensor:", "that the auxvar is allowed to change Returns: A quantized", "return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor:", "you may not use this file except in compliance with", "input float variable t: temperature to control quantization Returns: semi-quantized", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "to an integer of values [-1, 0 ,1] Args: x:", "variable \"\"\" min_val = -max_val if min_val is None else", "x: input float variable t: temperature to control quantization Returns:", "is None else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x)", "x def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor, num_bits: int, signed: bool,", "quantize. values of this tensor are not changed during gptq.", "# limitations under the License. # ============================================================================== import tensorflow as", "signed: bool, power_of_two: bool, max_lsbs_change: int = 1) -> tf.Tensor:", "CONDITIONS OF ANY KIND, either express or implied. # See", "to compute the threshold. num_bits: Num of bits to use.", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "as tf from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD def ste_ceil(x: tf.Tensor)", "with maximum LSBs shift. Args: input_tensor: Tensor to quantize. values", "bits to use. signed: Signedness of the quantization range. power_of_two:", "threshold. num_bits: Num of bits to use. signed: Signedness of", "Tensor to quantize. 
values of this tensor are not changed", "calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor,", "should be constrained or not. max_lsbs_change: maximum number of LSBs", "tf.Tensor: \"\"\" Compute the power of two threshold for a", "return max_tensor / (2 ** (num_bits - int(signed))) def adjustable_steps(x:", "+ x def ste_round(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the", "error + x def ste_round(x: tf.Tensor) -> tf.Tensor: \"\"\" Return", "to gptq max_tensor: Tensor with max values to compute the", "for clipping (defaults to -max_val) Returns: clipped variable \"\"\" min_val", "max_val=1, min_val=None) -> tf.Tensor: \"\"\" clip a variable between fixed", "for a tensor. \"\"\" return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor:", "tf.math.log(x) / tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor: \"\"\" Compute", "tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable, max_tensor: tf.Tensor,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(input_tensor / delta) min_int =", "adjustable_steps(x: tf.Variable, t: float) -> tf.Tensor: \"\"\" A function to", "the License is distributed on an \"AS IS\" BASIS, #", "shift. Args: input_tensor: Tensor to quantize. values of this tensor", "\"\"\" Compute the step size for the quantization. \"\"\" return", "function to gradually quantize a float variable to an integer", "variable t: temperature to control quantization Returns: semi-quantized variable \"\"\"", "= calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) +", "a tensor symmetrically. Args: input_tensor: Tensor to quantize. 
max_tensor: Tensor", "1 return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor,", "int, signed: bool, power_of_two: bool) -> tf.Tensor: \"\"\" Quantize a", "quantization Returns: semi-quantized variable \"\"\" return tf.sigmoid(tf.add(x, 1) / t)", "num_bits: int, signed: bool, power_of_two: bool) -> tf.Tensor: \"\"\" Quantize", "tf.Tensor, num_bits: int, signed: bool, power_of_two: bool, max_lsbs_change: int =", "Signedness of the quantization range. power_of_two: Whether the threshold should", "\"\"\" Quantize a tensor symmetrically with maximum LSBs shift. Args:", "this tensor are not changed during gptq. auxvar_tensor: Tensor that", "bit shift the weight due to gptq max_tensor: Tensor with", "x def ste_round(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the rounded", "tf.Tensor: \"\"\" Compute the step size for the quantization. \"\"\"", "Tensor that manifests the bit shift the weight due to", "tf.Tensor, num_bits: int, signed: bool, power_of_two: bool) -> tf.Tensor: \"\"\"", "law or agreed to in writing, software # distributed under", "auxvar is allowed to change Returns: A quantized tensor. \"\"\"", "tf.Tensor) -> tf.Tensor: \"\"\" Return the rounded values of a", "control quantization Returns: semi-quantized variable \"\"\" return tf.sigmoid(tf.add(x, 1) /", "A quantized tensor. \"\"\" if power_of_two: max_tensor = power_of_two_max(max_tensor) delta", "def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable, max_tensor: tf.Tensor, num_bits: int, signed:", "a tensor. \"\"\" return tf.math.log(x) / tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor)", "min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x) + x def", "num_bits: Num of bits to use. 
signed: Signedness of the", "bool) -> tf.Tensor: \"\"\" Compute the step size for the", "delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta))", "power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor /", "\"\"\" return tf.math.log(x) / tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor:", "tf.Tensor, num_bits: int, signed: bool) -> tf.Tensor: \"\"\" Compute the", "quantized tensor. \"\"\" if power_of_two: max_tensor = power_of_two_max(max_tensor) delta =", "** (num_bits - int(signed))) - 1 return delta * ste_clip(tensor_q,", "quantization range. power_of_two: Whether the threshold should be constrained or", "tf.Tensor, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool) ->", "ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int = -int(signed) * (2 ** (num_bits -", "variable \"\"\" return tf.sigmoid(tf.add(x, 1) / t) + tf.sigmoid(tf.add(x, -1)", "- x) return error + x def ste_round(x: tf.Tensor) ->", "a float variable to an integer of values [-1, 0", "may obtain a copy of the License at # #", "tf.Tensor: \"\"\" Return the ceil values of a tensor. \"\"\"", "the quantization range. power_of_two: Whether the threshold should be constrained", "Tensor to quantize. max_tensor: Tensor with max values to compute", "signed: Signedness of the quantization range. power_of_two: Whether the threshold", "Copyright 2021 Sony Semiconductors Israel, Inc. All rights reserved. #", "tf.Tensor) -> tf.Tensor: \"\"\" Compute log2 of a tensor. \"\"\"", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "two threshold for a tensor. 
\"\"\" return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD))))", "signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int", "def ste_round(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the rounded values", "log2 of a tensor. \"\"\" return tf.math.log(x) / tf.math.log(2.0) def", "- int(signed))) - 1 return delta * ste_clip(tensor_q, max_val=max_int, min_val=min_int)", "may not use this file except in compliance with the", "of bits to use. signed: Signedness of the quantization range.", "to -max_val) Returns: clipped variable \"\"\" min_val = -max_val if", "ceil values of a tensor. \"\"\" error = tf.stop_gradient(tf.math.ceil(x) -", "float) -> tf.Tensor: \"\"\" A function to gradually quantize a", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "num_bits: int, signed: bool) -> tf.Tensor: \"\"\" Compute the step", "this file except in compliance with the License. # You", "a tensor symmetrically with maximum LSBs shift. Args: input_tensor: Tensor", "power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(input_tensor /", "integer of values [-1, 0 ,1] Args: x: input float", "return error + x def ste_round(x: tf.Tensor) -> tf.Tensor: \"\"\"", "tensor. 
\"\"\" error = tf.stop_gradient(tf.math.ceil(x) - x) return error +", "maximum number of LSBs that the auxvar is allowed to", "-max_val if min_val is None else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val),", "tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x) + x def symmetric_quantizer(input_tensor: tf.Tensor,", "-> tf.Tensor: \"\"\" Compute the power of two threshold for", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "\"\"\" if power_of_two: max_tensor = power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits,", "# # Licensed under the Apache License, Version 2.0 (the", "tensor. \"\"\" return tf.math.log(x) / tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor) ->", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "import MIN_THRESHOLD, THRESHOLD def ste_ceil(x: tf.Tensor) -> tf.Tensor: \"\"\" Return", "(num_bits - int(signed))) def adjustable_steps(x: tf.Variable, t: float) -> tf.Tensor:", "be constrained or not. max_lsbs_change: maximum number of LSBs that", "LSBs that the auxvar is allowed to change Returns: A", "A function to gradually quantize a float variable to an", "of a tensor. \"\"\" error = tf.stop_gradient(tf.math.round(x) - x) return", "to quantize. max_tensor: Tensor with max values to compute the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "the weight due to gptq max_tensor: Tensor with max values", "-> tf.Tensor: \"\"\" Return the rounded values of a tensor.", "clipping (defaults to -max_val) Returns: clipped variable \"\"\" min_val =", "int, signed: bool) -> tf.Tensor: \"\"\" Compute the step size", "max values to compute the threshold. num_bits: Num of bits", "max_val=max_lsbs_change)) min_int = -int(signed) * (2 ** (num_bits - int(signed)))", "rights reserved. 
# # Licensed under the Apache License, Version", "between fixed values such that min_val<=output<=max_val Args: x: input variable", "use. signed: Signedness of the quantization range. power_of_two: Whether the", "ste_round(input_tensor / delta) min_int = -int(signed) * (2 ** (num_bits", "ste_ceil(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the ceil values of", "delta) min_int = -int(signed) * (2 ** (num_bits - int(signed)))", "input_tensor: Tensor to quantize. values of this tensor are not", "limitations under the License. # ============================================================================== import tensorflow as tf", "-> tf.Tensor: \"\"\" Quantize a tensor symmetrically. Args: input_tensor: Tensor", "for the quantization. \"\"\" return max_tensor / (2 ** (num_bits", "symmetrically. Args: input_tensor: Tensor to quantize. max_tensor: Tensor with max", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "THRESHOLD def ste_ceil(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the ceil", "Args: x: input variable max_val: maximum value for clipping min_val:", "bool, power_of_two: bool) -> tf.Tensor: \"\"\" Quantize a tensor symmetrically.", "ste_round(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the rounded values of", "int(signed))) max_int = (2 ** (num_bits - int(signed))) - 1", "# ============================================================================== import tensorflow as tf from model_compression_toolkit.common.constants import MIN_THRESHOLD,", "or implied. 
# See the License for the specific language", "** (num_bits - int(signed))) def adjustable_steps(x: tf.Variable, t: float) ->", "max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool, max_lsbs_change: int", "delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(input_tensor / delta)", "- 1 def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None) -> tf.Tensor:", "(2 ** (num_bits - int(signed))) - 1 return delta *", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "max_val: maximum value for clipping min_val: minimum value for clipping", "the License. # ============================================================================== import tensorflow as tf from model_compression_toolkit.common.constants", "-max_val) Returns: clipped variable \"\"\" min_val = -max_val if min_val", "\"\"\" Quantize a tensor symmetrically. Args: input_tensor: Tensor to quantize.", "tf.Tensor: \"\"\" Return the rounded values of a tensor. \"\"\"", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "tensor. \"\"\" error = tf.stop_gradient(tf.math.round(x) - x) return error +", "Compute log2 of a tensor. \"\"\" return tf.math.log(x) / tf.math.log(2.0)", "Compute the power of two threshold for a tensor. \"\"\"", "permissions and # limitations under the License. 
# ============================================================================== import", "-1) / t) - 1 def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1,", "(the \"License\"); # you may not use this file except", "= calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(input_tensor / delta) min_int", "of LSBs that the auxvar is allowed to change Returns:", "# you may not use this file except in compliance", "min_val<=output<=max_val Args: x: input variable max_val: maximum value for clipping", "min_val=None) -> tf.Tensor: \"\"\" clip a variable between fixed values", "power_of_two: bool, max_lsbs_change: int = 1) -> tf.Tensor: \"\"\" Quantize", "or not. Returns: A quantized tensor. \"\"\" if power_of_two: max_tensor", "/ (2 ** (num_bits - int(signed))) def adjustable_steps(x: tf.Variable, t:", "maximum LSBs shift. Args: input_tensor: Tensor to quantize. values of", "clipping min_val: minimum value for clipping (defaults to -max_val) Returns:", "import tensorflow as tf from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD def", "[-1, 0 ,1] Args: x: input float variable t: temperature", "if min_val is None else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val)", "max_tensor: Tensor with max values to compute the threshold. num_bits:", "tf.stop_gradient(tf.math.ceil(x) - x) return error + x def ste_round(x: tf.Tensor)", "def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor: \"\"\" Compute the power of", "during gptq. 
auxvar_tensor: Tensor that manifests the bit shift the", "# # Unless required by applicable law or agreed to", "variable max_val: maximum value for clipping min_val: minimum value for", "auxvar_tensor: tf.Variable, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool,", "signed: bool) -> tf.Tensor: \"\"\" Compute the step size for", "-> tf.Tensor: \"\"\" clip a variable between fixed values such", "max_lsbs_change: int = 1) -> tf.Tensor: \"\"\" Quantize a tensor", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "num_bits, signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change))", "Version 2.0 (the \"License\"); # you may not use this", "= power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(input_tensor", "from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD def ste_ceil(x: tf.Tensor) -> tf.Tensor:", "tf.stop_gradient(tf.math.round(x) - x) return error + x def log2(x: tf.Tensor)", "(2 ** (num_bits - int(signed))) def adjustable_steps(x: tf.Variable, t: float)", "\"\"\" Return the rounded values of a tensor. \"\"\" error", "allowed to change Returns: A quantized tensor. \"\"\" if power_of_two:", "tf from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD def ste_ceil(x: tf.Tensor) ->", "(defaults to -max_val) Returns: clipped variable \"\"\" min_val = -max_val", "/ tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor: \"\"\" Compute the", "============================================================================== import tensorflow as tf from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD", "implied. # See the License for the specific language governing", "Args: input_tensor: Tensor to quantize. 
values of this tensor are", "num_bits: int, signed: bool, power_of_two: bool, max_lsbs_change: int = 1)", "(2 ** (num_bits - int(signed))) max_int = (2 ** (num_bits", "under the Apache License, Version 2.0 (the \"License\"); # you", "= 1) -> tf.Tensor: \"\"\" Quantize a tensor symmetrically with", "def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None) -> tf.Tensor: \"\"\" clip", "<filename>model_compression_toolkit/keras/quantizer/gradient_ptq/utils.py<gh_stars>0 # Copyright 2021 Sony Semiconductors Israel, Inc. All rights", "Returns: A quantized tensor. \"\"\" if power_of_two: max_tensor = power_of_two_max(max_tensor)", "# Copyright 2021 Sony Semiconductors Israel, Inc. All rights reserved.", "constrained or not. max_lsbs_change: maximum number of LSBs that the", "with max values to compute the threshold. num_bits: Num of", "tf.Variable, t: float) -> tf.Tensor: \"\"\" A function to gradually", "All rights reserved. # # Licensed under the Apache License,", "by applicable law or agreed to in writing, software #", "min_val is None else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) -", "model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD def ste_ceil(x: tf.Tensor) -> tf.Tensor: \"\"\"", "for clipping min_val: minimum value for clipping (defaults to -max_val)", "threshold should be constrained or not. Returns: A quantized tensor.", "minimum value for clipping (defaults to -max_val) Returns: clipped variable", "clip a variable between fixed values such that min_val<=output<=max_val Args:", "constrained or not. Returns: A quantized tensor. \"\"\" if power_of_two:", "tf.Tensor: \"\"\" A function to gradually quantize a float variable", "tensor. \"\"\" return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor, num_bits:", "= (2 ** (num_bits - int(signed))) - 1 return delta", "tf.Tensor: \"\"\" Quantize a tensor symmetrically. 
Args: input_tensor: Tensor to", "return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor, num_bits: int, signed:", "tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int =", "of this tensor are not changed during gptq. auxvar_tensor: Tensor", "* (2 ** (num_bits - int(signed))) max_int = (2 **", "not changed during gptq. auxvar_tensor: Tensor that manifests the bit", "- int(signed))) max_int = (2 ** (num_bits - int(signed))) -", "max_tensor = power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q =", "gptq. auxvar_tensor: Tensor that manifests the bit shift the weight", "a variable between fixed values such that min_val<=output<=max_val Args: x:", "** (num_bits - int(signed))) max_int = (2 ** (num_bits -", "x) return error + x def ste_round(x: tf.Tensor) -> tf.Tensor:", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "log2(x: tf.Tensor) -> tf.Tensor: \"\"\" Compute log2 of a tensor.", "threshold for a tensor. \"\"\" return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def", "\"\"\" error = tf.stop_gradient(tf.math.ceil(x) - x) return error + x", "num_bits, signed) tensor_q = ste_round(input_tensor / delta) min_int = -int(signed)", "tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor, num_bits: int, signed: bool)", "the specific language governing permissions and # limitations under the", "-> tf.Tensor: \"\"\" A function to gradually quantize a float", "tf.Tensor) -> tf.Tensor: \"\"\" Compute the power of two threshold", "applicable law or agreed to in writing, software # distributed", "Inc. All rights reserved. 
# # Licensed under the Apache", "ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int = -int(signed) *", "size for the quantization. \"\"\" return max_tensor / (2 **", "return tf.sigmoid(tf.add(x, 1) / t) + tf.sigmoid(tf.add(x, -1) / t)", "int, signed: bool, power_of_two: bool, max_lsbs_change: int = 1) ->", "def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two:", "bool, max_lsbs_change: int = 1) -> tf.Tensor: \"\"\" Quantize a", "Quantize a tensor symmetrically. Args: input_tensor: Tensor to quantize. max_tensor:", "in writing, software # distributed under the License is distributed", "Returns: semi-quantized variable \"\"\" return tf.sigmoid(tf.add(x, 1) / t) +", "range. power_of_two: Whether the threshold should be constrained or not.", "to quantize. values of this tensor are not changed during", "tf.Tensor: \"\"\" Compute log2 of a tensor. \"\"\" return tf.math.log(x)", "signed) tensor_q = ste_round(input_tensor / delta) min_int = -int(signed) *", "tensor are not changed during gptq. auxvar_tensor: Tensor that manifests", "- 1 return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def symmetric_constrained_quantizer(input_tensor:", "tensor symmetrically. Args: input_tensor: Tensor to quantize. max_tensor: Tensor with", "change Returns: A quantized tensor. \"\"\" if power_of_two: max_tensor =", "min_int = -int(signed) * (2 ** (num_bits - int(signed))) max_int", "changed during gptq. 
auxvar_tensor: Tensor that manifests the bit shift", "= tf.stop_gradient(tf.math.ceil(x) - x) return error + x def ste_round(x:", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor, num_bits: int, signed: bool) -> tf.Tensor:", "License, Version 2.0 (the \"License\"); # you may not use", "quantize a float variable to an integer of values [-1,", "= ste_round(input_tensor / delta) min_int = -int(signed) * (2 **", "not. Returns: A quantized tensor. \"\"\" if power_of_two: max_tensor =", "# You may obtain a copy of the License at", "/ delta) min_int = -int(signed) * (2 ** (num_bits -", "clipped variable \"\"\" min_val = -max_val if min_val is None", "def ste_ceil(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the ceil values", ",1] Args: x: input float variable t: temperature to control", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "error = tf.stop_gradient(tf.math.ceil(x) - x) return error + x def", "the auxvar is allowed to change Returns: A quantized tensor.", "max_val) - x) + x def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor,", "the step size for the quantization. \"\"\" return max_tensor /", "an integer of values [-1, 0 ,1] Args: x: input", "the License for the specific language governing permissions and #", "tf.Tensor, auxvar_tensor: tf.Variable, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two:", "the rounded values of a tensor. \"\"\" error = tf.stop_gradient(tf.math.round(x)", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "to use. signed: Signedness of the quantization range. 
power_of_two: Whether", "/ t) - 1 def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None)", "- int(signed))) - 1 return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int)", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "of the quantization range. power_of_two: Whether the threshold should be", "- x) + x def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor, num_bits:", "rounded values of a tensor. \"\"\" error = tf.stop_gradient(tf.math.round(x) -", "of values [-1, 0 ,1] Args: x: input float variable", "manifests the bit shift the weight due to gptq max_tensor:", "2021 Sony Semiconductors Israel, Inc. All rights reserved. # #", "weight due to gptq max_tensor: Tensor with max values to", "+ ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int = -int(signed) * (2 ** (num_bits", "def calculate_delta(max_tensor: tf.Tensor, num_bits: int, signed: bool) -> tf.Tensor: \"\"\"", "the threshold. num_bits: Num of bits to use. signed: Signedness", "input_tensor: Tensor to quantize. max_tensor: Tensor with max values to", "- int(signed))) def adjustable_steps(x: tf.Variable, t: float) -> tf.Tensor: \"\"\"", "None else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x) +", "\"\"\" Compute log2 of a tensor. \"\"\" return tf.math.log(x) /", "tf.Tensor: \"\"\" Quantize a tensor symmetrically with maximum LSBs shift.", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= -max_val if min_val is None else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x,", "x) return error + x def log2(x: tf.Tensor) -> tf.Tensor:", "Whether the threshold should be constrained or not. 
max_lsbs_change: maximum", "symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool)", "values such that min_val<=output<=max_val Args: x: input variable max_val: maximum", "delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int = -int(signed) * (2 **", "tf.sigmoid(tf.add(x, 1) / t) + tf.sigmoid(tf.add(x, -1) / t) -", "= -int(signed) * (2 ** (num_bits - int(signed))) max_int =", "be constrained or not. Returns: A quantized tensor. \"\"\" if", "\"License\"); # you may not use this file except in", "if power_of_two: max_tensor = power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed)", "tensor_q = ste_round(input_tensor / delta) min_int = -int(signed) * (2", "MIN_THRESHOLD, THRESHOLD def ste_ceil(x: tf.Tensor) -> tf.Tensor: \"\"\" Return the", "values [-1, 0 ,1] Args: x: input float variable t:", "power of two threshold for a tensor. \"\"\" return tf.math.pow(2.0,", "\"\"\" return tf.sigmoid(tf.add(x, 1) / t) + tf.sigmoid(tf.add(x, -1) /", "not. max_lsbs_change: maximum number of LSBs that the auxvar is", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# distributed under the License is distributed on an \"AS", "1) / t) + tf.sigmoid(tf.add(x, -1) / t) - 1", "# Unless required by applicable law or agreed to in", "a tensor. \"\"\" error = tf.stop_gradient(tf.math.ceil(x) - x) return error", "1) -> tf.Tensor: \"\"\" Quantize a tensor symmetrically with maximum", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool) -> tf.Tensor:", "are not changed during gptq. auxvar_tensor: Tensor that manifests the", "= power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor", "/ t) + tf.sigmoid(tf.add(x, -1) / t) - 1 def", "Compute the step size for the quantization. 
\"\"\" return max_tensor", "You may obtain a copy of the License at #", "quantization. \"\"\" return max_tensor / (2 ** (num_bits - int(signed)))", "the threshold should be constrained or not. Returns: A quantized", "power_of_two: Whether the threshold should be constrained or not. Returns:", "\"\"\" min_val = -max_val if min_val is None else min_val", "Whether the threshold should be constrained or not. Returns: A", "the Apache License, Version 2.0 (the \"License\"); # you may", "** (num_bits - int(signed))) - 1 return delta * tf.math.minimum(tf.math.maximum(tensor_q,", "tf.sigmoid(tf.add(x, -1) / t) - 1 def ste_clip(x: [tf.Tensor, tf.Variable],", "variable to an integer of values [-1, 0 ,1] Args:", "Tensor with max values to compute the threshold. num_bits: Num", "LSBs shift. Args: input_tensor: Tensor to quantize. values of this", "bool, power_of_two: bool, max_lsbs_change: int = 1) -> tf.Tensor: \"\"\"" ]
[ "see AUTHORS. :license: BSD, see LICENSE for details. \"\"\" import", "in self.LOGIC self.cur.append((start, Operator, text[start])) start = self.formula(start+1, text) assert", "except AssertionError: # not well-formed del self.cur[orig:] while text[end] not", "start if end != start: self.cur.append((start, Text, text[start:end])) return end", "# skip whitespace after formula orig = len(self.cur) try: start", "text[group[1]:match.end()])) break else: self.cur.append((start, Keyword, text[start:match.end()])) return match.end() def lineno(self,", "text, True) except AssertionError: del self.cur[orig:] start = end =", "def rule(self, start, text): \"\"\"Tokenize a rule.\"\"\" match = self.RULES.match(text,", ":license: BSD, see LICENSE for details. \"\"\" import re from", "]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment | contrapositive | De\\\\", "could be a comment match = self.COMMENT.match(text, start) if match", "``start`` to the end of the line as Error.\"\"\" end", "aliases = ['tnt'] filenames = ['*.tnt'] cur = [] LOGIC", "self.cur.append((start, Punctuation, text[start])) start = self.term(start+1, text) assert text[start] in", "self.cur.append((start, Punctuation, text[start])) return self.formula(start+1, text) if text[start] == '<':", "self.cur[orig:] while text[end] not in self.WHITESPACE: end += 1 self.cur.append((start,", "= end = self.error_till_line_end(start, text) continue # rule proving this", "return end def variable(self, start, text): \"\"\"Tokenize a variable.\"\"\" assert", "| add\\\\ S | drop\\\\ S | induction | axiom\\\\", "self.term(start+1, text) assert text[start] in self.OPERATORS self.cur.append((start, Operator, text[start])) start", "utf-8 -*- \"\"\" pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for Typographic Number Theory.", "text[start] in self.NEGATORS: # ~<...> end = start+1 while text[end]", "joining | separation | double-tilde | fantasy\\\\ rule | carry[-", "= start+1 while text[end] in self.NEGATORS: end += 1 
self.cur.append((start,", "match.end(): self.cur.append((group[1], Keyword, text[group[1]:match.end()])) break else: self.cur.append((start, Keyword, text[start:match.end()])) return", "start while text[end] not in self.NUMBERS: end += 1 self.cur.append((start,", "end = start try: while text[end] != '\\n': # there's", "self.rule(start, text) except AssertionError: del self.cur[orig:] start = end =", "self.error_till_line_end(start, text) continue # skip whitespace after rule start =", "by the Pygments team, see AUTHORS. :license: BSD, see LICENSE", "self.variable(start+1, text) assert text[start] == ':' self.cur.append((start, Punctuation, text[start])) return", "assert text[start] == '=' self.cur.append((start, Operator, text[start])) start = self.term(start+1,", "Lexer from pygments.token import Text, Comment, Operator, Keyword, Name, Number,", "AssertionError: # not well-formed del self.cur[orig:] while text[end] not in", "self.formula(start+1, text) if text[start] == '<': # <...&...> self.cur.append((start, Punctuation,", "Text, text[start:end])) return end def variable(self, start, text): \"\"\"Tokenize a", "'<': # <...&...> self.cur.append((start, Punctuation, text[start])) start = self.formula(start+1, text)", "pygments.lexer import Lexer from pygments.token import Text, Comment, Operator, Keyword,", "transitivity | add\\\\ S | drop\\\\ S | induction |", "['tnt'] filenames = ['*.tnt'] cur = [] LOGIC = set('⊃→]&∧^|∨Vv')", "text[start])) start = self.term(start+1, text) assert text[start] == ')' self.cur.append((start,", "end += 1 self.cur.append((start, Error, text[start:end])) start = end #", "start = end = self.error_till_line_end(start, text) continue start = end", "in self.NUMBERS: end += 1 self.cur.append((start, Punctuation, text[start])) self.cur.append((start+1, Text,", "text[start] == '=' self.cur.append((start, Operator, text[start])) start = self.term(start+1, text)", "return start+1 if text[start] in self.NEGATORS: # ~<...> end =", 
"Keyword.Declaration, text[start])) start = self.variable(start+1, text) assert text[start] == ':'", "Error, text[start:end])) start = end # skip whitespace after formula", "'[]': # fantasy push or pop self.cur.append((start, Keyword, text[start])) return", "self.cur.append((start, Punctuation, text[start])) self.cur.append((start+1, Text, text[start+1:end])) start = end match", "text[start:end])) # whitespace is required after a line number orig", "= set('~!') QUANTIFIERS = set('AE∀∃') NUMBERS = set('0123456789') WHITESPACE =", "point it could be a comment match = self.COMMENT.match(text, start)", "# not well-formed del self.cur[orig:] while text[end] not in self.WHITESPACE:", "start) assert match is not None assert text[match.end()] == ')'", "present self.cur.append((start, Number.Integer, text[start:end])) # whitespace is required after a", "\"\"\"Mark everything from ``start`` to the end of the line", "in self.WHITESPACE: end += 1 except IndexError: end = len(text)", "if text[start] == '(': orig = len(self.cur) try: start =", "return self.formula(start+1, text) if text[start] == '<': # <...&...> self.cur.append((start,", "pop self.cur.append((start, Keyword, text[start])) return start+1 if text[start] in self.NEGATORS:", "text) assert text[start] in self.LOGIC self.cur.append((start, Operator, text[start])) start =", "in self.NUMBERS: end += 1 if end != start: #", "start, text, required=False): \"\"\"Tokenize whitespace.\"\"\" end = start try: while", "assert text[start] == ')' self.cur.append((start, Punctuation, text[start])) return start+1 raise", "text[end] not in self.NUMBERS: end += 1 self.cur.append((start, Punctuation, text[start]))", "text[start])) return start+1 if text[start] in self.NEGATORS: # ~<...> end", "carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment | contrapositive |", "a rule.\"\"\" match = self.RULES.match(text, start) assert match is not", "Number Theory, as described in the book <NAME>, by <NAME>,", 
"self.cur.append((start, Punctuation, text[start])) return start+1 raise AssertionError # no matches", "# S...S(...) or S...0 end = start+1 while text[end] ==", "if group[0] >= 0: # this group matched self.cur.append((start, Keyword,", "~<...> end = start+1 while text[end] in self.NEGATORS: end +=", "= end = self.formula(start, text) except AssertionError: # not well-formed", "'Typographic Number Theory' aliases = ['tnt'] filenames = ['*.tnt'] cur", "Punctuation, text[start])) start = self.formula(start+1, text) assert text[start] in self.LOGIC", "TNT tokens.\"\"\" self.cur = [] start = end = self.whitespace(0,", "self.cur.append((start, Punctuation, text[start])) return start+1 # ...=... start = self.term(start,", "2019-2020 by the Pygments team, see AUTHORS. :license: BSD, see", "self.formula(end, text) if text[start] in self.QUANTIFIERS: # Aa:<...> self.cur.append((start, Keyword.Declaration,", "end = self.error_till_line_end(end, text) continue # at this point it", "text[start])) return start+1 raise AssertionError # no matches def formula(self,", "set('0123456789') WHITESPACE = set('\\t \\v\\n') RULES = re.compile('''(?xi) joining |", "= end = self.error_till_line_end(start, text) continue start = end =", "to process the rest continue del match # one formula,", "del self.cur[orig:] start = end = self.error_till_line_end(start, text) continue #", "len(self.cur) try: start = end = self.lineno(start, text) except AssertionError:", "Typographic Number Theory. :copyright: Copyright 2019-2020 by the Pygments team,", "self.OPERATORS self.cur.append((start, Operator, text[start])) start = self.term(start+1, text) assert text[start]", "if text[start] in self.VARIABLES: # a''... 
return self.variable(start, text) if", "start+1 raise AssertionError # no matches def formula(self, start, text):", "\"\"\"Tokenize a line marker.\"\"\" end = start while text[end] not", "do not attempt to process the rest continue del match", "self.cur.append((match.end(), Punctuation, text[match.end()])) return match.end() + 1 def error_till_line_end(self, start,", "self.cur.append((start, Operator, text[start])) start = self.term(start+1, text) return start def", "start def rule(self, start, text): \"\"\"Tokenize a rule.\"\"\" match =", "S | induction | axiom\\\\ ([1-5]) | premise | push", "text): \"\"\"Returns a list of TNT tokens.\"\"\" self.cur = []", "del self.cur[orig:] while text[end] not in self.WHITESPACE: end += 1", "self.cur[orig:] start = end = self.error_till_line_end(start, text) continue start =", "one formula, possibly containing subformulae orig = len(self.cur) try: start", "in self.VARIABLES end = start+1 while text[end] in self.PRIMES: end", "Operator, text[start])) start = self.term(start+1, text) return start def rule(self,", "\"\"\" pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for Typographic Number Theory. 
:copyright: Copyright", "len(text) if end != start: self.cur.append((start, Error, text[start:end])) end =", "text, required=False): \"\"\"Tokenize whitespace.\"\"\" end = start try: while text[end]", "1 self.cur.append((start, Number.Integer, text[start:end])) return self.term(end, text) if text[start] ==", "== ':' self.cur.append((start, Punctuation, text[start])) return self.formula(start+1, text) if text[start]", "start, text): \"\"\"Tokenize a term.\"\"\" if text[start] == 'S': #", "\"\"\" import re from pygments.lexer import Lexer from pygments.token import", "from pygments.lexer import Lexer from pygments.token import Text, Comment, Operator,", "return match.end() + 1 def error_till_line_end(self, start, text): \"\"\"Mark everything", "'0': # the singleton 0 self.cur.append((start, Number.Integer, text[start])) return start+1", "end of the line as Error.\"\"\" end = start try:", "= end # skip whitespace after formula orig = len(self.cur)", "text[start])) start = self.variable(start+1, text) assert text[start] == ':' self.cur.append((start,", "a theorem orig = len(self.cur) try: start = end =", "try: while text[end] in self.WHITESPACE: end += 1 except IndexError:", "attempt to process the rest continue del match # one", "COMMENT = re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self, start, text, required=False): \"\"\"Tokenize whitespace.\"\"\"", "group matched self.cur.append((start, Keyword, text[start:group[0]])) self.cur.append((group[0], Number.Integer, text[group[0]:group[1]])) if group[1]", "None assert text[match.end()] == ')' self.cur.append((match.start(), Number.Integer, match.group(0))) self.cur.append((match.end(), Punctuation,", "!= start: self.cur.append((start, Error, text[start:end])) end = self.whitespace(end, text) return", "= end = self.error_till_line_end(end, text) continue # at this point", "self.LOGIC self.cur.append((start, Operator, text[start])) start = self.formula(start+1, text) assert text[start]", "group in groups: if group[0] 
>= 0: # this group", "except IndexError: end = len(text) if end != start: self.cur.append((start,", "len(text) if required: assert end != start if end !=", "= self.error_till_line_end(end, text) continue # at this point it could", "whitespace.\"\"\" end = start try: while text[end] in self.WHITESPACE: end", "end = self.whitespace(end, text, True) except AssertionError: del self.cur[orig:] start", "= end = self.whitespace(end, text, True) except AssertionError: del self.cur[orig:]", "# rule proving this formula a theorem orig = len(self.cur)", "# exclude whole match for group in groups: if group[0]", "text[end] in self.NUMBERS: end += 1 if end != start:", "AssertionError # no matches def formula(self, start, text): \"\"\"Tokenize a", "| specification | generalization | interchange | existence | symmetry", "start = end match = self.LINENOS.match(text, start) assert match is", "== ')' self.cur.append((start, Punctuation, text[start])) return start+1 raise AssertionError #", "['TNTLexer'] class TNTLexer(Lexer): \"\"\" Lexer for Typographic Number Theory, as", "self.NUMBERS: end += 1 if end != start: # actual", "self.COMMENT.match(text, start) if match is not None: self.cur.append((start, Comment, text[start:match.end()]))", "Error __all__ = ['TNTLexer'] class TNTLexer(Lexer): \"\"\" Lexer for Typographic", "except IndexError: end = len(text) if required: assert end !=", "| De\\\\ Morgan | switcheroo | specification | generalization |", "!= match.end(): self.cur.append((group[1], Keyword, text[group[1]:match.end()])) break else: self.cur.append((start, Keyword, text[start:match.end()]))", "= start+1 while text[end] in self.PRIMES: end += 1 self.cur.append((start,", "1 self.cur.append((start, Error, text[start:end])) start = end # skip whitespace", "text) if text[start] == '(': # (...+...) self.cur.append((start, Punctuation, text[start]))", "team, see AUTHORS. :license: BSD, see LICENSE for details. 
\"\"\"", "def lineno(self, start, text): \"\"\"Tokenize a line marker.\"\"\" end =", "induction | axiom\\\\ ([1-5]) | premise | push | pop", "to the end of the line as Error.\"\"\" end =", "Error, text[start:end])) end = self.whitespace(end, text) return end def get_tokens_unprocessed(self,", "in self.PRIMES: end += 1 self.cur.append((start, Name.Variable, text[start:end])) return end", "switcheroo | specification | generalization | interchange | existence |", "Theory. :copyright: Copyright 2019-2020 by the Pygments team, see AUTHORS.", "text) return end def get_tokens_unprocessed(self, text): \"\"\"Returns a list of", "1 self.cur.append((start, Name.Variable, text[start:end])) return end def term(self, start, text):", "the end of the line as Error.\"\"\" end = start", "~~~~~~~~~~~~~~~~~~~ Lexer for Typographic Number Theory. :copyright: Copyright 2019-2020 by", "Theory, as described in the book <NAME>, by <NAME>, or", "self.error_till_line_end(end, text) continue # at this point it could be", "Number Theory. :copyright: Copyright 2019-2020 by the Pygments team, see", "anything after the closing bracket is invalid start = end", "contrapositive | De\\\\ Morgan | switcheroo | specification | generalization", "| induction | axiom\\\\ ([1-5]) | premise | push |", "(...+...) self.cur.append((start, Punctuation, text[start])) start = self.term(start+1, text) assert text[start]", "<NAME>, or as summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. 
versionadded:: 2.7 \"\"\"", "required: assert end != start if end != start: self.cur.append((start,", "not None assert text[match.end()] == ')' self.cur.append((match.start(), Number.Integer, match.group(0))) self.cur.append((match.end(),", "'\\n': # there's whitespace in rules end += 1 except", "start = end = self.error_till_line_end(start, text) continue # rule proving", "Typographic Number Theory, as described in the book <NAME>, by", "= len(self.cur) try: start = end = self.lineno(start, text) except", "whole match for group in groups: if group[0] >= 0:", "add\\\\ S | drop\\\\ S | induction | axiom\\\\ ([1-5])", "text[start] == '>' self.cur.append((start, Punctuation, text[start])) return start+1 # ...=...", "TNTLexer(Lexer): \"\"\" Lexer for Typographic Number Theory, as described in", "Operator, text[start])) start = self.term(start+1, text) assert text[start] == ')'", "= self.term(start+1, text) assert text[start] == ')' self.cur.append((start, Punctuation, text[start]))", "Punctuation, Error __all__ = ['TNTLexer'] class TNTLexer(Lexer): \"\"\" Lexer for", "in self.QUANTIFIERS: # Aa:<...> self.cur.append((start, Keyword.Declaration, text[start])) start = self.variable(start+1,", "self.cur.append((start, Punctuation, text[start])) start = self.formula(start+1, text) assert text[start] in", "text) if text[start] in self.QUANTIFIERS: # Aa:<...> self.cur.append((start, Keyword.Declaration, text[start]))", "text) assert text[start] == ')' self.cur.append((start, Punctuation, text[start])) return start+1", "this group matched self.cur.append((start, Keyword, text[start:group[0]])) self.cur.append((group[0], Number.Integer, text[group[0]:group[1]])) if", "= end = self.error_till_line_end(start, text) # do not attempt to", "not in self.WHITESPACE: end += 1 self.cur.append((start, Error, text[start:end])) start", "text[start] in self.LOGIC self.cur.append((start, Operator, text[start])) start = self.formula(start+1, text)", "= end = self.rule(start, text) except 
AssertionError: del self.cur[orig:] start", "if text[start] in self.NEGATORS: # ~<...> end = start+1 while", "from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \\", "end = self.lineno(start, text) except AssertionError: del self.cur[orig:] start =", "return start+1 # ...=... start = self.term(start, text) assert text[start]", "# -*- coding: utf-8 -*- \"\"\" pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for", "True) except AssertionError: del self.cur[orig:] start = end = self.error_till_line_end(end,", "or as summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. versionadded:: 2.7 \"\"\" name", "'>' self.cur.append((start, Punctuation, text[start])) return start+1 # ...=... start =", "self.cur.append((start, Keyword, text[start:match.end()])) return match.end() def lineno(self, start, text): \"\"\"Tokenize", "end def term(self, start, text): \"\"\"Tokenize a term.\"\"\" if text[start]", "text[start])) start = self.formula(start+1, text) assert text[start] in self.LOGIC self.cur.append((start,", "'(': # (...+...) self.cur.append((start, Punctuation, text[start])) start = self.term(start+1, text)", "Lexer for Typographic Number Theory. 
:copyright: Copyright 2019-2020 by the", "matches def formula(self, start, text): \"\"\"Tokenize a formula.\"\"\" if text[start]", "= set('AE∀∃') NUMBERS = set('0123456789') WHITESPACE = set('\\t \\v\\n') RULES", "= len(text) if end != start: self.cur.append((start, Error, text[start:end])) end", "== ')' self.cur.append((match.start(), Number.Integer, match.group(0))) self.cur.append((match.end(), Punctuation, text[match.end()])) return match.end()", "([0-9]+) | detachment | contrapositive | De\\\\ Morgan | switcheroo", "start = end = self.lineno(start, text) except AssertionError: del self.cur[orig:]", "== 'S': end += 1 self.cur.append((start, Number.Integer, text[start:end])) return self.term(end,", "start = self.variable(start+1, text) assert text[start] == ':' self.cur.append((start, Punctuation,", "0 self.cur.append((start, Number.Integer, text[start])) return start+1 if text[start] in self.VARIABLES:", "BSD, see LICENSE for details. \"\"\" import re from pygments.lexer", "the rest continue del match # one formula, possibly containing", "self.cur.append((start, Name.Variable, text[start:end])) return end def term(self, start, text): \"\"\"Tokenize", "formula a theorem orig = len(self.cur) try: start = end", "= re.compile('''(?xi) joining | separation | double-tilde | fantasy\\\\ rule", "text[start] in self.VARIABLES: # a''... 
return self.variable(start, text) if text[start]", "text[start] == ')' self.cur.append((start, Punctuation, text[start])) return start+1 raise AssertionError", "filenames = ['*.tnt'] cur = [] LOGIC = set('⊃→]&∧^|∨Vv') OPERATORS", "is not None assert text[match.end()] == ')' self.cur.append((match.start(), Number.Integer, match.group(0)))", "self.cur.append((start, Number.Integer, text[start:end])) # whitespace is required after a line", "comment match = self.COMMENT.match(text, start) if match is not None:", "text[start:end])) return end def variable(self, start, text): \"\"\"Tokenize a variable.\"\"\"", "matched self.cur.append((start, Keyword, text[start:group[0]])) self.cur.append((group[0], Number.Integer, text[group[0]:group[1]])) if group[1] !=", "start, text): \"\"\"Tokenize a line marker.\"\"\" end = start while", "= self.whitespace(end, text) # line marker if text[start] == '(':", "here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. versionadded:: 2.7 \"\"\" name = 'Typographic Number", "in self.WHITESPACE: end += 1 self.cur.append((start, Error, text[start:end])) start =", "if match is not None: self.cur.append((start, Comment, text[start:match.end()])) start =", "as summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. versionadded:: 2.7 \"\"\" name =", "self.formula(start+1, text) assert text[start] == '>' self.cur.append((start, Punctuation, text[start])) return", "self.cur.append((match.start(), Number.Integer, match.group(0))) self.cur.append((match.end(), Punctuation, text[match.end()])) return match.end() + 1", "continue del match # one formula, possibly containing subformulae orig", "term.\"\"\" if text[start] == 'S': # S...S(...) 
or S...0 end", "if text[start] in '[]': # fantasy push or pop self.cur.append((start,", "there's whitespace in rules end += 1 except IndexError: end", "text[start:match.end()])) start = end = match.end() # anything after the", "of TNT tokens.\"\"\" self.cur = [] start = end =", "if text[start] == '0': # the singleton 0 self.cur.append((start, Number.Integer,", "| switcheroo | specification | generalization | interchange | existence", "text) # line marker if text[start] == '(': orig =", "double-tilde | fantasy\\\\ rule | carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+)", "1 def error_till_line_end(self, start, text): \"\"\"Mark everything from ``start`` to", "text) return start def rule(self, start, text): \"\"\"Tokenize a rule.\"\"\"", "set('+.⋅*') VARIABLES = set('abcde') PRIMES = set(\"'′\") NEGATORS = set('~!')", "= end = self.whitespace(0, text) while start <= end <", "assert match is not None groups = sorted(match.regs[1:]) # exclude", "len(self.cur) try: start = end = self.rule(start, text) except AssertionError:", "self.whitespace(end, text) # line marker if text[start] == '(': orig", "# ...=... 
start = self.term(start, text) assert text[start] == '='", "self.cur.append((start, Operator, text[start:end])) return self.formula(end, text) if text[start] in self.QUANTIFIERS:", "text) if text[start] == '0': # the singleton 0 self.cur.append((start,", "text) continue # at this point it could be a", "a line number orig = len(self.cur) try: start = end", "self.cur.append((start, Error, text[start:end])) start = end # skip whitespace after", "else: self.cur.append((start, Keyword, text[start:match.end()])) return match.end() def lineno(self, start, text):", "assert text[start] == ':' self.cur.append((start, Punctuation, text[start])) return self.formula(start+1, text)", "= start while text[end] not in self.NUMBERS: end += 1", "end < len(text): # try line number while text[end] in", "end += 1 self.cur.append((start, Punctuation, text[start])) self.cur.append((start+1, Text, text[start+1:end])) start", "')' self.cur.append((start, Punctuation, text[start])) return start+1 raise AssertionError # no", "1 except IndexError: end = len(text) if end != start:", "a list of TNT tokens.\"\"\" self.cur = [] start =", "S...S(...) or S...0 end = start+1 while text[end] == 'S':", "not None: self.cur.append((start, Comment, text[start:match.end()])) start = end = match.end()", "+= 1 self.cur.append((start, Error, text[start:end])) start = end # skip", "if required: assert end != start if end != start:", "| push | pop ''') LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and", "\"\"\"Tokenize a term.\"\"\" if text[start] == 'S': # S...S(...) 
or", "group[1] != match.end(): self.cur.append((group[1], Keyword, text[group[1]:match.end()])) break else: self.cur.append((start, Keyword,", "text[start] == ':' self.cur.append((start, Punctuation, text[start])) return self.formula(start+1, text) if", "self.cur.append((start, Operator, text[start])) start = self.term(start+1, text) assert text[start] ==", "assert text[start] == '>' self.cur.append((start, Punctuation, text[start])) return start+1 #", "lineno(self, start, text): \"\"\"Tokenize a line marker.\"\"\" end = start", "rules end += 1 except IndexError: end = len(text) if", "start = end # skip whitespace after formula orig =", "self.cur.append((start, Number.Integer, text[start])) return start+1 if text[start] in self.VARIABLES: #", "text[start])) return self.formula(start+1, text) if text[start] == '<': # <...&...>", "Operator, Keyword, Name, Number, \\ Punctuation, Error __all__ = ['TNTLexer']", "<NAME>, by <NAME>, or as summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. versionadded::", "end != start: self.cur.append((start, Error, text[start:end])) end = self.whitespace(end, text)", "= len(self.cur) try: start = end = self.formula(start, text) except", "push | pop ''') LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? 
and )(?:[0-9]+))*')", "while text[end] in self.PRIMES: end += 1 self.cur.append((start, Name.Variable, text[start:end]))", "not None groups = sorted(match.regs[1:]) # exclude whole match for", "self.lineno(start, text) except AssertionError: del self.cur[orig:] start = end =", "required after a line number orig = len(self.cur) try: start", "drop\\\\ S | induction | axiom\\\\ ([1-5]) | premise |", "of the line as Error.\"\"\" end = start try: while", "start+1 while text[end] == 'S': end += 1 self.cur.append((start, Number.Integer,", "match.end() # anything after the closing bracket is invalid start", "formula.\"\"\" if text[start] in '[]': # fantasy push or pop", "1 if end != start: # actual number present self.cur.append((start,", "set('abcde') PRIMES = set(\"'′\") NEGATORS = set('~!') QUANTIFIERS = set('AE∀∃')", "Lexer for Typographic Number Theory, as described in the book", "text) continue # rule proving this formula a theorem orig", "text[start] in self.QUANTIFIERS: # Aa:<...> self.cur.append((start, Keyword.Declaration, text[start])) start =", "text) while start <= end < len(text): # try line", "while text[end] in self.NEGATORS: end += 1 self.cur.append((start, Operator, text[start:end]))", "start try: while text[end] != '\\n': # there's whitespace in", "groups: if group[0] >= 0: # this group matched self.cur.append((start,", ">= 0: # this group matched self.cur.append((start, Keyword, text[start:group[0]])) self.cur.append((group[0],", "formula(self, start, text): \"\"\"Tokenize a formula.\"\"\" if text[start] in '[]':", "end match = self.LINENOS.match(text, start) assert match is not None", "as described in the book <NAME>, by <NAME>, or as", "self.cur.append((group[1], Keyword, text[group[1]:match.end()])) break else: self.cur.append((start, Keyword, text[start:match.end()])) return match.end()", "# one formula, possibly containing subformulae orig = len(self.cur) try:", "possibly containing subformulae orig = len(self.cur) try: start = end", "<...&...> 
self.cur.append((start, Punctuation, text[start])) start = self.formula(start+1, text) assert text[start]", "by <NAME>, or as summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. versionadded:: 2.7", "while text[end] != '\\n': # there's whitespace in rules end", "AssertionError: del self.cur[orig:] start = end = self.error_till_line_end(start, text) continue", "self.error_till_line_end(start, text) # do not attempt to process the rest", "end = self.error_till_line_end(start, text) continue start = end = self.whitespace(start,", "fantasy\\\\ rule | carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment", "!= start: self.cur.append((start, Text, text[start:end])) return end def variable(self, start,", "a''... return self.variable(start, text) if text[start] == '(': # (...+...)", "# actual number present self.cur.append((start, Number.Integer, text[start:end])) # whitespace is", "text[start] in '[]': # fantasy push or pop self.cur.append((start, Keyword,", "1 except IndexError: end = len(text) if required: assert end", "S | drop\\\\ S | induction | axiom\\\\ ([1-5]) |", "if end != start: # actual number present self.cur.append((start, Number.Integer,", "end = start+1 while text[end] == 'S': end += 1", "get_tokens_unprocessed(self, text): \"\"\"Returns a list of TNT tokens.\"\"\" self.cur =", "in self.OPERATORS self.cur.append((start, Operator, text[start])) start = self.term(start+1, text) assert", "start: # actual number present self.cur.append((start, Number.Integer, text[start:end])) # whitespace", "+= 1 except IndexError: end = len(text) if required: assert", "rule start = end = self.whitespace(end, text) # line marker", "del self.cur[orig:] start = end = self.error_till_line_end(start, text) continue start", "[] start = end = self.whitespace(0, text) while start <=", "+= 1 self.cur.append((start, Number.Integer, text[start:end])) return self.term(end, text) if text[start]", "no matches def formula(self, 
start, text): \"\"\"Tokenize a formula.\"\"\" if", "= self.error_till_line_end(start, text) continue # skip whitespace after rule start", "self.whitespace(0, text) while start <= end < len(text): # try", "IndexError: end = len(text) if required: assert end != start", "assert text[start] in self.VARIABLES end = start+1 while text[end] in", "2.7 \"\"\" name = 'Typographic Number Theory' aliases = ['tnt']", "Keyword, text[start])) return start+1 if text[start] in self.NEGATORS: # ~<...>", "try: start = end = self.lineno(start, text) except AssertionError: del", "term(self, start, text): \"\"\"Tokenize a term.\"\"\" if text[start] == 'S':", "| existence | symmetry | transitivity | add\\\\ S |", "text[start:end])) start = end # skip whitespace after formula orig", "is not None groups = sorted(match.regs[1:]) # exclude whole match", "end = self.error_till_line_end(start, text) # do not attempt to process", "text[end] not in self.WHITESPACE: end += 1 self.cur.append((start, Error, text[start:end]))", "1 self.cur.append((start, Punctuation, text[start])) self.cur.append((start+1, Text, text[start+1:end])) start = end", "len(self.cur) try: start = end = self.formula(start, text) except AssertionError:", "details. 
\"\"\" import re from pygments.lexer import Lexer from pygments.token", "text[start])) self.cur.append((start+1, Text, text[start+1:end])) start = end match = self.LINENOS.match(text,", "end = start try: while text[end] in self.WHITESPACE: end +=", "if text[start] in self.QUANTIFIERS: # Aa:<...> self.cur.append((start, Keyword.Declaration, text[start])) start", "self.cur = [] start = end = self.whitespace(0, text) while", "axiom\\\\ ([1-5]) | premise | push | pop ''') LINENOS", "self.NEGATORS: # ~<...> end = start+1 while text[end] in self.NEGATORS:", "end # skip whitespace after formula orig = len(self.cur) try:", "as Error.\"\"\" end = start try: while text[end] != '\\n':", "self.error_till_line_end(start, text) continue # rule proving this formula a theorem", "-*- coding: utf-8 -*- \"\"\" pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for Typographic", "| fantasy\\\\ rule | carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) |", "a line marker.\"\"\" end = start while text[end] not in", "match.group(0))) self.cur.append((match.end(), Punctuation, text[match.end()])) return match.end() + 1 def error_till_line_end(self,", "| contrapositive | De\\\\ Morgan | switcheroo | specification |", "end = start+1 while text[end] in self.NEGATORS: end += 1", "# fantasy push or pop self.cur.append((start, Keyword, text[start])) return start+1", "except AssertionError: del self.cur[orig:] start = end = self.error_till_line_end(end, text)", "<= end < len(text): # try line number while text[end]", "LICENSE for details. 
\"\"\" import re from pygments.lexer import Lexer", "Comment, Operator, Keyword, Name, Number, \\ Punctuation, Error __all__ =", "generalization | interchange | existence | symmetry | transitivity |", "text) assert text[start] == '>' self.cur.append((start, Punctuation, text[start])) return start+1", "while text[end] not in self.WHITESPACE: end += 1 self.cur.append((start, Error,", "from ``start`` to the end of the line as Error.\"\"\"", "except AssertionError: del self.cur[orig:] start = end = self.error_till_line_end(start, text)", "Punctuation, text[start])) start = self.term(start+1, text) assert text[start] in self.OPERATORS", "def formula(self, start, text): \"\"\"Tokenize a formula.\"\"\" if text[start] in", "while text[end] not in self.NUMBERS: end += 1 self.cur.append((start, Punctuation,", "for group in groups: if group[0] >= 0: # this", "versionadded:: 2.7 \"\"\" name = 'Typographic Number Theory' aliases =", "self.cur.append((start, Text, text[start:end])) return end def variable(self, start, text): \"\"\"Tokenize", "\"\"\"Tokenize a variable.\"\"\" assert text[start] in self.VARIABLES end = start+1", "not well-formed del self.cur[orig:] while text[end] not in self.WHITESPACE: end", "start, text): \"\"\"Tokenize a variable.\"\"\" assert text[start] in self.VARIABLES end", "interchange | existence | symmetry | transitivity | add\\\\ S", "return self.variable(start, text) if text[start] == '(': # (...+...) self.cur.append((start,", "\\ Punctuation, Error __all__ = ['TNTLexer'] class TNTLexer(Lexer): \"\"\" Lexer", "AUTHORS. :license: BSD, see LICENSE for details. 
\"\"\" import re", "set(\"'′\") NEGATORS = set('~!') QUANTIFIERS = set('AE∀∃') NUMBERS = set('0123456789')", "existence | symmetry | transitivity | add\\\\ S | drop\\\\", "re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self, start, text, required=False): \"\"\"Tokenize whitespace.\"\"\" end =", "def term(self, start, text): \"\"\"Tokenize a term.\"\"\" if text[start] ==", "the Pygments team, see AUTHORS. :license: BSD, see LICENSE for", "return start+1 if text[start] in self.VARIABLES: # a''... return self.variable(start,", "Punctuation, text[start])) return start+1 raise AssertionError # no matches def", "start, text): \"\"\"Tokenize a rule.\"\"\" match = self.RULES.match(text, start) assert", "self.whitespace(end, text, True) except AssertionError: del self.cur[orig:] start = end", "start = end = self.formula(start, text) except AssertionError: # not", "text) continue start = end = self.whitespace(start, text) return self.cur", "end += 1 self.cur.append((start, Name.Variable, text[start:end])) return end def term(self,", "text[start+1:end])) start = end match = self.LINENOS.match(text, start) assert match", "actual number present self.cur.append((start, Number.Integer, text[start:end])) # whitespace is required", "self.LINENOS.match(text, start) assert match is not None assert text[match.end()] ==", "start = end = self.whitespace(end, text, True) except AssertionError: del", "start = end = self.error_till_line_end(start, text) # do not attempt", "Keyword, Name, Number, \\ Punctuation, Error __all__ = ['TNTLexer'] class", "text[start:end])) return self.formula(end, text) if text[start] in self.QUANTIFIERS: # Aa:<...>", "self.cur[orig:] start = end = self.error_till_line_end(start, text) continue # skip", "described in the book <NAME>, by <NAME>, or as summarized", "self.VARIABLES end = start+1 while text[end] in self.PRIMES: end +=", "group[0] >= 0: # this group matched self.cur.append((start, Keyword, text[start:group[0]]))", "rule(self, start, text): 
\"\"\"Tokenize a rule.\"\"\" match = self.RULES.match(text, start)", "= set('\\t \\v\\n') RULES = re.compile('''(?xi) joining | separation |", "end = self.error_till_line_end(start, text) continue # skip whitespace after rule", "marker.\"\"\" end = start while text[end] not in self.NUMBERS: end", "= end match = self.LINENOS.match(text, start) assert match is not", "list of TNT tokens.\"\"\" self.cur = [] start = end", "Theory' aliases = ['tnt'] filenames = ['*.tnt'] cur = []", "orig = len(self.cur) try: start = end = self.rule(start, text)", "'S': end += 1 self.cur.append((start, Number.Integer, text[start:end])) return self.term(end, text)", "# ~<...> end = start+1 while text[end] in self.NEGATORS: end", "summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. versionadded:: 2.7 \"\"\" name = 'Typographic", "end = start+1 while text[end] in self.PRIMES: end += 1", "start = end = self.whitespace(0, text) while start <= end", "separation | double-tilde | fantasy\\\\ rule | carry[- ]over(?:\\\\ of)?(?:\\\\", "while start <= end < len(text): # try line number", "process the rest continue del match # one formula, possibly", "= end = self.whitespace(end, text) # line marker if text[start]", "VARIABLES = set('abcde') PRIMES = set(\"'′\") NEGATORS = set('~!') QUANTIFIERS", "groups = sorted(match.regs[1:]) # exclude whole match for group in", "for Typographic Number Theory, as described in the book <NAME>,", "end = len(text) if end != start: self.cur.append((start, Error, text[start:end]))", "= end = match.end() # anything after the closing bracket", "Error.\"\"\" end = start try: while text[end] != '\\n': #", "coding: utf-8 -*- \"\"\" pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for Typographic Number", "# a''... 
return self.variable(start, text) if text[start] == '(': #", "text[start])) start = self.formula(start+1, text) assert text[start] == '>' self.cur.append((start,", "= set(\"'′\") NEGATORS = set('~!') QUANTIFIERS = set('AE∀∃') NUMBERS =", "De\\\\ Morgan | switcheroo | specification | generalization | interchange", "continue # rule proving this formula a theorem orig =", "Punctuation, text[start])) return self.formula(start+1, text) if text[start] == '<': #", "end != start: # actual number present self.cur.append((start, Number.Integer, text[start:end]))", "AssertionError: del self.cur[orig:] start = end = self.error_till_line_end(end, text) continue", "everything from ``start`` to the end of the line as", "try: start = end = self.rule(start, text) except AssertionError: del", "LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*') COMMENT = re.compile(r'\\[[^\\n\\]]+\\]') def", "self.WHITESPACE: end += 1 self.cur.append((start, Error, text[start:end])) start = end", "of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment | contrapositive | De\\\\ Morgan", "try line number while text[end] in self.NUMBERS: end += 1", "| carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment | contrapositive", "fantasy push or pop self.cur.append((start, Keyword, text[start])) return start+1 if", "whitespace in rules end += 1 except IndexError: end =", "cur = [] LOGIC = set('⊃→]&∧^|∨Vv') OPERATORS = set('+.⋅*') VARIABLES", "number orig = len(self.cur) try: start = end = self.whitespace(end,", "self.cur.append((start, Keyword, text[start:group[0]])) self.cur.append((group[0], Number.Integer, text[group[0]:group[1]])) if group[1] != match.end():", "self.term(start, text) assert text[start] == '=' self.cur.append((start, Operator, text[start])) start", "start+1 if text[start] in self.VARIABLES: # a''... 
return self.variable(start, text)", "text) assert text[start] == ':' self.cur.append((start, Punctuation, text[start])) return self.formula(start+1,", "match is not None: self.cur.append((start, Comment, text[start:match.end()])) start = end", "\"\"\"Tokenize a formula.\"\"\" if text[start] in '[]': # fantasy push", "text): \"\"\"Tokenize a line marker.\"\"\" end = start while text[end]", "end def variable(self, start, text): \"\"\"Tokenize a variable.\"\"\" assert text[start]", "= self.formula(start+1, text) assert text[start] in self.LOGIC self.cur.append((start, Operator, text[start]))", "return match.end() def lineno(self, start, text): \"\"\"Tokenize a line marker.\"\"\"", "return self.term(end, text) if text[start] == '0': # the singleton", "start) if match is not None: self.cur.append((start, Comment, text[start:match.end()])) start", "start <= end < len(text): # try line number while", "?|,? and )(?:[0-9]+))*') COMMENT = re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self, start, text,", "end += 1 except IndexError: end = len(text) if required:", "start+1 while text[end] in self.NEGATORS: end += 1 self.cur.append((start, Operator,", "self.error_till_line_end(start, text) continue start = end = self.whitespace(start, text) return", "set('⊃→]&∧^|∨Vv') OPERATORS = set('+.⋅*') VARIABLES = set('abcde') PRIMES = set(\"'′\")", "try: start = end = self.formula(start, text) except AssertionError: #", "self.WHITESPACE: end += 1 except IndexError: end = len(text) if", "self.PRIMES: end += 1 self.cur.append((start, Name.Variable, text[start:end])) return end def", "text[start:end])) end = self.whitespace(end, text) return end def get_tokens_unprocessed(self, text):", "orig = len(self.cur) try: start = end = self.formula(start, text)", "while text[end] == 'S': end += 1 self.cur.append((start, Number.Integer, text[start:end]))", "True) except AssertionError: del self.cur[orig:] start = end = self.error_till_line_end(start,", "del self.cur[orig:] start = end = 
self.error_till_line_end(end, text) continue #", "end = self.whitespace(end, text) # line marker if text[start] ==", "assert end != start if end != start: self.cur.append((start, Text,", "= set('0123456789') WHITESPACE = set('\\t \\v\\n') RULES = re.compile('''(?xi) joining", "text[end] in self.PRIMES: end += 1 self.cur.append((start, Name.Variable, text[start:end])) return", "'(': orig = len(self.cur) try: start = end = self.lineno(start,", "# at this point it could be a comment match", "https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt .. versionadded:: 2.7 \"\"\" name = 'Typographic Number Theory'", "if end != start: self.cur.append((start, Text, text[start:end])) return end def", "end def get_tokens_unprocessed(self, text): \"\"\"Returns a list of TNT tokens.\"\"\"", "self.whitespace(end, text) return end def get_tokens_unprocessed(self, text): \"\"\"Returns a list", "= self.error_till_line_end(start, text) continue # rule proving this formula a", "= 'Typographic Number Theory' aliases = ['tnt'] filenames = ['*.tnt']", "self.formula(start+1, text) assert text[start] in self.LOGIC self.cur.append((start, Operator, text[start])) start", "= re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*') COMMENT = re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self,", "Punctuation, text[start])) self.cur.append((start+1, Text, text[start+1:end])) start = end match =", "== '>' self.cur.append((start, Punctuation, text[start])) return start+1 # ...=... start", "def get_tokens_unprocessed(self, text): \"\"\"Returns a list of TNT tokens.\"\"\" self.cur", "| pop ''') LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? 
and )(?:[0-9]+))*') COMMENT", "| detachment | contrapositive | De\\\\ Morgan | switcheroo |", "start = end = self.error_till_line_end(end, text) continue # at this", "after rule start = end = self.whitespace(end, text) # line", "== '(': orig = len(self.cur) try: start = end =", "raise AssertionError # no matches def formula(self, start, text): \"\"\"Tokenize", "OPERATORS = set('+.⋅*') VARIABLES = set('abcde') PRIMES = set(\"'′\") NEGATORS", "= len(self.cur) try: start = end = self.whitespace(end, text, True)", "def variable(self, start, text): \"\"\"Tokenize a variable.\"\"\" assert text[start] in", "Number, \\ Punctuation, Error __all__ = ['TNTLexer'] class TNTLexer(Lexer): \"\"\"", "# skip whitespace after rule start = end = self.whitespace(end,", ".. versionadded:: 2.7 \"\"\" name = 'Typographic Number Theory' aliases", ")(?:[0-9]+))*') COMMENT = re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self, start, text, required=False): \"\"\"Tokenize", "in the book <NAME>, by <NAME>, or as summarized here:", "whitespace is required after a line number orig = len(self.cur)", "try: start = end = self.whitespace(end, text, True) except AssertionError:", "for Typographic Number Theory. 
:copyright: Copyright 2019-2020 by the Pygments", "start+1 while text[end] in self.PRIMES: end += 1 self.cur.append((start, Name.Variable,", "+= 1 self.cur.append((start, Punctuation, text[start])) self.cur.append((start+1, Text, text[start+1:end])) start =", "whitespace(self, start, text, required=False): \"\"\"Tokenize whitespace.\"\"\" end = start try:", "break else: self.cur.append((start, Keyword, text[start:match.end()])) return match.end() def lineno(self, start,", "start = end = self.whitespace(end, text) # line marker if", "end = start while text[end] not in self.NUMBERS: end +=", "self.cur[orig:] start = end = self.error_till_line_end(start, text) continue # rule", "\"\"\" name = 'Typographic Number Theory' aliases = ['tnt'] filenames", "def whitespace(self, start, text, required=False): \"\"\"Tokenize whitespace.\"\"\" end = start", "in rules end += 1 except IndexError: end = len(text)", "detachment | contrapositive | De\\\\ Morgan | switcheroo | specification", "rule proving this formula a theorem orig = len(self.cur) try:", "match # one formula, possibly containing subformulae orig = len(self.cur)", "return self.formula(end, text) if text[start] in self.QUANTIFIERS: # Aa:<...> self.cur.append((start,", "= set('abcde') PRIMES = set(\"'′\") NEGATORS = set('~!') QUANTIFIERS =", "import Text, Comment, Operator, Keyword, Name, Number, \\ Punctuation, Error", "self.cur.append((start, Comment, text[start:match.end()])) start = end = match.end() # anything", "del match # one formula, possibly containing subformulae orig =", "self.NEGATORS: end += 1 self.cur.append((start, Operator, text[start:end])) return self.formula(end, text)", "theorem orig = len(self.cur) try: start = end = self.rule(start,", "Number.Integer, text[start:end])) return self.term(end, text) if text[start] == '0': #", "text) # do not attempt to process the rest continue", "not in self.NUMBERS: end += 1 self.cur.append((start, Punctuation, text[start])) self.cur.append((start+1,", "and 
)(?:[0-9]+))*') COMMENT = re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self, start, text, required=False):", "= self.formula(start+1, text) assert text[start] == '>' self.cur.append((start, Punctuation, text[start]))", "containing subformulae orig = len(self.cur) try: start = end =", "# (...+...) self.cur.append((start, Punctuation, text[start])) start = self.term(start+1, text) assert", "Name.Variable, text[start:end])) return end def term(self, start, text): \"\"\"Tokenize a", "start: self.cur.append((start, Text, text[start:end])) return end def variable(self, start, text):", "!= start: # actual number present self.cur.append((start, Number.Integer, text[start:end])) #", "| generalization | interchange | existence | symmetry | transitivity", "start try: while text[end] in self.WHITESPACE: end += 1 except", "self.cur.append((start, Number.Integer, text[start:end])) return self.term(end, text) if text[start] == '0':", "in self.NEGATORS: # ~<...> end = start+1 while text[end] in", "while text[end] in self.NUMBERS: end += 1 if end !=", "text) except AssertionError: del self.cur[orig:] start = end = self.error_till_line_end(start,", "text) continue # skip whitespace after rule start = end", "# this group matched self.cur.append((start, Keyword, text[start:group[0]])) self.cur.append((group[0], Number.Integer, text[group[0]:group[1]]))", "continue # skip whitespace after rule start = end =", "+= 1 self.cur.append((start, Name.Variable, text[start:end])) return end def term(self, start,", "''') LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*') COMMENT = re.compile(r'\\[[^\\n\\]]+\\]')", "+= 1 if end != start: # actual number present", "= re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self, start, text, required=False): \"\"\"Tokenize whitespace.\"\"\" end", "start, text): \"\"\"Mark everything from ``start`` to the end of", "text) if text[start] == '<': # <...&...> self.cur.append((start, Punctuation, text[start]))", "self.VARIABLES: # a''... 
return self.variable(start, text) if text[start] == '(':", "specification | generalization | interchange | existence | symmetry |", "text[start:end])) return self.term(end, text) if text[start] == '0': # the", "assert text[start] in self.LOGIC self.cur.append((start, Operator, text[start])) start = self.formula(start+1,", "text[start])) start = self.term(start+1, text) assert text[start] in self.OPERATORS self.cur.append((start,", "= match.end() # anything after the closing bracket is invalid", "it could be a comment match = self.COMMENT.match(text, start) if", "start+1 if text[start] in self.NEGATORS: # ~<...> end = start+1", "':' self.cur.append((start, Punctuation, text[start])) return self.formula(start+1, text) if text[start] ==", "self.cur.append((start, Error, text[start:end])) end = self.whitespace(end, text) return end def", "skip whitespace after rule start = end = self.whitespace(end, text)", "!= '\\n': # there's whitespace in rules end += 1", "= self.whitespace(end, text) return end def get_tokens_unprocessed(self, text): \"\"\"Returns a", "self.variable(start, text) if text[start] == '(': # (...+...) self.cur.append((start, Punctuation,", "text[start])) return start+1 # ...=... start = self.term(start, text) assert", "whitespace after formula orig = len(self.cur) try: start = end", "# try line number while text[end] in self.NUMBERS: end +=", "text[start] == 'S': # S...S(...) 
or S...0 end = start+1", "\"\"\"Returns a list of TNT tokens.\"\"\" self.cur = [] start", "formula orig = len(self.cur) try: start = end = self.whitespace(end,", "in '[]': # fantasy push or pop self.cur.append((start, Keyword, text[start]))", "Number.Integer, text[start:end])) # whitespace is required after a line number", "line marker.\"\"\" end = start while text[end] not in self.NUMBERS:", "symmetry | transitivity | add\\\\ S | drop\\\\ S |", "exclude whole match for group in groups: if group[0] >=", "text[start] == '0': # the singleton 0 self.cur.append((start, Number.Integer, text[start]))", "end != start: self.cur.append((start, Text, text[start:end])) return end def variable(self,", "import re from pygments.lexer import Lexer from pygments.token import Text,", "== '0': # the singleton 0 self.cur.append((start, Number.Integer, text[start])) return", "-*- \"\"\" pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for Typographic Number Theory. :copyright:", "== '=' self.cur.append((start, Operator, text[start])) start = self.term(start+1, text) return", "Number.Integer, text[group[0]:group[1]])) if group[1] != match.end(): self.cur.append((group[1], Keyword, text[group[1]:match.end()])) break", "= self.LINENOS.match(text, start) assert match is not None assert text[match.end()]", "text[start] == '(': # (...+...) self.cur.append((start, Punctuation, text[start])) start =", "be a comment match = self.COMMENT.match(text, start) if match is", "see LICENSE for details. 
\"\"\" import re from pygments.lexer import", "a comment match = self.COMMENT.match(text, start) if match is not", "= self.whitespace(0, text) while start <= end < len(text): #", "Keyword, text[start:match.end()])) return match.end() def lineno(self, start, text): \"\"\"Tokenize a", "after the closing bracket is invalid start = end =", "set('AE∀∃') NUMBERS = set('0123456789') WHITESPACE = set('\\t \\v\\n') RULES =", "line as Error.\"\"\" end = start try: while text[end] !=", "| double-tilde | fantasy\\\\ rule | carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\", "well-formed del self.cur[orig:] while text[end] not in self.WHITESPACE: end +=", "text[start:group[0]])) self.cur.append((group[0], Number.Integer, text[group[0]:group[1]])) if group[1] != match.end(): self.cur.append((group[1], Keyword,", "# the singleton 0 self.cur.append((start, Number.Integer, text[start])) return start+1 if", "text) assert text[start] in self.OPERATORS self.cur.append((start, Operator, text[start])) start =", "marker if text[start] == '(': orig = len(self.cur) try: start", "1 self.cur.append((start, Operator, text[start:end])) return self.formula(end, text) if text[start] in", "= self.COMMENT.match(text, start) if match is not None: self.cur.append((start, Comment,", "proving this formula a theorem orig = len(self.cur) try: start", "None: self.cur.append((start, Comment, text[start:match.end()])) start = end = match.end() #", "= self.error_till_line_end(start, text) continue start = end = self.whitespace(start, text)", "| separation | double-tilde | fantasy\\\\ rule | carry[- ]over(?:\\\\", "= ['*.tnt'] cur = [] LOGIC = set('⊃→]&∧^|∨Vv') OPERATORS =", "# there's whitespace in rules end += 1 except IndexError:", "rule.\"\"\" match = self.RULES.match(text, start) assert match is not None", "= len(self.cur) try: start = end = self.rule(start, text) except", "formula, possibly containing subformulae orig = len(self.cur) try: start =", "# no matches def formula(self, start, text): \"\"\"Tokenize 
a formula.\"\"\"", "...=... start = self.term(start, text) assert text[start] == '=' self.cur.append((start,", "Keyword, text[start:group[0]])) self.cur.append((group[0], Number.Integer, text[group[0]:group[1]])) if group[1] != match.end(): self.cur.append((group[1],", "match for group in groups: if group[0] >= 0: #", "self.NUMBERS: end += 1 self.cur.append((start, Punctuation, text[start])) self.cur.append((start+1, Text, text[start+1:end]))", "the line as Error.\"\"\" end = start try: while text[end]", "try: while text[end] != '\\n': # there's whitespace in rules", "return end def get_tokens_unprocessed(self, text): \"\"\"Returns a list of TNT", "| drop\\\\ S | induction | axiom\\\\ ([1-5]) | premise", "= self.term(start+1, text) return start def rule(self, start, text): \"\"\"Tokenize", "end = match.end() # anything after the closing bracket is", "variable(self, start, text): \"\"\"Tokenize a variable.\"\"\" assert text[start] in self.VARIABLES", "= self.term(start+1, text) assert text[start] in self.OPERATORS self.cur.append((start, Operator, text[start]))", "after a line number orig = len(self.cur) try: start =", "text): \"\"\"Tokenize a term.\"\"\" if text[start] == 'S': # S...S(...)", "RULES = re.compile('''(?xi) joining | separation | double-tilde | fantasy\\\\", "= self.lineno(start, text) except AssertionError: del self.cur[orig:] start = end", "# line marker if text[start] == '(': orig = len(self.cur)", "re from pygments.lexer import Lexer from pygments.token import Text, Comment,", "rule | carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment |", "name = 'Typographic Number Theory' aliases = ['tnt'] filenames =", "premise | push | pop ''') LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,?", "# Aa:<...> self.cur.append((start, Keyword.Declaration, text[start])) start = self.variable(start+1, text) assert", "line number while text[end] in self.NUMBERS: end += 1 if", "Keyword, text[group[1]:match.end()])) break else: self.cur.append((start, 
Keyword, text[start:match.end()])) return match.end() def", "['*.tnt'] cur = [] LOGIC = set('⊃→]&∧^|∨Vv') OPERATORS = set('+.⋅*')", "Name, Number, \\ Punctuation, Error __all__ = ['TNTLexer'] class TNTLexer(Lexer):", "= start+1 while text[end] == 'S': end += 1 self.cur.append((start,", "the singleton 0 self.cur.append((start, Number.Integer, text[start])) return start+1 if text[start]", "len(text): # try line number while text[end] in self.NUMBERS: end", "self.term(start+1, text) assert text[start] == ')' self.cur.append((start, Punctuation, text[start])) return", "Comment, text[start:match.end()])) start = end = match.end() # anything after", "singleton 0 self.cur.append((start, Number.Integer, text[start])) return start+1 if text[start] in", "Operator, text[start:end])) return self.formula(end, text) if text[start] in self.QUANTIFIERS: #", "start, text): \"\"\"Tokenize a formula.\"\"\" if text[start] in '[]': #", "text[start:end])) return end def term(self, start, text): \"\"\"Tokenize a term.\"\"\"", "text) assert text[start] == '=' self.cur.append((start, Operator, text[start])) start =", "required=False): \"\"\"Tokenize whitespace.\"\"\" end = start try: while text[end] in", "in self.VARIABLES: # a''... 
return self.variable(start, text) if text[start] ==", "text[start] in self.VARIABLES end = start+1 while text[end] in self.PRIMES:", "end += 1 if end != start: # actual number", "end = self.whitespace(0, text) while start <= end < len(text):", "self.formula(start, text) except AssertionError: # not well-formed del self.cur[orig:] while", "start = self.formula(start+1, text) assert text[start] == '>' self.cur.append((start, Punctuation,", "= self.rule(start, text) except AssertionError: del self.cur[orig:] start = end", "the closing bracket is invalid start = end = self.error_till_line_end(start,", "text): \"\"\"Tokenize a variable.\"\"\" assert text[start] in self.VARIABLES end =", "match = self.LINENOS.match(text, start) assert match is not None assert", "line marker if text[start] == '(': orig = len(self.cur) try:", "start = self.term(start, text) assert text[start] == '=' self.cur.append((start, Operator,", "a variable.\"\"\" assert text[start] in self.VARIABLES end = start+1 while", "import Lexer from pygments.token import Text, Comment, Operator, Keyword, Name,", "| axiom\\\\ ([1-5]) | premise | push | pop ''')", "self.cur.append((start, Operator, text[start])) start = self.formula(start+1, text) assert text[start] ==", "the book <NAME>, by <NAME>, or as summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt", "set('~!') QUANTIFIERS = set('AE∀∃') NUMBERS = set('0123456789') WHITESPACE = set('\\t", "text[end] != '\\n': # there's whitespace in rules end +=", "'S': # S...S(...) or S...0 end = start+1 while text[end]", "skip whitespace after formula orig = len(self.cur) try: start =", "[] LOGIC = set('⊃→]&∧^|∨Vv') OPERATORS = set('+.⋅*') VARIABLES = set('abcde')", "start = end = self.rule(start, text) except AssertionError: del self.cur[orig:]", "Punctuation, text[start])) return start+1 # ...=... 
start = self.term(start, text)", "this point it could be a comment match = self.COMMENT.match(text,", "text[end] == 'S': end += 1 self.cur.append((start, Number.Integer, text[start:end])) return", "assert match is not None assert text[match.end()] == ')' self.cur.append((match.start(),", "orig = len(self.cur) try: start = end = self.lineno(start, text)", "= self.error_till_line_end(start, text) # do not attempt to process the", "pygments.token import Text, Comment, Operator, Keyword, Name, Number, \\ Punctuation,", "Number.Integer, text[start])) return start+1 if text[start] in self.VARIABLES: # a''...", "number while text[end] in self.NUMBERS: end += 1 if end", "if end != start: self.cur.append((start, Error, text[start:end])) end = self.whitespace(end,", "while text[end] in self.WHITESPACE: end += 1 except IndexError: end", "= self.formula(start, text) except AssertionError: # not well-formed del self.cur[orig:]", "WHITESPACE = set('\\t \\v\\n') RULES = re.compile('''(?xi) joining | separation", "= self.variable(start+1, text) assert text[start] == ':' self.cur.append((start, Punctuation, text[start]))", "end = self.formula(start, text) except AssertionError: # not well-formed del", "= set('⊃→]&∧^|∨Vv') OPERATORS = set('+.⋅*') VARIABLES = set('abcde') PRIMES =", "end = self.error_till_line_end(start, text) continue # rule proving this formula", "Text, Comment, Operator, Keyword, Name, Number, \\ Punctuation, Error __all__", "a term.\"\"\" if text[start] == 'S': # S...S(...) or S...0", "NUMBERS = set('0123456789') WHITESPACE = set('\\t \\v\\n') RULES = re.compile('''(?xi)", "match = self.RULES.match(text, start) assert match is not None groups", "Copyright 2019-2020 by the Pygments team, see AUTHORS. :license: BSD,", "start+1 # ...=... 
start = self.term(start, text) assert text[start] ==", "+= 1 self.cur.append((start, Operator, text[start:end])) return self.formula(end, text) if text[start]", "match is not None assert text[match.end()] == ')' self.cur.append((match.start(), Number.Integer,", "text[match.end()])) return match.end() + 1 def error_till_line_end(self, start, text): \"\"\"Mark", "+ 1 def error_till_line_end(self, start, text): \"\"\"Mark everything from ``start``", "line)?\\\\ ([0-9]+) | detachment | contrapositive | De\\\\ Morgan |", "tokens.\"\"\" self.cur = [] start = end = self.whitespace(0, text)", "# whitespace is required after a line number orig =", "\"\"\" Lexer for Typographic Number Theory, as described in the", "end = self.whitespace(end, text) return end def get_tokens_unprocessed(self, text): \"\"\"Returns", "text[end] in self.NEGATORS: end += 1 self.cur.append((start, Operator, text[start:end])) return", "self.cur.append((group[0], Number.Integer, text[group[0]:group[1]])) if group[1] != match.end(): self.cur.append((group[1], Keyword, text[group[1]:match.end()]))", "text[end] in self.WHITESPACE: end += 1 except IndexError: end =", "Pygments team, see AUTHORS. :license: BSD, see LICENSE for details.", "end = len(text) if required: assert end != start if", "text[start] in self.OPERATORS self.cur.append((start, Operator, text[start])) start = self.term(start+1, text)", "Operator, text[start])) start = self.formula(start+1, text) assert text[start] == '>'", "== '(': # (...+...) self.cur.append((start, Punctuation, text[start])) start = self.term(start+1,", "if group[1] != match.end(): self.cur.append((group[1], Keyword, text[group[1]:match.end()])) break else: self.cur.append((start,", "for details. 
\"\"\" import re from pygments.lexer import Lexer from", "after formula orig = len(self.cur) try: start = end =", "class TNTLexer(Lexer): \"\"\" Lexer for Typographic Number Theory, as described", "start: self.cur.append((start, Error, text[start:end])) end = self.whitespace(end, text) return end", "([1-5]) | premise | push | pop ''') LINENOS =", "IndexError: end = len(text) if end != start: self.cur.append((start, Error,", "end += 1 self.cur.append((start, Operator, text[start:end])) return self.formula(end, text) if", "is required after a line number orig = len(self.cur) try:", "= self.term(start, text) assert text[start] == '=' self.cur.append((start, Operator, text[start]))", "start) assert match is not None groups = sorted(match.regs[1:]) #", "S...0 end = start+1 while text[end] == 'S': end +=", "text[start] == '<': # <...&...> self.cur.append((start, Punctuation, text[start])) start =", "closing bracket is invalid start = end = self.error_till_line_end(start, text)", "| interchange | existence | symmetry | transitivity | add\\\\", "| premise | push | pop ''') LINENOS = re.compile(r'(?:[0-9]+)(?:(?:,", "in self.NEGATORS: end += 1 self.cur.append((start, Operator, text[start:end])) return self.formula(end,", "Aa:<...> self.cur.append((start, Keyword.Declaration, text[start])) start = self.variable(start+1, text) assert text[start]", "this formula a theorem orig = len(self.cur) try: start =", "error_till_line_end(self, start, text): \"\"\"Mark everything from ``start`` to the end", "pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for Typographic Number Theory. 
:copyright: Copyright 2019-2020", "re.compile('''(?xi) joining | separation | double-tilde | fantasy\\\\ rule |", "== '<': # <...&...> self.cur.append((start, Punctuation, text[start])) start = self.formula(start+1,", "= [] LOGIC = set('⊃→]&∧^|∨Vv') OPERATORS = set('+.⋅*') VARIABLES =", "= ['TNTLexer'] class TNTLexer(Lexer): \"\"\" Lexer for Typographic Number Theory,", "push or pop self.cur.append((start, Keyword, text[start])) return start+1 if text[start]", "end != start if end != start: self.cur.append((start, Text, text[start:end]))", "'=' self.cur.append((start, Operator, text[start])) start = self.term(start+1, text) return start", "match is not None groups = sorted(match.regs[1:]) # exclude whole", "at this point it could be a comment match =", "self.cur[orig:] start = end = self.error_till_line_end(end, text) continue # at", "a formula.\"\"\" if text[start] in '[]': # fantasy push or", "LOGIC = set('⊃→]&∧^|∨Vv') OPERATORS = set('+.⋅*') VARIABLES = set('abcde') PRIMES", "if text[start] == 'S': # S...S(...) or S...0 end =", "= [] start = end = self.whitespace(0, text) while start", ":copyright: Copyright 2019-2020 by the Pygments team, see AUTHORS. :license:", "| symmetry | transitivity | add\\\\ S | drop\\\\ S", "pop ''') LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? 
and )(?:[0-9]+))*') COMMENT =", "len(self.cur) try: start = end = self.whitespace(end, text, True) except", "whitespace after rule start = end = self.whitespace(end, text) #", "= ['tnt'] filenames = ['*.tnt'] cur = [] LOGIC =", "invalid start = end = self.error_till_line_end(start, text) # do not", "# <...&...> self.cur.append((start, Punctuation, text[start])) start = self.formula(start+1, text) assert", "text): \"\"\"Mark everything from ``start`` to the end of the", "= end = self.lineno(start, text) except AssertionError: del self.cur[orig:] start", "')' self.cur.append((match.start(), Number.Integer, match.group(0))) self.cur.append((match.end(), Punctuation, text[match.end()])) return match.end() +", "self.QUANTIFIERS: # Aa:<...> self.cur.append((start, Keyword.Declaration, text[start])) start = self.variable(start+1, text)", "= self.whitespace(end, text, True) except AssertionError: del self.cur[orig:] start =", "end = self.rule(start, text) except AssertionError: del self.cur[orig:] start =", "== 'S': # S...S(...) 
or S...0 end = start+1 while", "self.term(end, text) if text[start] == '0': # the singleton 0", "return start+1 raise AssertionError # no matches def formula(self, start,", "assert text[start] in self.OPERATORS self.cur.append((start, Operator, text[start])) start = self.term(start+1,", "not attempt to process the rest continue del match #", "book <NAME>, by <NAME>, or as summarized here: https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt ..", "start = end = match.end() # anything after the closing", "+= 1 except IndexError: end = len(text) if end !=", "or pop self.cur.append((start, Keyword, text[start])) return start+1 if text[start] in", "!= start if end != start: self.cur.append((start, Text, text[start:end])) return", "= sorted(match.regs[1:]) # exclude whole match for group in groups:", "text) except AssertionError: # not well-formed del self.cur[orig:] while text[end]", "match = self.COMMENT.match(text, start) if match is not None: self.cur.append((start,", "self.cur.append((start, Keyword, text[start])) return start+1 if text[start] in self.NEGATORS: #", "< len(text): # try line number while text[end] in self.NUMBERS:", "# do not attempt to process the rest continue del", "orig = len(self.cur) try: start = end = self.whitespace(end, text,", "= start try: while text[end] != '\\n': # there's whitespace", "QUANTIFIERS = set('AE∀∃') NUMBERS = set('0123456789') WHITESPACE = set('\\t \\v\\n')", "Number.Integer, match.group(0))) self.cur.append((match.end(), Punctuation, text[match.end()])) return match.end() + 1 def", "set('\\t \\v\\n') RULES = re.compile('''(?xi) joining | separation | double-tilde", "match.end() + 1 def error_till_line_end(self, start, text): \"\"\"Mark everything from", "\"\"\"Tokenize a rule.\"\"\" match = self.RULES.match(text, start) assert match is", "start = self.term(start+1, text) assert text[start] in self.OPERATORS self.cur.append((start, Operator,", "continue # at this point it could be a comment", "| 
transitivity | add\\\\ S | drop\\\\ S | induction", "start = self.term(start+1, text) assert text[start] == ')' self.cur.append((start, Punctuation,", "= self.RULES.match(text, start) assert match is not None groups =", "in groups: if group[0] >= 0: # this group matched", "self.RULES.match(text, start) assert match is not None groups = sorted(match.regs[1:])", "re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*') COMMENT = re.compile(r'\\[[^\\n\\]]+\\]') def whitespace(self, start,", "None groups = sorted(match.regs[1:]) # exclude whole match for group", "self.cur.append((start+1, Text, text[start+1:end])) start = end match = self.LINENOS.match(text, start)", "Text, text[start+1:end])) start = end match = self.LINENOS.match(text, start) assert", "variable.\"\"\" assert text[start] in self.VARIABLES end = start+1 while text[end]", "assert text[match.end()] == ')' self.cur.append((match.start(), Number.Integer, match.group(0))) self.cur.append((match.end(), Punctuation, text[match.end()]))", "= set('+.⋅*') VARIABLES = set('abcde') PRIMES = set(\"'′\") NEGATORS =", "return end def term(self, start, text): \"\"\"Tokenize a term.\"\"\" if", "# anything after the closing bracket is invalid start =", "text[start] == '(': orig = len(self.cur) try: start = end", "= len(text) if required: assert end != start if end", "Punctuation, text[match.end()])) return match.end() + 1 def error_till_line_end(self, start, text):", "text): \"\"\"Tokenize a formula.\"\"\" if text[start] in '[]': # fantasy", "= end = self.error_till_line_end(start, text) continue # skip whitespace after", "text[start])) start = self.term(start+1, text) return start def rule(self, start,", "self.cur.append((start, Keyword.Declaration, text[start])) start = self.variable(start+1, text) assert text[start] ==", "text[start:match.end()])) return match.end() def lineno(self, start, text): \"\"\"Tokenize a line", "text[match.end()] == ')' self.cur.append((match.start(), Number.Integer, match.group(0))) 
self.cur.append((match.end(), Punctuation, text[match.end()])) return", "Morgan | switcheroo | specification | generalization | interchange |", "end += 1 except IndexError: end = len(text) if end", "\\v\\n') RULES = re.compile('''(?xi) joining | separation | double-tilde |", "or S...0 end = start+1 while text[end] == 'S': end", "is invalid start = end = self.error_till_line_end(start, text) # do", "PRIMES = set(\"'′\") NEGATORS = set('~!') QUANTIFIERS = set('AE∀∃') NUMBERS", "if text[start] == '(': # (...+...) self.cur.append((start, Punctuation, text[start])) start", "start = end = self.error_till_line_end(start, text) continue # skip whitespace", "start = self.formula(start+1, text) assert text[start] in self.LOGIC self.cur.append((start, Operator,", "rest continue del match # one formula, possibly containing subformulae", "if text[start] == '<': # <...&...> self.cur.append((start, Punctuation, text[start])) start", "= start try: while text[end] in self.WHITESPACE: end += 1", "start = self.term(start+1, text) return start def rule(self, start, text):", "bracket is invalid start = end = self.error_till_line_end(start, text) #", "NEGATORS = set('~!') QUANTIFIERS = set('AE∀∃') NUMBERS = set('0123456789') WHITESPACE", "0: # this group matched self.cur.append((start, Keyword, text[start:group[0]])) self.cur.append((group[0], Number.Integer,", "def error_till_line_end(self, start, text): \"\"\"Mark everything from ``start`` to the", "number present self.cur.append((start, Number.Integer, text[start:end])) # whitespace is required after", "__all__ = ['TNTLexer'] class TNTLexer(Lexer): \"\"\" Lexer for Typographic Number", "end += 1 self.cur.append((start, Number.Integer, text[start:end])) return self.term(end, text) if", "is not None: self.cur.append((start, Comment, text[start:match.end()])) start = end =", "line number orig = len(self.cur) try: start = end =", "sorted(match.regs[1:]) # exclude whole match for group in groups: if", "Number Theory' aliases = ['tnt'] 
filenames = ['*.tnt'] cur =", "<filename>pygments/lexers/tnt.py<gh_stars>1-10 # -*- coding: utf-8 -*- \"\"\" pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer", "text[group[0]:group[1]])) if group[1] != match.end(): self.cur.append((group[1], Keyword, text[group[1]:match.end()])) break else:", "\"\"\"Tokenize whitespace.\"\"\" end = start try: while text[end] in self.WHITESPACE:", "text): \"\"\"Tokenize a rule.\"\"\" match = self.RULES.match(text, start) assert match", "self.term(start+1, text) return start def rule(self, start, text): \"\"\"Tokenize a", "text[start])) return start+1 if text[start] in self.VARIABLES: # a''... return", "match.end() def lineno(self, start, text): \"\"\"Tokenize a line marker.\"\"\" end", "subformulae orig = len(self.cur) try: start = end = self.formula(start,", "return start def rule(self, start, text): \"\"\"Tokenize a rule.\"\"\" match" ]
[ "name='list'), path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"),", "path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/',", "path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/',", "RemoveContactView.as_view(), name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(),", "import path from contacts.views import ( ContactsListView, CreateContactView, ContactDetailView, UpdateContactView,", "RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name = 'contacts'", "= [ path('list/', ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(),", "path('<int:pk>/delete/', RemoveContactView.as_view(), name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/',", "AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"), path('attachment/add/', AddAttachmentsView.as_view(),", "GetContactsView, AddCommentView, UpdateCommentView, 
DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name = 'contacts' urlpatterns", "CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/', RemoveContactView.as_view(),", "from django.urls import path from contacts.views import ( ContactsListView, CreateContactView,", "app_name = 'contacts' urlpatterns = [ path('list/', ContactsListView.as_view(), name='list'), path('create/',", "ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(),", "DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name = 'contacts' urlpatterns = [ path('list/',", "AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name = 'contacts' urlpatterns =", "name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/', RemoveContactView.as_view(), name=\"remove_contact\"),", "django.urls import path from contacts.views import ( ContactsListView, CreateContactView, ContactDetailView,", "path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"), path('attachment/add/', AddAttachmentsView.as_view(), name=\"add_attachment\"), path('attachment/remove/', DeleteAttachmentsView.as_view(), name=\"remove_attachment\"), ]", "path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"), path('attachment/add/', AddAttachmentsView.as_view(), name=\"add_attachment\"), path('attachment/remove/',", "'contacts' urlpatterns = [ path('list/', 
ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(), name='add_contact'),", "name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"),", "path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/', RemoveContactView.as_view(), name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/',", "UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name =", "GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(),", "contacts.views import ( ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView,", "import ( ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView,", "ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/', RemoveContactView.as_view(), name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(),", "urlpatterns = [ path('list/', ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/',", "DeleteAttachmentsView) app_name = 'contacts' urlpatterns = [ path('list/', ContactsListView.as_view(), name='list'),", "name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/', 
RemoveContactView.as_view(), name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"),", "from contacts.views import ( ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView,", "AddAttachmentsView, DeleteAttachmentsView) app_name = 'contacts' urlpatterns = [ path('list/', ContactsListView.as_view(),", "path('list/', ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/',", "name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"), path('attachment/add/', AddAttachmentsView.as_view(), name=\"add_attachment\"),", "UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"), path('attachment/add/', AddAttachmentsView.as_view(), name=\"add_attachment\"), path('attachment/remove/', DeleteAttachmentsView.as_view(),", "UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name = 'contacts' urlpatterns = [", "( ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView,", "name=\"edit_contact\"), path('<int:pk>/delete/', RemoveContactView.as_view(), name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"),", "path from contacts.views import ( ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView,", "<filename>contacts/urls.py from django.urls import path from contacts.views import ( ContactsListView,", "= 'contacts' urlpatterns = [ path('list/', 
ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(),", "path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"), path('<int:pk>/edit/', UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/', RemoveContactView.as_view(), name=\"remove_contact\"), path('get/list/',", "[ path('list/', ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name=\"view_contact\"),", "CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView)", "ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView,", "path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"), path('attachment/add/',", "name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"), path('attachment/add/', AddAttachmentsView.as_view(), name=\"add_attachment\"), path('attachment/remove/', DeleteAttachmentsView.as_view(), name=\"remove_attachment\"),", "ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name", "name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view(), name=\"add_comment\"), path('comment/edit/', UpdateCommentView.as_view(), name=\"edit_comment\"), path('comment/remove/', DeleteCommentView.as_view(), name=\"remove_comment\"),", "UpdateContactView.as_view(), name=\"edit_contact\"), path('<int:pk>/delete/', RemoveContactView.as_view(), 
name=\"remove_contact\"), path('get/list/', GetContactsView.as_view(), name=\"get_contacts\"), path('comment/add/', AddCommentView.as_view()," ]
[ "\"\"\"Identification of the remote hostname. Equals ``remote_addr`` if the resolution", "self.get_Direction(direction) return direction.value @property def interface_types(self): \"\"\"Types of interface of", "local_addr(self): \"\"\"Local address IP :type: :class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr) @property def", "and listening) :type: [:class:`TCP4Connection`]\"\"\" ipv6 = property(lambda self: self._get_tcp_ipv6_sockets()) \"\"\"List", "var = windows.com.ImprovedVariant() rules = [] for i in range(nb_rules.value):", "or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``,", ":class:`int`\"\"\" if not self.established: return None return socket.ntohs(self.dwRemotePort) @property def", "(\"dwNumEntries\", DWORD), (\"table\", TCP6Connection * nb_entry), ] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class", "to which apply the rule :type: :class:`unicode` \"\"\" servicename =", "of the rule, values might be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)``", "\"\"\"A TCP6 socket (connected or listening)\"\"\" @staticmethod def _str_ipv6_addr(addr): return", "def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries # Struct", "self.dwLocalAddr closing.dwLocalPort = self.dwLocalPort closing.dwRemoteAddr = self.dwRemoteAddr closing.dwRemotePort = self.dwRemotePort", "enabled :type: :class:`long` \"\"\" cpt = gdef.LONG() self.get_CurrentProfileTypes(cpt) return cpt.value", "{0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self) class", "@property def remote_host(self): \"\"\"Equals to ``self.remote_addr`` for Ipv6\"\"\" return self.remote_addr", 
"rules(self): \"\"\"The rules of the firewall :type: [:class:`FirewallRule`] -- A", "return winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self): if not self.established: return \"<TCP IPV4", "application to which apply the rule :type: :class:`unicode` \"\"\" applicationname", "MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr = self.dwLocalAddr closing.dwLocalPort = self.dwLocalPort closing.dwRemoteAddr = self.dwRemoteAddr", "IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6 socket", "\"\"\"Mask of the profiles currently enabled :type: :class:`long` \"\"\" cpt", ":type: :class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr) @property def remote_addr(self): \"\"\"remote address IP", "= gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return enabled.value class FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule", "if not self.established: return \"<TCP IPV4 Listening socket on {0}:{1}>\".format(self.local_addr,", "``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``, }", "the remote hostname. 
Equals ``remote_addr`` if the resolution fails :type:", "of the firewall :type: [:class:`FirewallRule`] -- A list of rule", "of :class:`long` \"\"\" action = gdef.NET_FW_ACTION() self.get_Action(action) return action.value @property", "return list(t.table) ipv4 = property(lambda self: self._get_tcp_ipv4_sockets()) \"\"\"List of TCP", ":class:`long` \"\"\" protocol = gdef.LONG() self.get_Protocol(protocol) return protocol.value @property def", "windows.com.init() firewall = Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall) return firewall @staticmethod def", "def _str_ipv6_addr(addr): return \":\".join(c.encode('hex') for c in addr) @property def", "to ``self.remote_addr`` for Ipv6\"\"\" return self.remote_addr def close(self): raise NotImplementedError(\"Closing", "gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return enabled.value class FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule of", "== MIB_TCP_STATE_ESTAB @property def remote_port(self): \"\"\":type: :class:`int`\"\"\" if not self.established:", "self.remote_port @property def remote_host(self): \"\"\"Equals to ``self.remote_addr`` for Ipv6\"\"\" return", "ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6) except winproxy.IphlpapiError: pass # Allow", "= (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6) t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer)", "size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6) except winproxy.IphlpapiError: pass", ":class:`int` \"\"\" try: return socket.getservbyport(self.remote_port, 'tcp') except socket.error: return self.remote_port", "FirewallRule() idisp = var.asdispatch idisp.QueryInterface(rule.IID, rule) rules.append(rule) return rules @property", "None return self._str_ipv6_addr(self.ucRemoteAddr) @property def remote_proto(self): \"\"\"Equals to ``self.remote_port`` for", 
"NetFwPolicy2 = windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def firewall(self): \"\"\"The firewall of the", "buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET) t =", "self.remote_addr def close(self): \"\"\"Close the connection <require elevated process>\"\"\" closing", "remote_host(self): \"\"\"Identification of the remote hostname. Equals ``remote_addr`` if the", "return socket.gethostbyaddr(self.remote_addr) except socket.error: return self.remote_addr def close(self): \"\"\"Close the", "@property def remote_addr(self): \"\"\"remote address IP (x.x.x.x) :type: :class:`str`\"\"\" if", "\"\"\" direction = gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return direction.value @property def interface_types(self):", "enabled(self): \"\"\"``True`` if rule is enabled\"\"\" enabled = gdef.VARIANT_BOOL() self.get_Enabled(enabled)", "@property def name(self): \"\"\"Name of the rule :type: :class:`unicode` \"\"\"", "def local_port(self): \"\"\":type: :class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local", "@property def protocol(self): \"\"\"Protocol to which apply the rule :type:", "@property def local_addr(self): \"\"\"Local address IP (x.x.x.x) :type: :class:`str`\"\"\" return", "{0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV6 Connection {0}:{1} -> {2}:{3}>\".format(self.local_addr, self.local_port,", "firewall :type: [:class:`FirewallRule`] -- A list of rule \"\"\" ifw_rules", "= cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules = gdef.LONG() ifw_rules.get_Count(nb_rules) unknw = cominterfaces.IUnknown()", "``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``, } :type: :class:`dict`", "def remote_proto(self): \"\"\"Equals to ``self.remote_port`` for Ipv6\"\"\" return self.remote_port @property", "\"\"\" try: return 
socket.getservbyport(self.remote_port, 'tcp') except socket.error: return self.remote_port @property", "icmp_type_and_code = gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value def __repr__(self): return u'<{0}", "winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET) except winproxy.IphlpapiError: pass # Allow us to", "windows.generated_def.windef import * class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4 socket (connected or", "self.local_port, self.remote_addr, self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry =", "\"\"\" cpt = gdef.LONG() self.get_CurrentProfileTypes(cpt) return cpt.value @property def enabled(self):", "c in addr) @property def established(self): \"\"\"``True`` if connection is", "in addr) @property def established(self): \"\"\"``True`` if connection is established", "remote_address = gdef.BSTR() self.get_RemoteAddresses(remote_address) return remote_address.value @property def direction(self): \"\"\"Direction", "``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of :class:`long` \"\"\" action = gdef.NET_FW_ACTION()", "listening) :type: [:class:`TCP4Connection`]\"\"\" ipv6 = property(lambda self: self._get_tcp_ipv6_sockets()) \"\"\"List of", "return self._str_ipv6_addr(self.ucRemoteAddr) @property def remote_proto(self): \"\"\"Equals to ``self.remote_port`` for Ipv6\"\"\"", "rule, values might be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of", "class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP6Connection *", "rules of the firewall :type: [:class:`FirewallRule`] -- A list of", "description.value @property def application_name(self): \"\"\"Name of the application to which", "direction(self): 
\"\"\"Direction of the rule, values might be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)``", "windows.com import interfaces as cominterfaces from windows.generated_def.winstructs import * from", "\"\"\"The rules of the firewall :type: [:class:`FirewallRule`] -- A list", ":type: :class:`unicode` \"\"\" remote_address = gdef.BSTR() self.get_RemoteAddresses(remote_address) return remote_address.value @property", "def remote_port(self): \"\"\"Remote port of the rule :type: :class:`unicode` \"\"\"", "IP (x.x.x.x) :type: :class:`str`\"\"\" return socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property def remote_addr(self):", "in profiles} def enabled_for_profile_type(self, profile_type): enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled)", "system :type: :class:`Firewall` \"\"\" windows.com.init() firewall = Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall)", "def remote_addr(self): \"\"\"remote address IP (x.x.x.x) :type: :class:`str`\"\"\" if not", "return socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address IP (x.x.x.x) :type:", "\"\"\"Remote address of the rule :type: :class:`unicode` \"\"\" remote_address =", "or :class:`int` \"\"\" try: return socket.gethostbyaddr(self.remote_addr) except socket.error: return self.remote_addr", "\"\"\" profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return {prof: self.enabled_for_profile_type(prof) for", "rule :type: :class:`unicode` \"\"\" local_port = gdef.BSTR() self.get_LocalPorts(local_port) return local_port.value", "(x.x.x.x) :type: :class:`str`\"\"\" if not self.established: return None return socket.inet_ntoa(struct.pack(\"<I\",", "return remote_port.value @property def action(self): \"\"\"Action of the rule, values", "windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def firewall(self): \"\"\"The firewall of the system :type:", "rule :type: 
:class:`unicode` \"\"\" interface_type = gdef.BSTR() self.get_InterfaceTypes(interface_type) return interface_type.value", "enabled.value @property def grouping(self): \"\"\"Grouping of the rule :type: :class:`unicode`", "winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6) except winproxy.IphlpapiError: pass # Allow us to", "self.established: return \"<TCP IPV4 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return", "-> {s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6 socket (connected or listening)\"\"\"", "count) if not count.value: break rule = FirewallRule() idisp =", "t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table) ipv4 = property(lambda self: self._get_tcp_ipv4_sockets())", "ctypes.byref(size), ulAf=AF_INET) t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table) @staticmethod def _get_tcp_ipv6_sockets():", ":class:`unicode` \"\"\" servicename = gdef.BSTR() self.get_ServiceName(servicename) return servicename.value @property def", "u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace') class Network(object): NetFwPolicy2 = windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property", ":type: :class:`unicode` \"\"\" local_address = gdef.BSTR() self.get_LocalAddresses(local_address) return local_address.value @property", "if not self.established: return \"<TCP IPV6 Listening socket on {0}:{1}>\".format(self.local_addr,", "self.remote_port @property def remote_host(self): \"\"\"Identification of the remote hostname. Equals", "port. 
Equals ``remote_port`` if no protocol is associated with it.", "= MIB_TCPROW() closing.dwState = MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr = self.dwLocalAddr closing.dwLocalPort =", "IPv4 socket (connection and listening) :type: [:class:`TCP4Connection`]\"\"\" ipv6 = property(lambda", "list(t.table) @staticmethod def _get_tcp_ipv6_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size),", "gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return direction.value @property def interface_types(self): \"\"\"Types of interface", ":class:`unicode` \"\"\" local_address = gdef.BSTR() self.get_LocalAddresses(local_address) return local_address.value @property def", "associated with it. :type: :class:`str` or :class:`int` \"\"\" try: return", "protocol.value @property def local_address(self): \"\"\"Local address of the rule :type:", ":class:`unicode` \"\"\" local_port = gdef.BSTR() self.get_LocalPorts(local_port) return local_port.value @property def", "gdef.BSTR() self.get_RemotePorts(grouping) return grouping.value @property def icmp_type_and_code(self): icmp_type_and_code = gdef.BSTR()", ":type: :class:`long` \"\"\" protocol = gdef.LONG() self.get_Protocol(protocol) return protocol.value @property", "definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP6Connection", "= ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET) except winproxy.IphlpapiError: pass #", "class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6 socket (connected or listening)\"\"\" @staticmethod def", "from windows import winproxy import windows.generated_def as gdef from windows.com", "self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self): if not self.established: return \"<TCP", "raise NotImplementedError(\"Closing IPV6 connection non implemented\") def __repr__(self): if not", "gdef.BSTR() 
self.get_RemotePorts(remote_port) return remote_port.value @property def action(self): \"\"\"Action of the", "ulAf=AF_INET6) except winproxy.IphlpapiError: pass # Allow us to set size", "return socket.ntohs(self.dwRemotePort) @property def local_port(self): \"\"\":type: :class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property", "\"\"\":type: :class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address IP", "import * from windows.generated_def.windef import * class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4", "def remote_port(self): \"\"\":type: :class:`int`\"\"\" if not self.established: return None return", "self.get_ApplicationName(applicationname) return applicationname.value @property def service_name(self): \"\"\"Name of the service", "* from windows.generated_def.windef import * class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4 socket", "= property(lambda self: self._get_tcp_ipv6_sockets()) \"\"\"List of TCP IPv6 socket (connection", "interface of the rule :type: :class:`unicode` \"\"\" interface_type = gdef.BSTR()", ":type: :class:`unicode` \"\"\" grouping = gdef.BSTR() self.get_RemotePorts(grouping) return grouping.value @property", "remote_address(self): \"\"\"Remote address of the rule :type: :class:`unicode` \"\"\" remote_address", ":class:`Firewall` \"\"\" windows.com.init() firewall = Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall) return firewall", "profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return {prof: self.enabled_for_profile_type(prof) for prof", "self._get_tcp_ipv4_sockets()) \"\"\"List of TCP IPv4 socket (connection and listening) :type:", "def enabled_for_profile_type(self, profile_type): enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return enabled.value", "count.value: break rule = FirewallRule() idisp = var.asdispatch idisp.QueryInterface(rule.IID, rule)", 
"\"\"\"A rule of the firewall\"\"\" @property def name(self): \"\"\"Name of", "rule :type: :class:`unicode` \"\"\" local_address = gdef.BSTR() self.get_LocalAddresses(local_address) return local_address.value", "return local_address.value @property def remote_address(self): \"\"\"Remote address of the rule", "of the service to which apply the rule :type: :class:`unicode`", "break rule = FirewallRule() idisp = var.asdispatch idisp.QueryInterface(rule.IID, rule) rules.append(rule)", "= gdef.NET_FW_ACTION() self.get_Action(action) return action.value @property def enabled(self): \"\"\"``True`` if", "cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant = cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant) count = gdef.ULONG()", "from windows.generated_def.winstructs import * from windows.generated_def.windef import * class TCP4Connection(MIB_TCPROW_OWNER_PID):", "associated with the remote port. Equals ``remote_port`` if no protocol", "nb_entry = x.dwNumEntries # Struct _MIB_TCP6TABLE_OWNER_PID definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_", "\"\"\" interface_type = gdef.BSTR() self.get_InterfaceTypes(interface_type) return interface_type.value @property def local_port(self):", "self.established: return None return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property def remote_proto(self): \"\"\"Identification", ":class:`str` or :class:`int` \"\"\" try: return socket.gethostbyaddr(self.remote_addr) except socket.error: return", "def interface_types(self): \"\"\"Types of interface of the rule :type: :class:`unicode`", "of the rule :type: :class:`unicode` \"\"\" remote_address = gdef.BSTR() self.get_RemoteAddresses(remote_address)", "= cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant) count = gdef.ULONG() var = windows.com.ImprovedVariant()", "windows firewall\"\"\" @property def rules(self): \"\"\"The rules of the firewall", "rule is enabled\"\"\" enabled = 
gdef.VARIANT_BOOL() self.get_Enabled(enabled) return enabled.value @property", "size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6) t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table) ipv4", "TCP4 socket (connected or listening)\"\"\" @property def established(self): \"\"\"``True`` if", "of the rule :type: :class:`unicode` \"\"\" grouping = gdef.BSTR() self.get_RemotePorts(grouping)", "return grouping.value @property def icmp_type_and_code(self): icmp_type_and_code = gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return", "the rule :type: :class:`unicode` \"\"\" local_address = gdef.BSTR() self.get_LocalAddresses(local_address) return", "description = gdef.BSTR() self.get_Description(description) return description.value @property def application_name(self): \"\"\"Name", "* class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4 socket (connected or listening)\"\"\" @property", "servicename = gdef.BSTR() self.get_ServiceName(servicename) return servicename.value @property def protocol(self): \"\"\"Protocol", "self.get_InterfaceTypes(interface_type) return interface_type.value @property def local_port(self): \"\"\"Local port of the", "the rule :type: :class:`long` \"\"\" protocol = gdef.LONG() self.get_Protocol(protocol) return", ":class:`int` \"\"\" try: return socket.gethostbyaddr(self.remote_addr) except socket.error: return self.remote_addr def", "self.get_LocalPorts(local_port) return local_port.value @property def remote_port(self): \"\"\"Remote port of the", "\"\"\"Local address IP :type: :class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr) @property def remote_addr(self):", ":type: :class:`str` or :class:`int` \"\"\" try: return socket.gethostbyaddr(self.remote_addr) except socket.error:", "Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall) return firewall @staticmethod def _get_tcp_ipv4_sockets(): size =", "try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), 
ulAf=AF_INET6) except winproxy.IphlpapiError: pass # Allow us", "of the system :type: :class:`Firewall` \"\"\" windows.com.init() firewall = Firewall()", "# Allow us to set size to the needed value", "return description.value @property def application_name(self): \"\"\"Name of the application to", "of the rule :type: :class:`unicode` \"\"\" interface_type = gdef.BSTR() self.get_InterfaceTypes(interface_type)", "TCP IPv4 socket (connection and listening) :type: [:class:`TCP4Connection`]\"\"\" ipv6 =", "established else it's a listening socket\"\"\" return self.dwState == MIB_TCP_STATE_ESTAB", "rule = FirewallRule() idisp = var.asdispatch idisp.QueryInterface(rule.IID, rule) rules.append(rule) return", "of the profiles currently enabled :type: :class:`long` \"\"\" cpt =", "not self.established: return \"<TCP IPV6 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port)", "the remote port. Equals ``remote_port`` if no protocol is associated", "the protocol associated with the remote port. 
Equals ``remote_port`` if", "@property def remote_port(self): \"\"\":type: :class:`int`\"\"\" if not self.established: return None", "= [] for i in range(nb_rules.value): pVariant.Next(1, var, count) if", "``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or", "get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries # Struct _MIB_TCP6TABLE_OWNER_PID", "of the rule :type: :class:`unicode` \"\"\" local_port = gdef.BSTR() self.get_LocalPorts(local_port)", "= self.dwLocalPort closing.dwRemoteAddr = self.dwRemoteAddr closing.dwRemotePort = self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing))", "socket.gethostbyaddr(self.remote_addr) except socket.error: return self.remote_addr def close(self): \"\"\"Close the connection", "[gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return {prof: self.enabled_for_profile_type(prof) for prof in profiles}", "return name.value @property def description(self): \"\"\"Description of the rule :type:", "DWORD), (\"table\", TCP4Connection * nb_entry), ] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer):", "profiles currently enabled :type: :class:`long` \"\"\" cpt = gdef.LONG() self.get_CurrentProfileTypes(cpt)", "gdef.BSTR() self.get_Description(description) return description.value @property def application_name(self): \"\"\"Name of the", "def local_port(self): \"\"\"Local port of the rule :type: :class:`unicode` \"\"\"", "Ipv6\"\"\" return self.remote_addr def close(self): raise NotImplementedError(\"Closing IPV6 connection non", "of the rule :type: :class:`unicode` \"\"\" remote_port = gdef.BSTR() self.get_RemotePorts(remote_port)", "= gdef.BSTR() self.get_LocalPorts(local_port) return local_port.value 
@property def remote_port(self): \"\"\"Remote port", "(\"dwNumEntries\", DWORD), (\"table\", TCP4Connection * nb_entry), ] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def", "unknw = cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant = cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant) count", "Allow us to set size to the needed value buffer", "close(self): raise NotImplementedError(\"Closing IPV6 connection non implemented\") def __repr__(self): if", ":class:`unicode` \"\"\" interface_type = gdef.BSTR() self.get_InterfaceTypes(interface_type) return interface_type.value @property def", "to set size to the needed value buffer = (ctypes.c_char", "ulAf=AF_INET) t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table) @staticmethod def _get_tcp_ipv6_sockets(): size", "needed value buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6)", "or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``, } :type: :class:`dict` \"\"\"", "return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property def remote_proto(self): \"\"\"Identification of the protocol", "'tcp') except socket.error: return self.remote_port @property def remote_host(self): \"\"\"Identification of", "A list of rule \"\"\" ifw_rules = cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules", "nb_entry = x.dwNumEntries class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ = [ (\"dwNumEntries\", DWORD),", "from windows.generated_def.windef import * class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4 socket (connected", "@property def local_port(self): \"\"\":type: :class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property def local_addr(self):", "return self.remote_port @property def remote_host(self): \"\"\"Identification of the remote hostname.", "enabled = gdef.VARIANT_BOOL() self.get_Enabled(enabled) 
return enabled.value @property def grouping(self): \"\"\"Grouping", "@property def firewall(self): \"\"\"The firewall of the system :type: :class:`Firewall`", "port of the rule :type: :class:`unicode` \"\"\" remote_port = gdef.BSTR()", "socket\"\"\" return self.dwState == MIB_TCP_STATE_ESTAB @property def remote_port(self): \"\"\":type: :class:`int`\"\"\"", "import windows.generated_def as gdef from windows.com import interfaces as cominterfaces", "local_port(self): \"\"\"Local port of the rule :type: :class:`unicode` \"\"\" local_port", "winproxy.IphlpapiError: pass # Allow us to set size to the", "def protocol(self): \"\"\"Protocol to which apply the rule :type: :class:`long`", "description(self): \"\"\"Description of the rule :type: :class:`unicode` \"\"\" description =", "cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules = gdef.LONG() ifw_rules.get_Count(nb_rules) unknw = cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw)", "to the needed value buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer,", "x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ =", "\"\"\":type: :class:`int`\"\"\" if not self.established: return None return socket.ntohs(self.dwRemotePort) @property", "ifw_rules.get_Count(nb_rules) unknw = cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant = cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant)", "enabled(self): \"\"\"A maping of the active firewall profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``:", ":class:`unicode` \"\"\" applicationname = gdef.BSTR() self.get_ApplicationName(applicationname) return applicationname.value @property def", "gdef.LONG() self.get_Protocol(protocol) return protocol.value @property def local_address(self): \"\"\"Local address of", "cpt.value @property def enabled(self): \"\"\"A maping of the active 
firewall", "MIB_TCPROW() closing.dwState = MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr = self.dwLocalAddr closing.dwLocalPort = self.dwLocalPort", "self.enabled_for_profile_type(prof) for prof in profiles} def enabled_for_profile_type(self, profile_type): enabled =", "ctypes.byref(size), ulAf=AF_INET) except winproxy.IphlpapiError: pass # Allow us to set", "protocol(self): \"\"\"Protocol to which apply the rule :type: :class:`long` \"\"\"", "gdef.BSTR() self.get_RemoteAddresses(remote_address) return remote_address.value @property def direction(self): \"\"\"Direction of the", "address IP (x.x.x.x) :type: :class:`str`\"\"\" if not self.established: return None", "needed value buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET)", "{prof: self.enabled_for_profile_type(prof) for prof in profiles} def enabled_for_profile_type(self, profile_type): enabled", "firewall) return firewall @staticmethod def _get_tcp_ipv4_sockets(): size = ctypes.c_uint(0) try:", "@property def remote_host(self): \"\"\"Identification of the remote hostname. 
Equals ``remote_addr``", "ifw_rules.get__NewEnum(unknw) pVariant = cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant) count = gdef.ULONG() var", "= gdef.BSTR() self.get_ApplicationName(applicationname) return applicationname.value @property def service_name(self): \"\"\"Name of", "= windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ = [", "\"\"\"Name of the application to which apply the rule :type:", "local_addr(self): \"\"\"Local address IP (x.x.x.x) :type: :class:`str`\"\"\" return socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr))", ":class:`unicode` \"\"\" remote_address = gdef.BSTR() self.get_RemoteAddresses(remote_address) return remote_address.value @property def", "rule :type: :class:`unicode` \"\"\" name = gdef.BSTR() self.get_Name(name) return name.value", "self.get_Description(description) return description.value @property def application_name(self): \"\"\"Name of the application", "of interface of the rule :type: :class:`unicode` \"\"\" interface_type =", "ulAf=AF_INET6) t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table) ipv4 = property(lambda self:", "it's a listening socket\"\"\" return self.dwState == MIB_TCP_STATE_ESTAB @property def", "it. 
:type: :class:`str` or :class:`int` \"\"\" try: return socket.getservbyport(self.remote_port, 'tcp')", "not self.established: return \"<TCP IPV4 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port)", "the firewall :type: [:class:`FirewallRule`] -- A list of rule \"\"\"", "interface_types(self): \"\"\"Types of interface of the rule :type: :class:`unicode` \"\"\"", "= gdef.BSTR() self.get_Description(description) return description.value @property def application_name(self): \"\"\"Name of", "``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of :class:`long` \"\"\" action = gdef.NET_FW_ACTION() self.get_Action(action) return", "IPV6 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV6 Connection", "self.dwLocalAddr)) @property def remote_addr(self): \"\"\"remote address IP (x.x.x.x) :type: :class:`str`\"\"\"", "buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6) t =", "@staticmethod def _str_ipv6_addr(addr): return \":\".join(c.encode('hex') for c in addr) @property", "active firewall profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True``", "return socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property def remote_addr(self): \"\"\"remote address IP (x.x.x.x)", "def __repr__(self): if not self.established: return \"<TCP IPV4 Listening socket", "def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure):", "= windows.com.ImprovedVariant() rules = [] for i in range(nb_rules.value): pVariant.Next(1,", "the firewall\"\"\" @property def name(self): \"\"\"Name of the rule :type:", "= gdef.BSTR() self.get_RemoteAddresses(remote_address) return remote_address.value @property def 
direction(self): \"\"\"Direction of", "interface_type = gdef.BSTR() self.get_InterfaceTypes(interface_type) return interface_type.value @property def local_port(self): \"\"\"Local", "``self.remote_addr`` for Ipv6\"\"\" return self.remote_addr def close(self): raise NotImplementedError(\"Closing IPV6", "list(t.table) ipv4 = property(lambda self: self._get_tcp_ipv4_sockets()) \"\"\"List of TCP IPv4", "rule :type: :class:`unicode` \"\"\" grouping = gdef.BSTR() self.get_RemotePorts(grouping) return grouping.value", "= gdef.BSTR() self.get_ServiceName(servicename) return servicename.value @property def protocol(self): \"\"\"Protocol to", "\"<TCP IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6", "rule :type: :class:`unicode` \"\"\" applicationname = gdef.BSTR() self.get_ApplicationName(applicationname) return applicationname.value", "def enabled(self): \"\"\"``True`` if rule is enabled\"\"\" enabled = gdef.VARIANT_BOOL()", ":class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address IP :type:", "self._get_tcp_ipv6_sockets()) \"\"\"List of TCP IPv6 socket (connection and listening) :type:", "socket.ntohs(self.dwRemotePort) @property def local_port(self): \"\"\":type: :class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property def", "Connection {0}:{1} -> {2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x", "return cpt.value @property def enabled(self): \"\"\"A maping of the active", "the rule :type: :class:`unicode` \"\"\" grouping = gdef.BSTR() self.get_RemotePorts(grouping) return", "socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV6 Connection {0}:{1} ->", "except socket.error: return self.remote_addr def close(self): \"\"\"Close the connection <require", "icmp_type_and_code.value def 
__repr__(self): return u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace') class Network(object):", "address of the rule :type: :class:`unicode` \"\"\" local_address = gdef.BSTR()", "\"<TCP IPV6 Connection {0}:{1} -> {2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port) def", "gdef.LONG() self.get_CurrentProfileTypes(cpt) return cpt.value @property def enabled(self): \"\"\"A maping of", "local_address.value @property def remote_address(self): \"\"\"Remote address of the rule :type:", "rule :type: :class:`unicode` \"\"\" remote_port = gdef.BSTR() self.get_RemotePorts(remote_port) return remote_port.value", "winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET) t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table) @staticmethod def", "not count.value: break rule = FirewallRule() idisp = var.asdispatch idisp.QueryInterface(rule.IID,", "self.get_Action(action) return action.value @property def enabled(self): \"\"\"``True`` if rule is", "None return socket.ntohs(self.dwRemotePort) @property def local_port(self): \"\"\":type: :class:`int`\"\"\" return socket.ntohs(self.dwLocalPort)", "remote_address.value @property def direction(self): \"\"\"Direction of the rule, values might", "cominterfaces from windows.generated_def.winstructs import * from windows.generated_def.windef import * class", "Ipv6\"\"\" return self.remote_port @property def remote_host(self): \"\"\"Equals to ``self.remote_addr`` for", "\"\"\" ifw_rules = cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules = gdef.LONG() ifw_rules.get_Count(nb_rules) unknw", "self.get_Protocol(protocol) return protocol.value @property def local_address(self): \"\"\"Local address of the", "\":\".join(c.encode('hex') for c in addr) @property def established(self): \"\"\"``True`` if", "nb_entry), ] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class 
Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows firewall\"\"\" @property", "= x.dwNumEntries # Struct _MIB_TCP6TABLE_OWNER_PID definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ =", "_GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries #", "\"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace') class Network(object): NetFwPolicy2 = windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def", "interface_type.value @property def local_port(self): \"\"\"Local port of the rule :type:", "remote hostname. Equals ``remote_addr`` if the resolution fails :type: :class:`str`", "self.established: return None return self._str_ipv6_addr(self.ucRemoteAddr) @property def remote_proto(self): \"\"\"Equals to", "of :class:`long` \"\"\" direction = gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return direction.value @property", "\"\"\" description = gdef.BSTR() self.get_Description(description) return description.value @property def application_name(self):", "service to which apply the rule :type: :class:`unicode` \"\"\" servicename", "closing.dwLocalPort = self.dwLocalPort closing.dwRemoteAddr = self.dwRemoteAddr closing.dwRemotePort = self.dwRemotePort return", "or listening)\"\"\" @property def established(self): \"\"\"``True`` if connection is established", "@property def icmp_type_and_code(self): icmp_type_and_code = gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value def", "def close(self): \"\"\"Close the connection <require elevated process>\"\"\" closing =", "gdef.VARIANT_BOOL() self.get_Enabled(enabled) return enabled.value @property def grouping(self): \"\"\"Grouping of the", "\"\"\"Name of the rule :type: :class:`unicode` \"\"\" name = gdef.BSTR()", "Equals ``remote_port`` 
if no protocol is associated with it. :type:", "return rules @property def current_profile_types(self): \"\"\"Mask of the profiles currently", "def current_profile_types(self): \"\"\"Mask of the profiles currently enabled :type: :class:`long`", "TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6 socket (connected or listening)\"\"\" @staticmethod def _str_ipv6_addr(addr):", "self._str_ipv6_addr(self.ucLocalAddr) @property def remote_addr(self): \"\"\"remote address IP :type: :class:`str`\"\"\" if", "``True`` or ``False``, } :type: :class:`dict` \"\"\" profiles = [gdef.NET_FW_PROFILE2_DOMAIN,", "rules = [] for i in range(nb_rules.value): pVariant.Next(1, var, count)", "the rule :type: :class:`unicode` \"\"\" name = gdef.BSTR() self.get_Name(name) return", "_str_ipv6_addr(addr): return \":\".join(c.encode('hex') for c in addr) @property def established(self):", "@property def remote_addr(self): \"\"\"remote address IP :type: :class:`str`\"\"\" if not", "} :type: :class:`dict` \"\"\" profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return", "which apply the rule :type: :class:`unicode` \"\"\" servicename = gdef.BSTR()", "* ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of :class:`long` \"\"\" direction =", "socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address IP :type: :class:`str`\"\"\" return", "socket.getservbyport(self.remote_port, 'tcp') except socket.error: return self.remote_port @property def remote_host(self): \"\"\"Identification", "= [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return {prof: self.enabled_for_profile_type(prof) for prof in", "if the resolution fails :type: :class:`str` or :class:`int` \"\"\" try:", "current_profile_types(self): \"\"\"Mask of the profiles currently enabled :type: :class:`long` \"\"\"", "self.get_RemotePorts(grouping) return 
grouping.value @property def icmp_type_and_code(self): icmp_type_and_code = gdef.BSTR() self.get_RemotePorts(icmp_type_and_code)", "protocol associated with the remote port. Equals ``remote_port`` if no", "socket.error: return self.remote_addr def close(self): \"\"\"Close the connection <require elevated", "= self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self): if not self.established: return", "pVariant.Next(1, var, count) if not count.value: break rule = FirewallRule()", "list of rule \"\"\" ifw_rules = cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules =", "def __repr__(self): if not self.established: return \"<TCP IPV6 Listening socket", "closing.dwRemoteAddr = self.dwRemoteAddr closing.dwRemotePort = self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self):", "try: return socket.gethostbyaddr(self.remote_addr) except socket.error: return self.remote_addr def close(self): \"\"\"Close", "IPV4 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV4 Connection", "grouping.value @property def icmp_type_and_code(self): icmp_type_and_code = gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value", ":class:`unicode` \"\"\" grouping = gdef.BSTR() self.get_RemotePorts(grouping) return grouping.value @property def", "def local_addr(self): \"\"\"Local address IP (x.x.x.x) :type: :class:`str`\"\"\" return socket.inet_ntoa(struct.pack(\"<I\",", "count = gdef.ULONG() var = windows.com.ImprovedVariant() rules = [] for", "return self._str_ipv6_addr(self.ucLocalAddr) @property def remote_addr(self): \"\"\"remote address IP :type: :class:`str`\"\"\"", ":class:`str`\"\"\" if not self.established: return None return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property", "return u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace') class Network(object): NetFwPolicy2 = 
windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\")", "def remote_host(self): \"\"\"Identification of the remote hostname. Equals ``remote_addr`` if", "ipv6 = property(lambda self: self._get_tcp_ipv6_sockets()) \"\"\"List of TCP IPv6 socket", "or ``False``, } :type: :class:`dict` \"\"\" profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE,", "def direction(self): \"\"\"Direction of the rule, values might be: *", "application_name(self): \"\"\"Name of the application to which apply the rule", "return \"<TCP IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A", "DWORD), (\"table\", TCP6Connection * nb_entry), ] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class Firewall(cominterfaces.INetFwPolicy2):", "listening)\"\"\" @property def established(self): \"\"\"``True`` if connection is established else", "remote_port.value @property def action(self): \"\"\"Action of the rule, values might", "= get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table) @staticmethod def _get_tcp_ipv6_sockets(): size = ctypes.c_uint(0)", "Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV6 Connection {0}:{1}", "or listening)\"\"\" @staticmethod def _str_ipv6_addr(addr): return \":\".join(c.encode('hex') for c in", "= gdef.LONG() self.get_Protocol(protocol) return protocol.value @property def local_address(self): \"\"\"Local address", "direction = gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return direction.value @property def interface_types(self): \"\"\"Types", "of the remote hostname. 
Equals ``remote_addr`` if the resolution fails", "rule, values might be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of", "the profiles currently enabled :type: :class:`long` \"\"\" cpt = gdef.LONG()", "ifw_rules = cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules = gdef.LONG() ifw_rules.get_Count(nb_rules) unknw =", "protocol = gdef.LONG() self.get_Protocol(protocol) return protocol.value @property def local_address(self): \"\"\"Local", "* ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of :class:`long` \"\"\" direction = gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction)", "self.get_Enabled(enabled) return enabled.value @property def grouping(self): \"\"\"Grouping of the rule", "of TCP IPv4 socket (connection and listening) :type: [:class:`TCP4Connection`]\"\"\" ipv6", "addr) @property def established(self): \"\"\"``True`` if connection is established else", "def remote_address(self): \"\"\"Remote address of the rule :type: :class:`unicode` \"\"\"", "return \"<TCP IPV4 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP", "the system :type: :class:`Firewall` \"\"\" windows.com.init() firewall = Firewall() windows.com.create_instance(self.NetFwPolicy2,", "address IP :type: :class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr) @property def remote_addr(self): \"\"\"remote", "profiles} def enabled_for_profile_type(self, profile_type): enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return", "for Ipv6\"\"\" return self.remote_port @property def remote_host(self): \"\"\"Equals to ``self.remote_addr``", "_get_tcp_ipv4_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET) except winproxy.IphlpapiError:", "socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV4 Connection {s.local_addr}:{s.local_port} ->", "return 
_GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows firewall\"\"\" @property def rules(self):", "\"\"\"Local address of the rule :type: :class:`unicode` \"\"\" local_address =", "= windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries # Struct _MIB_TCP6TABLE_OWNER_PID definitions class", "self.get_LocalAddresses(local_address) return local_address.value @property def remote_address(self): \"\"\"Remote address of the", ":class:`str`\"\"\" return socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property def remote_addr(self): \"\"\"remote address IP", "return servicename.value @property def protocol(self): \"\"\"Protocol to which apply the", "``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of :class:`long` \"\"\" direction = gdef.NET_FW_RULE_DIRECTION()", "def local_addr(self): \"\"\"Local address IP :type: :class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr) @property", "connection <require elevated process>\"\"\" closing = MIB_TCPROW() closing.dwState = MIB_TCP_STATE_DELETE_TCB", "return None return socket.ntohs(self.dwRemotePort) @property def local_port(self): \"\"\":type: :class:`int`\"\"\" return", "return protocol.value @property def local_address(self): \"\"\"Local address of the rule", "return socket.getservbyport(self.remote_port, 'tcp') except socket.error: return self.remote_port @property def remote_host(self):", "of the rule, values might be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)``", "\"\"\"remote address IP (x.x.x.x) :type: :class:`str`\"\"\" if not self.established: return", "self.local_port) return \"<TCP IPV6 Connection {0}:{1} -> {2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr,", "rules @property def current_profile_types(self): \"\"\"Mask of the profiles currently enabled", 
"the rule :type: :class:`unicode` \"\"\" description = gdef.BSTR() self.get_Description(description) return", ":type: [:class:`FirewallRule`] -- A list of rule \"\"\" ifw_rules =", "might be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of :class:`long` \"\"\"", "which apply the rule :type: :class:`long` \"\"\" protocol = gdef.LONG()", "class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP4Connection *", "remote_port(self): \"\"\"Remote port of the rule :type: :class:`unicode` \"\"\" remote_port", "``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True``", "set size to the needed value buffer = (ctypes.c_char *", "return firewall @staticmethod def _get_tcp_ipv4_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None,", "address of the rule :type: :class:`unicode` \"\"\" remote_address = gdef.BSTR()", "self._str_ipv6_addr(self.ucRemoteAddr) @property def remote_proto(self): \"\"\"Equals to ``self.remote_port`` for Ipv6\"\"\" return", "IP (x.x.x.x) :type: :class:`str`\"\"\" if not self.established: return None return", "@property def remote_address(self): \"\"\"Remote address of the rule :type: :class:`unicode`", "firewall of the system :type: :class:`Firewall` \"\"\" windows.com.init() firewall =", "-- A list of rule \"\"\" ifw_rules = cominterfaces.INetFwRules() self.get_Rules(ifw_rules)", "import socket import struct from windows import winproxy import windows.generated_def", "= gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return direction.value @property def interface_types(self): \"\"\"Types of", "from windows.com import interfaces as cominterfaces from windows.generated_def.winstructs import *", "x.dwNumEntries # Struct 
_MIB_TCP6TABLE_OWNER_PID definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ = [", "icmp_type_and_code(self): icmp_type_and_code = gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value def __repr__(self): return", "address IP :type: :class:`str`\"\"\" if not self.established: return None return", "[ (\"dwNumEntries\", DWORD), (\"table\", TCP4Connection * nb_entry), ] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer)", "firewall\"\"\" @property def rules(self): \"\"\"The rules of the firewall :type:", "winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self): if not self.established: return \"<TCP IPV4 Listening", "self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value def __repr__(self): return u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace')", "the resolution fails :type: :class:`str` or :class:`int` \"\"\" try: return", "import * class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4 socket (connected or listening)\"\"\"", ":type: :class:`str`\"\"\" if not self.established: return None return self._str_ipv6_addr(self.ucRemoteAddr) @property", "socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property def remote_addr(self): \"\"\"remote address IP (x.x.x.x) :type:", ":type: :class:`Firewall` \"\"\" windows.com.init() firewall = Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall) return", "@property def description(self): \"\"\"Description of the rule :type: :class:`unicode` \"\"\"", "winproxy import windows.generated_def as gdef from windows.com import interfaces as", "* ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of :class:`long` \"\"\" action = gdef.NET_FW_ACTION() self.get_Action(action)", "enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return enabled.value class FirewallRule(cominterfaces.INetFwRule): \"\"\"A", "to which apply the rule :type: 
:class:`unicode` \"\"\" applicationname =", "\"\"\" action = gdef.NET_FW_ACTION() self.get_Action(action) return action.value @property def enabled(self):", "* nb_entry), ] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows firewall\"\"\"", "def __repr__(self): return u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace') class Network(object): NetFwPolicy2", "errors='backslashreplace') class Network(object): NetFwPolicy2 = windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def firewall(self): \"\"\"The", "closing.dwState = MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr = self.dwLocalAddr closing.dwLocalPort = self.dwLocalPort closing.dwRemoteAddr", "def _get_tcp_ipv6_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6) except", "``False``, } :type: :class:`dict` \"\"\" profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC]", "self.get_CurrentProfileTypes(cpt) return cpt.value @property def enabled(self): \"\"\"A maping of the", "@property def current_profile_types(self): \"\"\"Mask of the profiles currently enabled :type:", "windows.com.create_instance(self.NetFwPolicy2, firewall) return firewall @staticmethod def _get_tcp_ipv4_sockets(): size = ctypes.c_uint(0)", "``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``, } :type: :class:`dict` \"\"\" profiles", "rule) rules.append(rule) return rules @property def current_profile_types(self): \"\"\"Mask of the", "on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV6 Connection {0}:{1} -> {2}:{3}>\".format(self.local_addr,", "(ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET) t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return", "enabled) return enabled.value 
class FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule of the firewall\"\"\"", ":type: :class:`unicode` \"\"\" servicename = gdef.BSTR() self.get_ServiceName(servicename) return servicename.value @property", "rule of the firewall\"\"\" @property def name(self): \"\"\"Name of the", "name.value @property def description(self): \"\"\"Description of the rule :type: :class:`unicode`", "ctypes.byref(size), ulAf=AF_INET6) t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table) ipv4 = property(lambda", "(\"table\", TCP4Connection * nb_entry), ] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x", "\"\"\"A TCP4 socket (connected or listening)\"\"\" @property def established(self): \"\"\"``True``", "of the firewall\"\"\" @property def name(self): \"\"\"Name of the rule", "the connection <require elevated process>\"\"\" closing = MIB_TCPROW() closing.dwState =", "_GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP6Connection * nb_entry),", "@property def action(self): \"\"\"Action of the rule, values might be:", "of the rule :type: :class:`unicode` \"\"\" name = gdef.BSTR() self.get_Name(name)", "_GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows firewall\"\"\" @property def rules(self): \"\"\"The", "return enabled.value @property def grouping(self): \"\"\"Grouping of the rule :type:", "= x.dwNumEntries class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\",", "subclass of :class:`long` \"\"\" direction = gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return direction.value", "var.asdispatch idisp.QueryInterface(rule.IID, rule) rules.append(rule) return rules @property def current_profile_types(self): \"\"\"Mask", "var, count) if not count.value: break rule = FirewallRule() idisp", "remote_addr(self): 
\"\"\"remote address IP :type: :class:`str`\"\"\" if not self.established: return", "\"<TCP IPV4 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV4", "windows.com.ImprovedVariant() rules = [] for i in range(nb_rules.value): pVariant.Next(1, var,", "values might be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of :class:`long`", "= var.asdispatch idisp.QueryInterface(rule.IID, rule) rules.append(rule) return rules @property def current_profile_types(self):", "= gdef.LONG() self.get_CurrentProfileTypes(cpt) return cpt.value @property def enabled(self): \"\"\"A maping", "get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table) @staticmethod def _get_tcp_ipv6_sockets(): size = ctypes.c_uint(0) try:", "profile_type): enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return enabled.value class FirewallRule(cominterfaces.INetFwRule):", "= (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET) t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer)", "socket import struct from windows import winproxy import windows.generated_def as", "@property def local_addr(self): \"\"\"Local address IP :type: :class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr)", "-> {2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer)", "applicationname = gdef.BSTR() self.get_ApplicationName(applicationname) return applicationname.value @property def service_name(self): \"\"\"Name", "ipv4 = property(lambda self: self._get_tcp_ipv4_sockets()) \"\"\"List of TCP IPv4 socket", "def rules(self): \"\"\"The rules of the firewall :type: [:class:`FirewallRule`] --", "self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = 
windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries class", "@property def remote_proto(self): \"\"\"Identification of the protocol associated with the", "the rule :type: :class:`unicode` \"\"\" remote_address = gdef.BSTR() self.get_RemoteAddresses(remote_address) return", "(connected or listening)\"\"\" @property def established(self): \"\"\"``True`` if connection is", "in range(nb_rules.value): pVariant.Next(1, var, count) if not count.value: break rule", "firewall = Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall) return firewall @staticmethod def _get_tcp_ipv4_sockets():", "maping of the active firewall profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or", "firewall @staticmethod def _get_tcp_ipv4_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size),", "value buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET) t", "gdef.ULONG() var = windows.com.ImprovedVariant() rules = [] for i in", "the application to which apply the rule :type: :class:`unicode` \"\"\"", "listening socket\"\"\" return self.dwState == MIB_TCP_STATE_ESTAB @property def remote_port(self): \"\"\":type:", "socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address IP (x.x.x.x) :type: :class:`str`\"\"\"", "def remote_addr(self): \"\"\"remote address IP :type: :class:`str`\"\"\" if not self.established:", "fails :type: :class:`str` or :class:`int` \"\"\" try: return socket.gethostbyaddr(self.remote_addr) except", "def remote_proto(self): \"\"\"Identification of the protocol associated with the remote", "\"\"\" grouping = gdef.BSTR() self.get_RemotePorts(grouping) return grouping.value @property def icmp_type_and_code(self):", "<require elevated process>\"\"\" closing = MIB_TCPROW() closing.dwState = MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr", ":class:`str` or :class:`int` \"\"\" 
try: return socket.getservbyport(self.remote_port, 'tcp') except socket.error:", "of the rule :type: :class:`unicode` \"\"\" description = gdef.BSTR() self.get_Description(description)", "of the protocol associated with the remote port. Equals ``remote_port``", "= gdef.BSTR() self.get_Name(name) return name.value @property def description(self): \"\"\"Description of", "us to set size to the needed value buffer =", "def application_name(self): \"\"\"Name of the application to which apply the", "windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ = [ (\"dwNumEntries\",", "def description(self): \"\"\"Description of the rule :type: :class:`unicode` \"\"\" description", ":class:`long` \"\"\" cpt = gdef.LONG() self.get_CurrentProfileTypes(cpt) return cpt.value @property def", "IP :type: :class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr) @property def remote_addr(self): \"\"\"remote address", "``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``, } :type: :class:`dict` \"\"\" profiles =", "= gdef.VARIANT_BOOL() self.get_Enabled(enabled) return enabled.value @property def grouping(self): \"\"\"Grouping of", "return None return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property def remote_proto(self): \"\"\"Identification of", "is associated with it. 
:type: :class:`str` or :class:`int` \"\"\" try:", ":class:`str`\"\"\" return self._str_ipv6_addr(self.ucLocalAddr) @property def remote_addr(self): \"\"\"remote address IP :type:", "rule \"\"\" ifw_rules = cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules = gdef.LONG() ifw_rules.get_Count(nb_rules)", "apply the rule :type: :class:`unicode` \"\"\" applicationname = gdef.BSTR() self.get_ApplicationName(applicationname)", "\"\"\"Equals to ``self.remote_addr`` for Ipv6\"\"\" return self.remote_addr def close(self): raise", "def enabled(self): \"\"\"A maping of the active firewall profiles {", "None return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property def remote_proto(self): \"\"\"Identification of the", "enabled.value class FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule of the firewall\"\"\" @property def", "name = gdef.BSTR() self.get_Name(name) return name.value @property def description(self): \"\"\"Description", "{2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry", "class FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule of the firewall\"\"\" @property def name(self):", "[ (\"dwNumEntries\", DWORD), (\"table\", TCP6Connection * nb_entry), ] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer)", "as gdef from windows.com import interfaces as cominterfaces from windows.generated_def.winstructs", "``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``, } :type:", "the rule :type: :class:`unicode` \"\"\" applicationname = gdef.BSTR() self.get_ApplicationName(applicationname) return", "windows.generated_def as gdef from windows.com import interfaces as cominterfaces from", ":type: [:class:`TCP4Connection`]\"\"\" ipv6 = property(lambda self: 
self._get_tcp_ipv6_sockets()) \"\"\"List of TCP", "Equals ``remote_addr`` if the resolution fails :type: :class:`str` or :class:`int`", "if not self.established: return None return self._str_ipv6_addr(self.ucRemoteAddr) @property def remote_proto(self):", "idisp.QueryInterface(rule.IID, rule) rules.append(rule) return rules @property def current_profile_types(self): \"\"\"Mask of", "apply the rule :type: :class:`unicode` \"\"\" servicename = gdef.BSTR() self.get_ServiceName(servicename)", "windows import winproxy import windows.generated_def as gdef from windows.com import", ":type: :class:`unicode` \"\"\" remote_port = gdef.BSTR() self.get_RemotePorts(remote_port) return remote_port.value @property", "__repr__(self): return u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace') class Network(object): NetFwPolicy2 =", "\"\"\"The firewall of the system :type: :class:`Firewall` \"\"\" windows.com.init() firewall", "[] for i in range(nb_rules.value): pVariant.Next(1, var, count) if not", "\"\"\" remote_address = gdef.BSTR() self.get_RemoteAddresses(remote_address) return remote_address.value @property def direction(self):", "= get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table) ipv4 = property(lambda self: self._get_tcp_ipv4_sockets()) \"\"\"List", "protocol is associated with it. 
:type: :class:`str` or :class:`int` \"\"\"", "to ``self.remote_port`` for Ipv6\"\"\" return self.remote_port @property def remote_host(self): \"\"\"Equals", "self.name).encode(\"ascii\", errors='backslashreplace') class Network(object): NetFwPolicy2 = windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def firewall(self):", "\"\"\" applicationname = gdef.BSTR() self.get_ApplicationName(applicationname) return applicationname.value @property def service_name(self):", "servicename.value @property def protocol(self): \"\"\"Protocol to which apply the rule", "ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET) except winproxy.IphlpapiError: pass # Allow", "enabled_for_profile_type(self, profile_type): enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return enabled.value class", "self.get_RemoteAddresses(remote_address) return remote_address.value @property def direction(self): \"\"\"Direction of the rule,", "= cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant = cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant) count =", "_fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP4Connection * nb_entry), ]", "pass # Allow us to set size to the needed", "@property def application_name(self): \"\"\"Name of the application to which apply", "import ctypes import socket import struct from windows import winproxy", "rule :type: :class:`unicode` \"\"\" servicename = gdef.BSTR() self.get_ServiceName(servicename) return servicename.value", "elevated process>\"\"\" closing = MIB_TCPROW() closing.dwState = MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr =", "\"\"\" protocol = gdef.LONG() self.get_Protocol(protocol) return protocol.value @property def local_address(self):", "= gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value def __repr__(self): return u'<{0} \"{1}\">'.format(type(self).__name__,", "return 
icmp_type_and_code.value def __repr__(self): return u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\", errors='backslashreplace') class", "= property(lambda self: self._get_tcp_ipv4_sockets()) \"\"\"List of TCP IPv4 socket (connection", "firewall(self): \"\"\"The firewall of the system :type: :class:`Firewall` \"\"\" windows.com.init()", "{s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6 socket (connected or", "gdef.NET_FW_PROFILE2_PUBLIC] return {prof: self.enabled_for_profile_type(prof) for prof in profiles} def enabled_for_profile_type(self,", "gdef from windows.com import interfaces as cominterfaces from windows.generated_def.winstructs import", "``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of :class:`long` \"\"\" direction = gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return", "``remote_port`` if no protocol is associated with it. :type: :class:`str`", "interfaces as cominterfaces from windows.generated_def.winstructs import * from windows.generated_def.windef import", "the rule, values might be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass", "remote_proto(self): \"\"\"Equals to ``self.remote_port`` for Ipv6\"\"\" return self.remote_port @property def", "return remote_address.value @property def direction(self): \"\"\"Direction of the rule, values", "@property def direction(self): \"\"\"Direction of the rule, values might be:", "if no protocol is associated with it. 
:type: :class:`str` or", "of the application to which apply the rule :type: :class:`unicode`", "MIB_TCP_STATE_ESTAB @property def remote_port(self): \"\"\":type: :class:`int`\"\"\" if not self.established: return", "\"\"\"Close the connection <require elevated process>\"\"\" closing = MIB_TCPROW() closing.dwState", "nb_entry), ] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry", "@property def grouping(self): \"\"\"Grouping of the rule :type: :class:`unicode` \"\"\"", "self.established: return None return socket.ntohs(self.dwRemotePort) @property def local_port(self): \"\"\":type: :class:`int`\"\"\"", "for c in addr) @property def established(self): \"\"\"``True`` if connection", "closing.dwRemotePort = self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self): if not self.established:", "{0}:{1} -> {2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x =", ":class:`str`\"\"\" if not self.established: return None return self._str_ipv6_addr(self.ucRemoteAddr) @property def", "socket.error: return self.remote_port @property def remote_host(self): \"\"\"Identification of the remote", "port of the rule :type: :class:`unicode` \"\"\" local_port = gdef.BSTR()", "def firewall(self): \"\"\"The firewall of the system :type: :class:`Firewall` \"\"\"", ":type: :class:`unicode` \"\"\" interface_type = gdef.BSTR() self.get_InterfaceTypes(interface_type) return interface_type.value @property", "* ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of :class:`long` \"\"\" action =", "= [ (\"dwNumEntries\", DWORD), (\"table\", TCP6Connection * nb_entry), ] return", "self: self._get_tcp_ipv6_sockets()) \"\"\"List of TCP IPv6 socket (connection and listening)", 
"@property def local_address(self): \"\"\"Local address of the rule :type: :class:`unicode`", "return direction.value @property def interface_types(self): \"\"\"Types of interface of the", "might be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of :class:`long` \"\"\"", "or :class:`int` \"\"\" try: return socket.getservbyport(self.remote_port, 'tcp') except socket.error: return", "return \":\".join(c.encode('hex') for c in addr) @property def established(self): \"\"\"``True``", ":class:`unicode` \"\"\" name = gdef.BSTR() self.get_Name(name) return name.value @property def", "listening)\"\"\" @staticmethod def _str_ipv6_addr(addr): return \":\".join(c.encode('hex') for c in addr)", "\"\"\" try: return socket.gethostbyaddr(self.remote_addr) except socket.error: return self.remote_addr def close(self):", "_fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP6Connection * nb_entry), ]", "return {prof: self.enabled_for_profile_type(prof) for prof in profiles} def enabled_for_profile_type(self, profile_type):", "cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant) count = gdef.ULONG() var = windows.com.ImprovedVariant() rules", "FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule of the firewall\"\"\" @property def name(self): \"\"\"Name", "process>\"\"\" closing = MIB_TCPROW() closing.dwState = MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr = self.dwLocalAddr", "\"\"\"A maping of the active firewall profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True``", "self.dwRemoteAddr closing.dwRemotePort = self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self): if not", "] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows firewall\"\"\" @property def", "__repr__(self): if not self.established: return \"<TCP IPV4 Listening socket on", "x.dwNumEntries class 
_GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP4Connection", "(connection and listening) :type: [:class:`TCP4Connection`]\"\"\" ipv6 = property(lambda self: self._get_tcp_ipv6_sockets())", "def close(self): raise NotImplementedError(\"Closing IPV6 connection non implemented\") def __repr__(self):", "socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property def remote_proto(self): \"\"\"Identification of the protocol associated", "of the rule :type: :class:`unicode` \"\"\" local_address = gdef.BSTR() self.get_LocalAddresses(local_address)", "\"\"\"Action of the rule, values might be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` *", "\"\"\"Local port of the rule :type: :class:`unicode` \"\"\" local_port =", ":type: :class:`dict` \"\"\" profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return {prof:", "winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6) t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table) ipv4 =", "the service to which apply the rule :type: :class:`unicode` \"\"\"", "\"\"\"Equals to ``self.remote_port`` for Ipv6\"\"\" return self.remote_port @property def remote_host(self):", "[:class:`TCP4Connection`]\"\"\" ipv6 = property(lambda self: self._get_tcp_ipv6_sockets()) \"\"\"List of TCP IPv6", "class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4 socket (connected or listening)\"\"\" @property def", "\"\"\" servicename = gdef.BSTR() self.get_ServiceName(servicename) return servicename.value @property def protocol(self):", "gdef.BSTR() self.get_LocalAddresses(local_address) return local_address.value @property def remote_address(self): \"\"\"Remote address of", "rule :type: :class:`unicode` \"\"\" remote_address = gdef.BSTR() self.get_RemoteAddresses(remote_address) return remote_address.value", "closing = MIB_TCPROW() closing.dwState = MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr = 
self.dwLocalAddr closing.dwLocalPort", "= [ (\"dwNumEntries\", DWORD), (\"table\", TCP4Connection * nb_entry), ] return", "return self.dwState == MIB_TCP_STATE_ESTAB @property def remote_port(self): \"\"\":type: :class:`int`\"\"\" if", "local_address = gdef.BSTR() self.get_LocalAddresses(local_address) return local_address.value @property def remote_address(self): \"\"\"Remote", "rules.append(rule) return rules @property def current_profile_types(self): \"\"\"Mask of the profiles", "remote_port(self): \"\"\":type: :class:`int`\"\"\" if not self.established: return None return socket.ntohs(self.dwRemotePort)", "the active firewall profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``:", "try: return socket.getservbyport(self.remote_port, 'tcp') except socket.error: return self.remote_port @property def", "return action.value @property def enabled(self): \"\"\"``True`` if rule is enabled\"\"\"", "Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV4 Connection {s.local_addr}:{s.local_port}", "``remote_addr`` if the resolution fails :type: :class:`str` or :class:`int` \"\"\"", "property(lambda self: self._get_tcp_ipv4_sockets()) \"\"\"List of TCP IPv4 socket (connection and", "= MIB_TCP_STATE_DELETE_TCB closing.dwLocalAddr = self.dwLocalAddr closing.dwLocalPort = self.dwLocalPort closing.dwRemoteAddr =", "return socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address IP :type: :class:`str`\"\"\"", "service_name(self): \"\"\"Name of the service to which apply the rule", "address IP (x.x.x.x) :type: :class:`str`\"\"\" return socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property def", "@property def rules(self): \"\"\"The rules of the firewall :type: [:class:`FirewallRule`]", "IPV6 Connection {0}:{1} -> {2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port) def 
get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer):", "pVariant = cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID, pVariant) count = gdef.ULONG() var =", "@staticmethod def _get_tcp_ipv4_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET)", "TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A TCP4 socket (connected or listening)\"\"\" @property def established(self):", ":type: :class:`unicode` \"\"\" applicationname = gdef.BSTR() self.get_ApplicationName(applicationname) return applicationname.value @property", "return \"<TCP IPV6 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP", "try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET) except winproxy.IphlpapiError: pass # Allow us", "close(self): \"\"\"Close the connection <require elevated process>\"\"\" closing = MIB_TCPROW()", "a listening socket\"\"\" return self.dwState == MIB_TCP_STATE_ESTAB @property def remote_port(self):", "import interfaces as cominterfaces from windows.generated_def.winstructs import * from windows.generated_def.windef", "the rule, values might be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass", "= self.dwRemoteAddr closing.dwRemotePort = self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing)) def __repr__(self): if", "@property def established(self): \"\"\"``True`` if connection is established else it's", "[:class:`FirewallRule`] -- A list of rule \"\"\" ifw_rules = cominterfaces.INetFwRules()", "def action(self): \"\"\"Action of the rule, values might be: *", "for Ipv6\"\"\" return self.remote_addr def close(self): raise NotImplementedError(\"Closing IPV6 connection", "non implemented\") def __repr__(self): if not self.established: return \"<TCP IPV6", "as cominterfaces from windows.generated_def.winstructs import * from windows.generated_def.windef import *", "the rule :type: :class:`unicode` \"\"\" 
local_port = gdef.BSTR() self.get_LocalPorts(local_port) return", "\"\"\"Remote port of the rule :type: :class:`unicode` \"\"\" remote_port =", "currently enabled :type: :class:`long` \"\"\" cpt = gdef.LONG() self.get_CurrentProfileTypes(cpt) return", "return self.remote_addr def close(self): raise NotImplementedError(\"Closing IPV6 connection non implemented\")", "local_port.value @property def remote_port(self): \"\"\"Remote port of the rule :type:", "is established else it's a listening socket\"\"\" return self.dwState ==", "TCP6 socket (connected or listening)\"\"\" @staticmethod def _str_ipv6_addr(addr): return \":\".join(c.encode('hex')", "_GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\", TCP4Connection * nb_entry),", "Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows firewall\"\"\" @property def rules(self): \"\"\"The rules of", "enabled\"\"\" enabled = gdef.VARIANT_BOOL() self.get_Enabled(enabled) return enabled.value @property def grouping(self):", "gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return {prof: self.enabled_for_profile_type(prof) for prof in profiles} def", "be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of :class:`long` \"\"\" action", "_get_tcp_ipv6_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6) except winproxy.IphlpapiError:", "resolution fails :type: :class:`str` or :class:`int` \"\"\" try: return socket.gethostbyaddr(self.remote_addr)", "i in range(nb_rules.value): pVariant.Next(1, var, count) if not count.value: break", "return list(t.table) @staticmethod def _get_tcp_ipv6_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None,", "not self.established: return None return self._str_ipv6_addr(self.ucRemoteAddr) @property def remote_proto(self): \"\"\"Equals", "ctypes.byref(size), ulAf=AF_INET6) except winproxy.IphlpapiError: 
pass # Allow us to set", "\"\"\"The windows firewall\"\"\" @property def rules(self): \"\"\"The rules of the", "if not self.established: return None return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property def", "not self.established: return None return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr)) @property def remote_proto(self):", "nb_rules = gdef.LONG() ifw_rules.get_Count(nb_rules) unknw = cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant =", "gdef.BSTR() self.get_ApplicationName(applicationname) return applicationname.value @property def service_name(self): \"\"\"Name of the", "return applicationname.value @property def service_name(self): \"\"\"Name of the service to", "\"\"\" local_address = gdef.BSTR() self.get_LocalAddresses(local_address) return local_address.value @property def remote_address(self):", "``self.remote_port`` for Ipv6\"\"\" return self.remote_port @property def remote_host(self): \"\"\"Equals to", "established(self): \"\"\"``True`` if connection is established else it's a listening", "gdef.NET_FW_ACTION() self.get_Action(action) return action.value @property def enabled(self): \"\"\"``True`` if rule", "closing.dwLocalAddr = self.dwLocalAddr closing.dwLocalPort = self.dwLocalPort closing.dwRemoteAddr = self.dwRemoteAddr closing.dwRemotePort", "\"\"\"Identification of the protocol associated with the remote port. 
Equals", "name(self): \"\"\"Name of the rule :type: :class:`unicode` \"\"\" name =", "the rule :type: :class:`unicode` \"\"\" servicename = gdef.BSTR() self.get_ServiceName(servicename) return", "= gdef.BSTR() self.get_RemotePorts(remote_port) return remote_port.value @property def action(self): \"\"\"Action of", "* nb_entry), ] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer)", "remote_addr(self): \"\"\"remote address IP (x.x.x.x) :type: :class:`str`\"\"\" if not self.established:", "\"\"\"``True`` if rule is enabled\"\"\" enabled = gdef.VARIANT_BOOL() self.get_Enabled(enabled) return", "= ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6) except winproxy.IphlpapiError: pass #", "else it's a listening socket\"\"\" return self.dwState == MIB_TCP_STATE_ESTAB @property", ":type: :class:`str`\"\"\" return socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property def remote_addr(self): \"\"\"remote address", "for prof in profiles} def enabled_for_profile_type(self, profile_type): enabled = gdef.VARIANT_BOOL()", "self.get_ServiceName(servicename) return servicename.value @property def protocol(self): \"\"\"Protocol to which apply", "firewall profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or", "connection non implemented\") def __repr__(self): if not self.established: return \"<TCP", "connection is established else it's a listening socket\"\"\" return self.dwState", "# Struct _MIB_TCP6TABLE_OWNER_PID definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ = [ (\"dwNumEntries\",", "of TCP IPv6 socket (connection and listening) :type: [:class:`TCP6Connection`] \"\"\"", "local_address(self): \"\"\"Local address of the rule :type: :class:`unicode` \"\"\" 
local_address", "windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries # Struct _MIB_TCP6TABLE_OWNER_PID definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure):", "def local_address(self): \"\"\"Local address of the rule :type: :class:`unicode` \"\"\"", "with the remote port. Equals ``remote_port`` if no protocol is", ":type: :class:`unicode` \"\"\" local_port = gdef.BSTR() self.get_LocalPorts(local_port) return local_port.value @property", "* size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6) t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table)", "{ ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``:", "unknw.QueryInterface(pVariant.IID, pVariant) count = gdef.ULONG() var = windows.com.ImprovedVariant() rules =", "gdef.BSTR() self.get_Name(name) return name.value @property def description(self): \"\"\"Description of the", "gdef.BSTR() self.get_ServiceName(servicename) return servicename.value @property def protocol(self): \"\"\"Protocol to which", ":class:`unicode` \"\"\" remote_port = gdef.BSTR() self.get_RemotePorts(remote_port) return remote_port.value @property def", "self.local_port) return \"<TCP IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID):", "_MIB_TCP6TABLE_OWNER_PID definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ = [ (\"dwNumEntries\", DWORD), (\"table\",", "direction.value @property def interface_types(self): \"\"\"Types of interface of the rule", "subclass of :class:`long` \"\"\" action = gdef.NET_FW_ACTION() self.get_Action(action) return action.value", "TCP6Connection * nb_entry), ] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class 
Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows", "rule :type: :class:`unicode` \"\"\" description = gdef.BSTR() self.get_Description(description) return description.value", "\"\"\" remote_port = gdef.BSTR() self.get_RemotePorts(remote_port) return remote_port.value @property def action(self):", "which apply the rule :type: :class:`unicode` \"\"\" applicationname = gdef.BSTR()", "@property def enabled(self): \"\"\"A maping of the active firewall profiles", "rule :type: :class:`long` \"\"\" protocol = gdef.LONG() self.get_Protocol(protocol) return protocol.value", "IPV6 connection non implemented\") def __repr__(self): if not self.established: return", ":type: :class:`unicode` \"\"\" description = gdef.BSTR() self.get_Description(description) return description.value @property", "= gdef.BSTR() self.get_LocalAddresses(local_address) return local_address.value @property def remote_address(self): \"\"\"Remote address", "grouping(self): \"\"\"Grouping of the rule :type: :class:`unicode` \"\"\" grouping =", "TCP4Connection * nb_entry), ] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x =", "(ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6) t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return", "NotImplementedError(\"Closing IPV6 connection non implemented\") def __repr__(self): if not self.established:", "for i in range(nb_rules.value): pVariant.Next(1, var, count) if not count.value:", "idisp = var.asdispatch idisp.QueryInterface(rule.IID, rule) rules.append(rule) return rules @property def", "def service_name(self): \"\"\"Name of the service to which apply the", "self.get_Name(name) return name.value @property def description(self): \"\"\"Description of the rule", ":type: :class:`long` \"\"\" cpt = gdef.LONG() self.get_CurrentProfileTypes(cpt) return cpt.value @property", ":class:`long` \"\"\" action = gdef.NET_FW_ACTION() 
self.get_Action(action) return action.value @property def", "remote_port = gdef.BSTR() self.get_RemotePorts(remote_port) return remote_port.value @property def action(self): \"\"\"Action", "\"\"\"Grouping of the rule :type: :class:`unicode` \"\"\" grouping = gdef.BSTR()", "Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6 socket (connected", "is enabled\"\"\" enabled = gdef.VARIANT_BOOL() self.get_Enabled(enabled) return enabled.value @property def", "firewall\"\"\" @property def name(self): \"\"\"Name of the rule :type: :class:`unicode`", "if not count.value: break rule = FirewallRule() idisp = var.asdispatch", "prof in profiles} def enabled_for_profile_type(self, profile_type): enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type,", ":class:`unicode` \"\"\" description = gdef.BSTR() self.get_Description(description) return description.value @property def", "action(self): \"\"\"Action of the rule, values might be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)``", "def icmp_type_and_code(self): icmp_type_and_code = gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value def __repr__(self):", "local_port(self): \"\"\":type: :class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address", "\"\"\"List of TCP IPv4 socket (connection and listening) :type: [:class:`TCP4Connection`]\"\"\"", "with it. 
:type: :class:`str` or :class:`int` \"\"\" try: return socket.getservbyport(self.remote_port,", ":class:`dict` \"\"\" profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC] return {prof: self.enabled_for_profile_type(prof)", "\"\"\" windows.com.init() firewall = Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall) return firewall @staticmethod", "@property def remote_port(self): \"\"\"Remote port of the rule :type: :class:`unicode`", "Struct _MIB_TCP6TABLE_OWNER_PID definitions class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure): _fields_ = [ (\"dwNumEntries\", DWORD),", "hostname. Equals ``remote_addr`` if the resolution fails :type: :class:`str` or", "return self.remote_port @property def remote_host(self): \"\"\"Equals to ``self.remote_addr`` for Ipv6\"\"\"", "] return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry =", "= gdef.LONG() ifw_rules.get_Count(nb_rules) unknw = cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant = cominterfaces.IEnumVARIANT()", "to which apply the rule :type: :class:`long` \"\"\" protocol =", "self.dwLocalPort closing.dwRemoteAddr = self.dwRemoteAddr closing.dwRemotePort = self.dwRemotePort return winproxy.SetTcpEntry(ctypes.byref(closing)) def", "\"\"\"remote address IP :type: :class:`str`\"\"\" if not self.established: return None", "@staticmethod def _get_tcp_ipv6_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6)", "return self.remote_addr def close(self): \"\"\"Close the connection <require elevated process>\"\"\"", "size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET) except winproxy.IphlpapiError: pass", "pVariant) count = gdef.ULONG() var = windows.com.ImprovedVariant() rules = []", "= 
windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def firewall(self): \"\"\"The firewall of the system", "import struct from windows import winproxy import windows.generated_def as gdef", "ctypes import socket import struct from windows import winproxy import", "def established(self): \"\"\"``True`` if connection is established else it's a", "of the active firewall profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``,", "\"\"\" name = gdef.BSTR() self.get_Name(name) return name.value @property def description(self):", "@property def service_name(self): \"\"\"Name of the service to which apply", "(x.x.x.x) :type: :class:`str`\"\"\" return socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property def remote_addr(self): \"\"\"remote", "@property def remote_proto(self): \"\"\"Equals to ``self.remote_port`` for Ipv6\"\"\" return self.remote_port", "def remote_host(self): \"\"\"Equals to ``self.remote_addr`` for Ipv6\"\"\" return self.remote_addr def", "@property def interface_types(self): \"\"\"Types of interface of the rule :type:", "remote port. 
Equals ``remote_port`` if no protocol is associated with", "self.get_RemotePorts(remote_port) return remote_port.value @property def action(self): \"\"\"Action of the rule,", "gdef.BSTR() self.get_InterfaceTypes(interface_type) return interface_type.value @property def local_port(self): \"\"\"Local port of", "remote_proto(self): \"\"\"Identification of the protocol associated with the remote port.", "remote_host(self): \"\"\"Equals to ``self.remote_addr`` for Ipv6\"\"\" return self.remote_addr def close(self):", "\"\"\"Description of the rule :type: :class:`unicode` \"\"\" description = gdef.BSTR()", "the rule :type: :class:`unicode` \"\"\" interface_type = gdef.BSTR() self.get_InterfaceTypes(interface_type) return", "return None return self._str_ipv6_addr(self.ucRemoteAddr) @property def remote_proto(self): \"\"\"Equals to ``self.remote_port``", "\"\"\"Types of interface of the rule :type: :class:`unicode` \"\"\" interface_type", "gdef.BSTR() self.get_RemotePorts(icmp_type_and_code) return icmp_type_and_code.value def __repr__(self): return u'<{0} \"{1}\">'.format(type(self).__name__, self.name).encode(\"ascii\",", "local_port = gdef.BSTR() self.get_LocalPorts(local_port) return local_port.value @property def remote_port(self): \"\"\"Remote", "socket (connection and listening) :type: [:class:`TCP4Connection`]\"\"\" ipv6 = property(lambda self:", "no protocol is associated with it. 
:type: :class:`str` or :class:`int`", "value buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6) t", "def name(self): \"\"\"Name of the rule :type: :class:`unicode` \"\"\" name", "\"\"\"Name of the service to which apply the rule :type:", "apply the rule :type: :class:`long` \"\"\" protocol = gdef.LONG() self.get_Protocol(protocol)", "return interface_type.value @property def local_port(self): \"\"\"Local port of the rule", "if rule is enabled\"\"\" enabled = gdef.VARIANT_BOOL() self.get_Enabled(enabled) return enabled.value", "t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table) @staticmethod def _get_tcp_ipv6_sockets(): size =", "self: self._get_tcp_ipv4_sockets()) \"\"\"List of TCP IPv4 socket (connection and listening)", "\"\"\"Protocol to which apply the rule :type: :class:`long` \"\"\" protocol", "the needed value buffer = (ctypes.c_char * size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size),", "struct from windows import winproxy import windows.generated_def as gdef from", "gdef.LONG() ifw_rules.get_Count(nb_rules) unknw = cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant = cominterfaces.IEnumVARIANT() unknw.QueryInterface(pVariant.IID,", "except winproxy.IphlpapiError: pass # Allow us to set size to", "cpt = gdef.LONG() self.get_CurrentProfileTypes(cpt) return cpt.value @property def enabled(self): \"\"\"A", "values might be: * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)`` * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)`` subclass of :class:`long`", "\"\"\"``True`` if connection is established else it's a listening socket\"\"\"", "class Firewall(cominterfaces.INetFwPolicy2): \"\"\"The windows firewall\"\"\" @property def rules(self): \"\"\"The rules", ":type: :class:`str` or :class:`int` \"\"\" try: return socket.getservbyport(self.remote_port, 'tcp') except", "def _get_tcp_ipv4_sockets(): size = ctypes.c_uint(0) try: winproxy.GetExtendedTcpTable(None, 
ctypes.byref(size), ulAf=AF_INET) except", "windows import ctypes import socket import struct from windows import", "applicationname.value @property def service_name(self): \"\"\"Name of the service to which", "= gdef.BSTR() self.get_RemotePorts(grouping) return grouping.value @property def icmp_type_and_code(self): icmp_type_and_code =", "= gdef.ULONG() var = windows.com.ImprovedVariant() rules = [] for i", "(\"table\", TCP6Connection * nb_entry), ] return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class Firewall(cominterfaces.INetFwPolicy2): \"\"\"The", "x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries # Struct _MIB_TCP6TABLE_OWNER_PID definitions", "(connected or listening)\"\"\" @staticmethod def _str_ipv6_addr(addr): return \":\".join(c.encode('hex') for c", "be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)`` subclass of :class:`long` \"\"\" direction", "{s.remote_addr}:{s.remote_port}>\".format(s=self) class TCP6Connection(MIB_TCP6ROW_OWNER_PID): \"\"\"A TCP6 socket (connected or listening)\"\"\" @staticmethod", "socket (connected or listening)\"\"\" @property def established(self): \"\"\"``True`` if connection", "self.dwState == MIB_TCP_STATE_ESTAB @property def remote_port(self): \"\"\":type: :class:`int`\"\"\" if not", "if connection is established else it's a listening socket\"\"\" return", "\"\"\"Direction of the rule, values might be: * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)`` *", "IP :type: :class:`str`\"\"\" if not self.established: return None return self._str_ipv6_addr(self.ucRemoteAddr)", "__repr__(self): if not self.established: return \"<TCP IPV6 Listening socket on", "on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>\".format(s=self)", "\"\"\"Local address IP (x.x.x.x) :type: :class:`str`\"\"\" return 
socket.inet_ntoa(struct.pack(\"<I\", self.dwLocalAddr)) @property", "\"\"\" local_port = gdef.BSTR() self.get_LocalPorts(local_port) return local_port.value @property def remote_port(self):", "= FirewallRule() idisp = var.asdispatch idisp.QueryInterface(rule.IID, rule) rules.append(rule) return rules", "gdef.BSTR() self.get_LocalPorts(local_port) return local_port.value @property def remote_port(self): \"\"\"Remote port of", "windows.generated_def.winstructs import * from windows.generated_def.windef import * class TCP4Connection(MIB_TCPROW_OWNER_PID): \"\"\"A", ":type: :class:`unicode` \"\"\" name = gdef.BSTR() self.get_Name(name) return name.value @property", "ulAf=AF_INET) except winproxy.IphlpapiError: pass # Allow us to set size", "= self.dwLocalAddr closing.dwLocalPort = self.dwLocalPort closing.dwRemoteAddr = self.dwRemoteAddr closing.dwRemotePort =", "self.established: return \"<TCP IPV6 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return", "self.remote_addr, self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries", "action.value @property def enabled(self): \"\"\"``True`` if rule is enabled\"\"\" enabled", "size to the needed value buffer = (ctypes.c_char * size.value)()", "return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries", "class Network(object): NetFwPolicy2 = windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def firewall(self): \"\"\"The firewall", "self.remote_addr def close(self): raise NotImplementedError(\"Closing IPV6 connection non implemented\") def", "= gdef.BSTR() self.get_InterfaceTypes(interface_type) return interface_type.value @property def local_port(self): \"\"\"Local port", ":class:`long` \"\"\" 
direction = gdef.NET_FW_RULE_DIRECTION() self.get_Direction(direction) return direction.value @property def", "return enabled.value class FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule of the firewall\"\"\" @property", "action = gdef.NET_FW_ACTION() self.get_Action(action) return action.value @property def enabled(self): \"\"\"``True``", "Network(object): NetFwPolicy2 = windows.com.IID.from_string(\"E2B3C97F-6AE1-41AC-817A-F6F92166D7DD\") @property def firewall(self): \"\"\"The firewall of", "profiles { ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``, ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``,", "self.get_Rules(ifw_rules) nb_rules = gdef.LONG() ifw_rules.get_Count(nb_rules) unknw = cominterfaces.IUnknown() ifw_rules.get__NewEnum(unknw) pVariant", "def grouping(self): \"\"\"Grouping of the rule :type: :class:`unicode` \"\"\" grouping", "if not self.established: return None return socket.ntohs(self.dwRemotePort) @property def local_port(self):", "size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET) t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table) @staticmethod", "range(nb_rules.value): pVariant.Next(1, var, count) if not count.value: break rule =", "the rule :type: :class:`unicode` \"\"\" remote_port = gdef.BSTR() self.get_RemotePorts(remote_port) return", "property(lambda self: self._get_tcp_ipv6_sockets()) \"\"\"List of TCP IPv6 socket (connection and", "* size.value)() winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET) t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer) return list(t.table)", "socket (connected or listening)\"\"\" @staticmethod def _str_ipv6_addr(addr): return \":\".join(c.encode('hex') for", "import winproxy import windows.generated_def as gdef from windows.com import interfaces", "get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer) return list(t.table) ipv4 = property(lambda self: 
self._get_tcp_ipv4_sockets()) \"\"\"List of", ":type: :class:`str`\"\"\" if not self.established: return None return socket.inet_ntoa(struct.pack(\"<I\", self.dwRemoteAddr))", "return \"<TCP IPV6 Connection {0}:{1} -> {2}:{3}>\".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port)", "\"<TCP IPV6 Listening socket on {0}:{1}>\".format(self.local_addr, self.local_port) return \"<TCP IPV6", "\"\"\"List of TCP IPv6 socket (connection and listening) :type: [:class:`TCP6Connection`]", "@property def local_port(self): \"\"\"Local port of the rule :type: :class:`unicode`", "grouping = gdef.BSTR() self.get_RemotePorts(grouping) return grouping.value @property def icmp_type_and_code(self): icmp_type_and_code", "get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure): _fields_", "self.dwRemoteAddr)) @property def remote_proto(self): \"\"\"Identification of the protocol associated with", "self.get_FirewallEnabled(profile_type, enabled) return enabled.value class FirewallRule(cominterfaces.INetFwRule): \"\"\"A rule of the", "= Firewall() windows.com.create_instance(self.NetFwPolicy2, firewall) return firewall @staticmethod def _get_tcp_ipv4_sockets(): size", "not self.established: return None return socket.ntohs(self.dwRemotePort) @property def local_port(self): \"\"\":type:", ":class:`int`\"\"\" return socket.ntohs(self.dwLocalPort) @property def local_addr(self): \"\"\"Local address IP (x.x.x.x)", "of rule \"\"\" ifw_rules = cominterfaces.INetFwRules() self.get_Rules(ifw_rules) nb_rules = gdef.LONG()", "except socket.error: return self.remote_port @property def remote_host(self): \"\"\"Identification of the", "return local_port.value @property def remote_port(self): \"\"\"Remote port of the rule", "implemented\") def __repr__(self): if not self.established: return \"<TCP IPV6 Listening", "@property 
def enabled(self): \"\"\"``True`` if rule is enabled\"\"\" enabled =", "import windows import ctypes import socket import struct from windows" ]
[ "2 30 4 50 60 100\" print(\"\\n------\\n\") # Example 2.", "c1 = f(1) c2 = c1(2, d = 4) #", "= c1(e = 5)(f = 6)(e = 10)() # ops...", "if level is None: printTree(cur(func), 0) elif level == 6:", "Thanks to b49P23TIvg for suggesting that I should use a", "for pointing out that # - \"minArgs = None\" is", "= -1\", # - \"if args\" is better than \"if", "3 c3() # () forces the evaluation <==== # it", "num args = 7 ==> evaluation <==== # we can", "3 c2 = c1(4, f = 6) # num args", "returns a new 'f' else: # the evaluation was forced", "2, 3) # num args = 3 c2 = c1(4,", "c2(30)(f = 60)(e = 50) # now c = 30", "n - 1) else: f() # enough is enough stress(cur(f2),", "some more args! # Allocates data to assign to the", "c4 = c2(30)(f = 60)(e = 50) # now c", "it prints \"1 2 3 4 5 6 100\" c4", "'curried' version of a function. \"\"\" def g(*myArgs, **myKwArgs): def", "\"1 2 3 4 5 6 100\" c4 = c2(5,", "if args or kwArgs: # some more args! # Allocates", "= c1(2, d = 4) # Note that c is", "func(a, b, c, d, e, f, g = 100): print(a,", "c2(5) # num args = 6 ==> evalution <==== #", "args = 3 c2 = c1(4, f = 6) #", "True, minArgs) def curr(f, minArgs = None): return genCur(f, False,", "func else: return g(*newArgs, **newKwArgs) # returns a new 'f'", "Function. def func(a, b, c, d, e, f, g =", "genCur(f, True, minArgs) def curr(f, minArgs = None): return genCur(f,", "we repeated 'e' because we <==== # changed our mind", "is None: printTree(cur(func), 0) elif level == 6: func(g =", "Coded by <NAME>, 2012. # # - Thanks to b49P23TIvg", "6)(e = 10)() # ops... we repeated 'e' because we", "2012. # # - Thanks to b49P23TIvg for suggesting that", "**newKwArgs) # returns a new 'f' else: # the evaluation", "# If unique is True, we don't want repeated keyword", "# # - Thanks to b49P23TIvg for suggesting that I", "genCur(f, False, minArgs) if __name__ == \"__main__\": # Simple Function.", "f, g) # NOTE: '<====' means \"this line prints to", "python3 # Coded by <NAME>, 2012. 
# # - Thanks", "# it prints \"1 2 3 4 5 6 100\"", "50) # now c = 30 c4() # () forces", "unique = True\") # Adds/updates keyword arguments. newKwArgs.update(kwArgs) # Checks", "def cur(f, minArgs = None): return genCur(f, True, minArgs) def", "100\" c4 = c2(5, g = -1) # num args", "# it prints \"1 2 3 4 5 6 -1\"", "+ args newKwArgs = dict.copy(myKwArgs) # If unique is True,", "c2 = c1(4, f = 6) # num args =", "Example 1. f = cur(func) # f is a \"curried\"", "func(g = '')() # or just func('')() else: printTree(func(0), level", "100\" print(\"\\n------\\n\") # Example 3. f = cur(func, 6) #", "of func # curr = cur with possibly repeated #", "- \"if args\" is better than \"if len(args)\", and #", "and # - I should use \"isdisjoint\". # def genCur(func,", "minArgs = None): return genCur(f, True, minArgs) def curr(f, minArgs", "better than \"if len(args)\", and # - I should use", "level = None): if level is None: printTree(cur(func), 0) elif", "we <==== # changed our mind about it! # again,", "6 arguments, but # 6 are enough to force the", "False, minArgs) if __name__ == \"__main__\": # Simple Function. def", "c2 = c1(e = 5)(f = 6)(e = 10)() #", "cur(func) # f is a \"curried\" version of func c1", "possibly repeated # keyword args c1 = f(1, 2)(3, 4)", "else: printTree(func(0), level + 1) printTree(func(1), level + 1) printTree(func)", "None): return genCur(f, True, minArgs) def curr(f, minArgs = None):", "that I should use a set operation # instead of", "args or kwArgs: # some more args! # Allocates data", "2)(3, 4) c2 = c1(e = 5)(f = 6)(e =", "g) # NOTE: '<====' means \"this line prints to the", "d, e, f, g = 100): print(a, b, c, d,", "are enough to force the evaluation # it prints \"1", "3 4 5 6 100\" c4 = c2(30)(f = 60)(e", "version of func c1 = f(1) c2 = c1(2, d", "= cur with possibly repeated # keyword args c1 =", "args = 6 ==> evalution <==== # it prints \"1", "Example 2. 
f = curr(func) # f is a \"curried\"", "==> evalution <==== # it prints \"1 2 3 4", "Simple Function. def func(a, b, c, d, e, f, g", "means \"this line prints to the screen\". # Example 1.", "arguments, but # 6 are enough to force the evaluation", "args! # Allocates data to assign to the next 'f'.", "If unique is True, we don't want repeated keyword arguments.", "g def cur(f, minArgs = None): return genCur(f, True, minArgs)", "repeated # keyword args c1 = f(1, 2)(3, 4) c2", "stress(f, n): if n: stress(f(n), n - 1) else: f()", "\".join([\"%3d\"%(x) for x in args])) def stress(f, n): if n:", "else: # the evaluation was forced return func(*myArgs, **myKwArgs) return", "kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated kw arg while unique = True\") #", "or kwArgs: # some more args! # Allocates data to", "print(\"\\n------\\n\") # Example 2. f = curr(func) # f is", "#!/usr/bin/env python3 # Coded by <NAME>, 2012. # # -", "-1\", # - \"if args\" is better than \"if len(args)\",", "= curr(func) # f is a \"curried\" version of func", "instead of repeated membership tests. # - Thanks to <NAME>", "print(\"\\n------\\n\") # Example 3. f = cur(func, 6) # forces", "5 6 -1\" print(\"\\n------\\n\") # Example 4. def printTree(func, level", "better than \"minArgs = -1\", # - \"if args\" is", "= None): \"\"\" Generates a 'curried' version of a function.", "evaluation <==== # it prints \"1 2 3 4 5", "to the screen\". # Example 1. f = cur(func) #", "f, g = 100): print(a, b, c, d, e, f,", "force the evaluation # it prints \"1 2 3 4", "args = 5 c3 = c2(5) # num args =", "= c2(5) # num args = 6 ==> evalution <====", "\"if len(args)\", and # - I should use \"isdisjoint\". #", "b, c, d, e, f, g = 100): print(a, b,", "-1) # num args = 7 ==> evaluation <==== #", "f(1, 2, 3) # num args = 3 c2 =", "time to evaluate func. if minArgs is not None and", "don't want repeated keyword arguments. if unique and not kwArgs.keys().isdisjoint(newKwArgs):", "# Example 2. 
f = curr(func) # f is a", "4 5 6 -1\" print(\"\\n------\\n\") # Example 4. def printTree(func,", "the evaluation <==== # it prints \"1 2 3 4", "is a \"curried\" version of func c1 = f(1) c2", "**myKwArgs): def f(*args, **kwArgs): if args or kwArgs: # some", "enough to force the evaluation # it prints \"1 2", "arguments c1 = f(1, 2, 3) # num args =", "Allocates data to assign to the next 'f'. newArgs =", "return g(*newArgs, **newKwArgs) # returns a new 'f' else: #", "5 6 100\" c4 = c2(30)(f = 60)(e = 50)", "dict.copy(myKwArgs) # If unique is True, we don't want repeated", "# num args = 5 c3 = c2(5) # num", "c, d, e, f, g) # NOTE: '<====' means \"this", "minArgs = None): \"\"\" Generates a 'curried' version of a", "\"1 2 30 4 50 60 100\" print(\"\\n------\\n\") # Example", "args])) def stress(f, n): if n: stress(f(n), n - 1)", "newKwArgs = dict.copy(myKwArgs) # If unique is True, we don't", "keyword arguments. if unique and not kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated kw", "is True, we don't want repeated keyword arguments. if unique", "== \"__main__\": # Simple Function. def func(a, b, c, d,", "<==== # changed our mind about it! # again, ()", "\"1 2 3 4 10 6 100\" print(\"\\n------\\n\") # Example", "specify more than 6 arguments, but # 6 are enough", "f = curr(func) # f is a \"curried\" version of", "a set operation # instead of repeated membership tests. #", "7 ==> evaluation <==== # we can specify more than", "None\" is better than \"minArgs = -1\", # - \"if", "while unique = True\") # Adds/updates keyword arguments. newKwArgs.update(kwArgs) #", "5) # now c = 3 c3() # () forces", "# now c = 30 c4() # () forces the", "that # - \"minArgs = None\" is better than \"minArgs", "= 10)() # ops... we repeated 'e' because we <====", "6 -1\" print(\"\\n------\\n\") # Example 4. def printTree(func, level =", "is still unbound c3 = c2(3)(f = 6)(e = 5)", "kw arg while unique = True\") # Adds/updates keyword arguments.", "arguments. 
newKwArgs.update(kwArgs) # Checks whether it's time to evaluate func.", "minArgs) if __name__ == \"__main__\": # Simple Function. def func(a,", "4) # Note that c is still unbound c3 =", "= None): if level is None: printTree(cur(func), 0) elif level", "= dict.copy(myKwArgs) # If unique is True, we don't want", "= 100): print(a, b, c, d, e, f, g) #", "func(*newArgs, **newKwArgs) # time to evaluate func else: return g(*newArgs,", "# Note that c is still unbound c3 = c2(3)(f", "unique = True, minArgs = None): \"\"\" Generates a 'curried'", "# Simple Function. def func(a, b, c, d, e, f,", "printTree(func(0), level + 1) printTree(func(1), level + 1) printTree(func) print(\"\\n------\\n\")", "c1 = f(1, 2)(3, 4) c2 = c1(e = 5)(f", "func c1 = f(1) c2 = c1(2, d = 4)", "<NAME> for pointing out that # - \"minArgs = None\"", "def g(*myArgs, **myKwArgs): def f(*args, **kwArgs): if args or kwArgs:", "# changed our mind about it! # again, () forces", "want repeated keyword arguments. if unique and not kwArgs.keys().isdisjoint(newKwArgs): raise", "cur(f, minArgs = None): return genCur(f, True, minArgs) def curr(f,", "# - \"if args\" is better than \"if len(args)\", and", "to force the evaluation # it prints \"1 2 3", "<==== # we can specify more than 6 arguments, but", "printTree(cur(func), 0) elif level == 6: func(g = '')() #", "forced return func(*myArgs, **myKwArgs) return f return g def cur(f,", "\"curried\" version of func # curr = cur with possibly", "# num args = 3 c2 = c1(4, f =", "4 5 6 100\" c4 = c2(5, g = -1)", "func(*myArgs, **myKwArgs) return f return g def cur(f, minArgs =", "# Adds/updates keyword arguments. newKwArgs.update(kwArgs) # Checks whether it's time", "more args! # Allocates data to assign to the next", "0) elif level == 6: func(g = '')() # or", "# now c = 3 c3() # () forces the", "just func('')() else: printTree(func(0), level + 1) printTree(func(1), level +", "evaluation <==== # it prints \"1 2 30 4 50", "3 4 5 6 -1\" print(\"\\n------\\n\") # Example 4. 
def", "than \"if len(args)\", and # - I should use \"isdisjoint\".", "\"1 2 3 4 5 6 100\" c4 = c2(30)(f", "with possibly repeated # keyword args c1 = f(1, 2)(3,", "4 5 6 100\" c4 = c2(30)(f = 60)(e =", "= c2(3)(f = 6)(e = 5) # now c =", "# () forces the evaluation <==== # it prints \"1", "= f(1, 2)(3, 4) c2 = c1(e = 5)(f =", "# Example 1. f = cur(func) # f is a", "repeated keyword arguments. if unique and not kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated", "raise ValueError(\"Repeated kw arg while unique = True\") # Adds/updates", "prints to the screen\". # Example 1. f = cur(func)", "prints \"1 2 3 4 5 6 -1\" print(\"\\n------\\n\") #", "unique and not kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated kw arg while unique", "a new 'f' else: # the evaluation was forced return", "again, () forces the evaluation # it prints \"1 2", "6 arguments c1 = f(1, 2, 3) # num args", "c = 3 c3() # () forces the evaluation <====", "**kwArgs): if args or kwArgs: # some more args! #", "the evaluation # it prints \"1 2 3 4 5", "None): return genCur(f, False, minArgs) if __name__ == \"__main__\": #", "6 100\" c4 = c2(30)(f = 60)(e = 50) #", "mind about it! # again, () forces the evaluation #", "newArgs = myArgs + args newKwArgs = dict.copy(myKwArgs) # If", "the evaluation after 6 arguments c1 = f(1, 2, 3)", "g = 100): print(a, b, c, d, e, f, g)", "- I should use \"isdisjoint\". # def genCur(func, unique =", "operation # instead of repeated membership tests. # - Thanks", "c2 = c1(2, d = 4) # Note that c", "forces the evaluation <==== # it prints \"1 2 3", "ValueError(\"Repeated kw arg while unique = True\") # Adds/updates keyword", "should use a set operation # instead of repeated membership", "= True\") # Adds/updates keyword arguments. newKwArgs.update(kwArgs) # Checks whether", "len(newKwArgs): return func(*newArgs, **newKwArgs) # time to evaluate func else:", "screen\". # Example 1. 
f = cur(func) # f is", "keyword args c1 = f(1, 2)(3, 4) c2 = c1(e", "num args = 6 ==> evalution <==== # it prints", "== 6: func(g = '')() # or just func('')() else:", "= 6)(e = 5) # now c = 3 c3()", "# forces the evaluation after 6 arguments c1 = f(1,", "# curr = cur with possibly repeated # keyword args", "we can specify more than 6 arguments, but # 6", "about it! # again, () forces the evaluation # it", "unique is True, we don't want repeated keyword arguments. if", "line prints to the screen\". # Example 1. f =", "Example 4. def printTree(func, level = None): if level is", "Adds/updates keyword arguments. newKwArgs.update(kwArgs) # Checks whether it's time to", "new 'f' else: # the evaluation was forced return func(*myArgs,", "elif level == 6: func(g = '')() # or just", "stress(f(n), n - 1) else: f() # enough is enough", "= 50) # now c = 30 c4() # ()", "5)(f = 6)(e = 10)() # ops... we repeated 'e'", "c4() # () forces the evaluation <==== # it prints", "forces the evaluation # it prints \"1 2 3 4", "forces the evaluation after 6 arguments c1 = f(1, 2,", "\"minArgs = None\" is better than \"minArgs = -1\", #", "arguments. if unique and not kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated kw arg", "minArgs <= len(newArgs) + len(newKwArgs): return func(*newArgs, **newKwArgs) # time", "and not kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated kw arg while unique =", "+ 1) printTree(func(1), level + 1) printTree(func) print(\"\\n------\\n\") def f2(*args):", "more than 6 arguments, but # 6 are enough to", "() forces the evaluation <==== # it prints \"1 2", "prints \"1 2 30 4 50 60 100\" print(\"\\n------\\n\") #", "cur with possibly repeated # keyword args c1 = f(1,", "4) c2 = c1(e = 5)(f = 6)(e = 10)()", "changed our mind about it! # again, () forces the", "the next 'f'. newArgs = myArgs + args newKwArgs =", "'e' because we <==== # changed our mind about it!", "it! 
# again, () forces the evaluation # it prints", "level + 1) printTree(func) print(\"\\n------\\n\") def f2(*args): print(\", \".join([\"%3d\"%(x) for", "c1 = f(1, 2, 3) # num args = 3", "a 'curried' version of a function. \"\"\" def g(*myArgs, **myKwArgs):", "= c2(30)(f = 60)(e = 50) # now c =", "\"this line prints to the screen\". # Example 1. f", "= -1) # num args = 7 ==> evaluation <====", "return genCur(f, True, minArgs) def curr(f, minArgs = None): return", "= cur(func) # f is a \"curried\" version of func", "n: stress(f(n), n - 1) else: f() # enough is", "e, f, g) # NOTE: '<====' means \"this line prints", "return g def cur(f, minArgs = None): return genCur(f, True,", "use a set operation # instead of repeated membership tests.", "evaluation # it prints \"1 2 3 4 5 6", "60 100\" print(\"\\n------\\n\") # Example 2. f = curr(func) #", "= 3 c3() # () forces the evaluation <==== #", "unbound c3 = c2(3)(f = 6)(e = 5) # now", "f = cur(func, 6) # forces the evaluation after 6", "to evaluate func else: return g(*newArgs, **newKwArgs) # returns a", "print(\"\\n------\\n\") def f2(*args): print(\", \".join([\"%3d\"%(x) for x in args])) def", "3 4 5 6 100\" c4 = c2(5, g =", "the evaluation # it prints \"1 2 3 4 10", "to the next 'f'. newArgs = myArgs + args newKwArgs", "can specify more than 6 arguments, but # 6 are", "6 ==> evalution <==== # it prints \"1 2 3", "printTree(func(1), level + 1) printTree(func) print(\"\\n------\\n\") def f2(*args): print(\", \".join([\"%3d\"%(x)", "membership tests. # - Thanks to <NAME> for pointing out", "now c = 30 c4() # () forces the evaluation", "100\" print(\"\\n------\\n\") # Example 2. 
f = curr(func) # f", "printTree(func, level = None): if level is None: printTree(cur(func), 0)", "def printTree(func, level = None): if level is None: printTree(cur(func),", "level + 1) printTree(func(1), level + 1) printTree(func) print(\"\\n------\\n\") def", "g(*newArgs, **newKwArgs) # returns a new 'f' else: # the", "minArgs = None): return genCur(f, False, minArgs) if __name__ ==", "__name__ == \"__main__\": # Simple Function. def func(a, b, c,", "# num args = 7 ==> evaluation <==== # we", "evaluate func. if minArgs is not None and minArgs <=", "a \"curried\" version of func c1 = f(1) c2 =", "6: func(g = '')() # or just func('')() else: printTree(func(0),", "n): if n: stress(f(n), n - 1) else: f() #", "b, c, d, e, f, g) # NOTE: '<====' means", "= cur(func, 6) # forces the evaluation after 6 arguments", "forces the evaluation <==== # it prints \"1 2 30", "version of a function. \"\"\" def g(*myArgs, **myKwArgs): def f(*args,", "to b49P23TIvg for suggesting that I should use a set", "# it prints \"1 2 30 4 50 60 100\"", "return f return g def cur(f, minArgs = None): return", "f2(*args): print(\", \".join([\"%3d\"%(x) for x in args])) def stress(f, n):", "'f'. newArgs = myArgs + args newKwArgs = dict.copy(myKwArgs) #", "<==== # it prints \"1 2 30 4 50 60", "than 6 arguments, but # 6 are enough to force", "assign to the next 'f'. newArgs = myArgs + args", "num args = 3 c2 = c1(4, f = 6)", "printTree(func) print(\"\\n------\\n\") def f2(*args): print(\", \".join([\"%3d\"%(x) for x in args]))", "6) # num args = 5 c3 = c2(5) #", "return func(*myArgs, **myKwArgs) return f return g def cur(f, minArgs", "g = -1) # num args = 7 ==> evaluation", "whether it's time to evaluate func. if minArgs is not", "arg while unique = True\") # Adds/updates keyword arguments. newKwArgs.update(kwArgs)", "# we can specify more than 6 arguments, but #", "= None): return genCur(f, False, minArgs) if __name__ == \"__main__\":", "c1(e = 5)(f = 6)(e = 10)() # ops... 
we", "- \"minArgs = None\" is better than \"minArgs = -1\",", "**newKwArgs) # time to evaluate func else: return g(*newArgs, **newKwArgs)", "4 50 60 100\" print(\"\\n------\\n\") # Example 2. f =", "than \"minArgs = -1\", # - \"if args\" is better", "# Checks whether it's time to evaluate func. if minArgs", "is better than \"if len(args)\", and # - I should", "5 6 100\" c4 = c2(5, g = -1) #", "g(*myArgs, **myKwArgs): def f(*args, **kwArgs): if args or kwArgs: #", "def curr(f, minArgs = None): return genCur(f, False, minArgs) if", "the screen\". # Example 1. f = cur(func) # f", "after 6 arguments c1 = f(1, 2, 3) # num", "set operation # instead of repeated membership tests. # -", "to <NAME> for pointing out that # - \"minArgs =", "if n: stress(f(n), n - 1) else: f() # enough", "curr(f, minArgs = None): return genCur(f, False, minArgs) if __name__", "2 3 4 5 6 -1\" print(\"\\n------\\n\") # Example 4.", "c1(2, d = 4) # Note that c is still", "- Thanks to b49P23TIvg for suggesting that I should use", "= 4) # Note that c is still unbound c3", "3) # num args = 3 c2 = c1(4, f", "= None\" is better than \"minArgs = -1\", # -", "# num args = 6 ==> evalution <==== # it", "= 3 c2 = c1(4, f = 6) # num", "if minArgs is not None and minArgs <= len(newArgs) +", "should use \"isdisjoint\". # def genCur(func, unique = True, minArgs", "num args = 5 c3 = c2(5) # num args", "prints \"1 2 3 4 10 6 100\" print(\"\\n------\\n\") #", "return func(*newArgs, **newKwArgs) # time to evaluate func else: return", "2 3 4 5 6 100\" c4 = c2(30)(f =", "= True, minArgs = None): \"\"\" Generates a 'curried' version", "10 6 100\" print(\"\\n------\\n\") # Example 3. f = cur(func,", "len(newArgs) + len(newKwArgs): return func(*newArgs, **newKwArgs) # time to evaluate", "curr(func) # f is a \"curried\" version of func #", "2 3 4 5 6 100\" c4 = c2(5, g", "+ 1) printTree(func) print(\"\\n------\\n\") def f2(*args): print(\", \".join([\"%3d\"%(x) for x", "# - I should use \"isdisjoint\". 
# def genCur(func, unique", "still unbound c3 = c2(3)(f = 6)(e = 5) #", "30 4 50 60 100\" print(\"\\n------\\n\") # Example 2. f", "or just func('')() else: printTree(func(0), level + 1) printTree(func(1), level", "f is a \"curried\" version of func # curr =", "# the evaluation was forced return func(*myArgs, **myKwArgs) return f", "was forced return func(*myArgs, **myKwArgs) return f return g def", "that c is still unbound c3 = c2(3)(f = 6)(e", "pointing out that # - \"minArgs = None\" is better", "# it prints \"1 2 3 4 10 6 100\"", "2 3 4 10 6 100\" print(\"\\n------\\n\") # Example 3.", "= myArgs + args newKwArgs = dict.copy(myKwArgs) # If unique", "else: return g(*newArgs, **newKwArgs) # returns a new 'f' else:", "def stress(f, n): if n: stress(f(n), n - 1) else:", "= 5 c3 = c2(5) # num args = 6", "it prints \"1 2 3 4 5 6 -1\" print(\"\\n------\\n\")", "and minArgs <= len(newArgs) + len(newKwArgs): return func(*newArgs, **newKwArgs) #", "Example 3. f = cur(func, 6) # forces the evaluation", "c, d, e, f, g = 100): print(a, b, c,", "= 60)(e = 50) # now c = 30 c4()", "# Example 4. def printTree(func, level = None): if level", "True, minArgs = None): \"\"\" Generates a 'curried' version of", "\"__main__\": # Simple Function. def func(a, b, c, d, e,", "= 6) # num args = 5 c3 = c2(5)", "1) printTree(func(1), level + 1) printTree(func) print(\"\\n------\\n\") def f2(*args): print(\",", "f is a \"curried\" version of func c1 = f(1)", "minArgs) def curr(f, minArgs = None): return genCur(f, False, minArgs)", "1. f = cur(func) # f is a \"curried\" version", "ops... we repeated 'e' because we <==== # changed our", "def f2(*args): print(\", \".join([\"%3d\"%(x) for x in args])) def stress(f,", "c2(5, g = -1) # num args = 7 ==>", "keyword arguments. newKwArgs.update(kwArgs) # Checks whether it's time to evaluate", "# Allocates data to assign to the next 'f'. newArgs", "it's time to evaluate func. 
if minArgs is not None", "6 are enough to force the evaluation # it prints", "func # curr = cur with possibly repeated # keyword", "- Thanks to <NAME> for pointing out that # -", "# f is a \"curried\" version of func c1 =", "# 6 are enough to force the evaluation # it", "to assign to the next 'f'. newArgs = myArgs +", "args newKwArgs = dict.copy(myKwArgs) # If unique is True, we", "+ len(newKwArgs): return func(*newArgs, **newKwArgs) # time to evaluate func", "# NOTE: '<====' means \"this line prints to the screen\".", "evaluation was forced return func(*myArgs, **myKwArgs) return f return g", "Checks whether it's time to evaluate func. if minArgs is", "None and minArgs <= len(newArgs) + len(newKwArgs): return func(*newArgs, **newKwArgs)", "# some more args! # Allocates data to assign to", "c = 30 c4() # () forces the evaluation <====", "= c1(4, f = 6) # num args = 5", "# Example 3. f = cur(func, 6) # forces the", "f = cur(func) # f is a \"curried\" version of", "= f(1) c2 = c1(2, d = 4) # Note", "True, we don't want repeated keyword arguments. if unique and", "\"curried\" version of func c1 = f(1) c2 = c1(2,", "it prints \"1 2 3 4 10 6 100\" print(\"\\n------\\n\")", "2. f = curr(func) # f is a \"curried\" version", "c1(4, f = 6) # num args = 5 c3", "\"\"\" Generates a 'curried' version of a function. \"\"\" def", "print(a, b, c, d, e, f, g) # NOTE: '<===='", "# time to evaluate func else: return g(*newArgs, **newKwArgs) #", "level == 6: func(g = '')() # or just func('')()", "None: printTree(cur(func), 0) elif level == 6: func(g = '')()", "print(\", \".join([\"%3d\"%(x) for x in args])) def stress(f, n): if", "the evaluation was forced return func(*myArgs, **myKwArgs) return f return", "6 100\" print(\"\\n------\\n\") # Example 3. f = cur(func, 6)", "kwArgs: # some more args! # Allocates data to assign", "6 100\" c4 = c2(5, g = -1) # num", "<==== # it prints \"1 2 3 4 5 6", "3. 
f = cur(func, 6) # forces the evaluation after", "\"if args\" is better than \"if len(args)\", and # -", "of func c1 = f(1) c2 = c1(2, d =", "I should use \"isdisjoint\". # def genCur(func, unique = True,", "d, e, f, g) # NOTE: '<====' means \"this line", "a function. \"\"\" def g(*myArgs, **myKwArgs): def f(*args, **kwArgs): if", "return genCur(f, False, minArgs) if __name__ == \"__main__\": # Simple", "\"minArgs = -1\", # - \"if args\" is better than", "out that # - \"minArgs = None\" is better than", "c3 = c2(3)(f = 6)(e = 5) # now c", "d = 4) # Note that c is still unbound", "= f(1, 2, 3) # num args = 3 c2", "evaluation after 6 arguments c1 = f(1, 2, 3) #", "4. def printTree(func, level = None): if level is None:", "60)(e = 50) # now c = 30 c4() #", "**myKwArgs) return f return g def cur(f, minArgs = None):", "a \"curried\" version of func # curr = cur with", "# keyword args c1 = f(1, 2)(3, 4) c2 =", "is better than \"minArgs = -1\", # - \"if args\"", "1) printTree(func) print(\"\\n------\\n\") def f2(*args): print(\", \".join([\"%3d\"%(x) for x in", "# instead of repeated membership tests. # - Thanks to", "# returns a new 'f' else: # the evaluation was", "==> evaluation <==== # we can specify more than 6", "= 6)(e = 10)() # ops... we repeated 'e' because", "I should use a set operation # instead of repeated", "c3 = c2(5) # num args = 6 ==> evalution", "\"\"\" def g(*myArgs, **myKwArgs): def f(*args, **kwArgs): if args or", "to evaluate func. if minArgs is not None and minArgs", "suggesting that I should use a set operation # instead", "we don't want repeated keyword arguments. if unique and not", "version of func # curr = cur with possibly repeated", "None): \"\"\" Generates a 'curried' version of a function. \"\"\"", "= 7 ==> evaluation <==== # we can specify more", "minArgs is not None and minArgs <= len(newArgs) + len(newKwArgs):", "if __name__ == \"__main__\": # Simple Function. 
def func(a, b,", "in args])) def stress(f, n): if n: stress(f(n), n -", "# or just func('')() else: printTree(func(0), level + 1) printTree(func(1),", "evaluation <==== # we can specify more than 6 arguments,", "evaluation # it prints \"1 2 3 4 10 6", "len(args)\", and # - I should use \"isdisjoint\". # def", "evaluate func else: return g(*newArgs, **newKwArgs) # returns a new", "= c2(5, g = -1) # num args = 7", "f(*args, **kwArgs): if args or kwArgs: # some more args!", "func('')() else: printTree(func(0), level + 1) printTree(func(1), level + 1)", "f(1, 2)(3, 4) c2 = c1(e = 5)(f = 6)(e", "4 10 6 100\" print(\"\\n------\\n\") # Example 3. f =", "-1\" print(\"\\n------\\n\") # Example 4. def printTree(func, level = None):", "if unique and not kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated kw arg while", "use \"isdisjoint\". # def genCur(func, unique = True, minArgs =", "e, f, g = 100): print(a, b, c, d, e,", "for x in args])) def stress(f, n): if n: stress(f(n),", "<NAME>, 2012. # # - Thanks to b49P23TIvg for suggesting", "6) # forces the evaluation after 6 arguments c1 =", "not kwArgs.keys().isdisjoint(newKwArgs): raise ValueError(\"Repeated kw arg while unique = True\")", "= None): return genCur(f, True, minArgs) def curr(f, minArgs =", "time to evaluate func else: return g(*newArgs, **newKwArgs) # returns", "50 60 100\" print(\"\\n------\\n\") # Example 2. f = curr(func)", "c2(3)(f = 6)(e = 5) # now c = 3", "def func(a, b, c, d, e, f, g = 100):", "None): if level is None: printTree(cur(func), 0) elif level ==", "args = 7 ==> evaluation <==== # we can specify", "# - \"minArgs = None\" is better than \"minArgs =", "c4 = c2(5, g = -1) # num args =", "func. 
if minArgs is not None and minArgs <= len(newArgs)", "repeated 'e' because we <==== # changed our mind about", "# f is a \"curried\" version of func # curr", "genCur(func, unique = True, minArgs = None): \"\"\" Generates a", "100\" c4 = c2(30)(f = 60)(e = 50) # now", "for suggesting that I should use a set operation #", "6)(e = 5) # now c = 3 c3() #", "because we <==== # changed our mind about it! #", "evalution <==== # it prints \"1 2 3 4 5", "'')() # or just func('')() else: printTree(func(0), level + 1)", "# again, () forces the evaluation # it prints \"1", "def genCur(func, unique = True, minArgs = None): \"\"\" Generates", "f(1) c2 = c1(2, d = 4) # Note that", "10)() # ops... we repeated 'e' because we <==== #", "is not None and minArgs <= len(newArgs) + len(newKwArgs): return", "# ops... we repeated 'e' because we <==== # changed", "cur(func, 6) # forces the evaluation after 6 arguments c1", "# - Thanks to b49P23TIvg for suggesting that I should", "by <NAME>, 2012. # # - Thanks to b49P23TIvg for", "# Coded by <NAME>, 2012. # # - Thanks to", "def f(*args, **kwArgs): if args or kwArgs: # some more", "our mind about it! # again, () forces the evaluation", "5 c3 = c2(5) # num args = 6 ==>", "\"1 2 3 4 5 6 -1\" print(\"\\n------\\n\") # Example", "not None and minArgs <= len(newArgs) + len(newKwArgs): return func(*newArgs,", "Thanks to <NAME> for pointing out that # - \"minArgs", "30 c4() # () forces the evaluation <==== # it", "<= len(newArgs) + len(newKwArgs): return func(*newArgs, **newKwArgs) # time to", "= 6 ==> evalution <==== # it prints \"1 2", "c is still unbound c3 = c2(3)(f = 6)(e =", "args\" is better than \"if len(args)\", and # - I", "but # 6 are enough to force the evaluation #", "now c = 3 c3() # () forces the evaluation", "curr = cur with possibly repeated # keyword args c1", "newKwArgs.update(kwArgs) # Checks whether it's time to evaluate func. 
if", "'f' else: # the evaluation was forced return func(*myArgs, **myKwArgs)", "Note that c is still unbound c3 = c2(3)(f =", "of a function. \"\"\" def g(*myArgs, **myKwArgs): def f(*args, **kwArgs):", "True\") # Adds/updates keyword arguments. newKwArgs.update(kwArgs) # Checks whether it's", "args c1 = f(1, 2)(3, 4) c2 = c1(e =", "# def genCur(func, unique = True, minArgs = None): \"\"\"", "# - Thanks to <NAME> for pointing out that #", "- 1) else: f() # enough is enough stress(cur(f2), 100)", "print(\"\\n------\\n\") # Example 4. def printTree(func, level = None): if", "'<====' means \"this line prints to the screen\". # Example", "f = 6) # num args = 5 c3 =", "level is None: printTree(cur(func), 0) elif level == 6: func(g", "x in args])) def stress(f, n): if n: stress(f(n), n", "is a \"curried\" version of func # curr = cur", "of repeated membership tests. # - Thanks to <NAME> for", "data to assign to the next 'f'. newArgs = myArgs", "Generates a 'curried' version of a function. \"\"\" def g(*myArgs,", "repeated membership tests. # - Thanks to <NAME> for pointing", "b49P23TIvg for suggesting that I should use a set operation", "tests. # - Thanks to <NAME> for pointing out that", "function. \"\"\" def g(*myArgs, **myKwArgs): def f(*args, **kwArgs): if args", "NOTE: '<====' means \"this line prints to the screen\". #", "= 5)(f = 6)(e = 10)() # ops... we repeated", "c3() # () forces the evaluation <==== # it prints", "it prints \"1 2 30 4 50 60 100\" print(\"\\n------\\n\")", "f return g def cur(f, minArgs = None): return genCur(f,", "prints \"1 2 3 4 5 6 100\" c4 =", "100): print(a, b, c, d, e, f, g) # NOTE:", "the evaluation <==== # it prints \"1 2 30 4", "() forces the evaluation # it prints \"1 2 3", "= '')() # or just func('')() else: printTree(func(0), level +", "\"isdisjoint\". # def genCur(func, unique = True, minArgs = None):", "myArgs + args newKwArgs = dict.copy(myKwArgs) # If unique is", "3 4 10 6 100\" print(\"\\n------\\n\") # Example 3. 
f", "= 30 c4() # () forces the evaluation <==== #", "next 'f'. newArgs = myArgs + args newKwArgs = dict.copy(myKwArgs)", "= 5) # now c = 3 c3() # ()" ]
[ "small piece of server software written by <NAME> to allow", "people to talk to eachother from any computer as long", "out the project at: https://github.com/Ewpratten/chat :: Disclaimer While chatting, keep", "by <NAME> to allow people to talk to eachother from", "over a raw TCP socket and data is temporarily stored", "data is temporarily stored in plaintext while the server handles", "project at: https://github.com/Ewpratten/chat :: Disclaimer While chatting, keep in mind", "--------------- BEGIN SESSION --------------- You have connected to a chat", "internet connection. (Even an arduino!). Check out the project at:", "You have connected to a chat server. Welcome! :: About", "sent to and from this server over a raw TCP", "computer as long as it has an internet connection. (Even", "server software written by <NAME> to allow people to talk", "to and from this server over a raw TCP socket", "server over a raw TCP socket and data is temporarily", "does not follow it. All data is sent to and", "at: https://github.com/Ewpratten/chat :: Disclaimer While chatting, keep in mind that,", "or regulation about privacy, this server does not follow it.", "and data is temporarily stored in plaintext while the server", "Welcome! :: About Chat is a small piece of server", "of server software written by <NAME> to allow people to", "about privacy, this server does not follow it. All data", "TCP socket and data is temporarily stored in plaintext while", ":: About Chat is a small piece of server software", "a rule or regulation about privacy, this server does not", "there is a rule or regulation about privacy, this server", "server does not follow it. All data is sent to", "mind that, if there is a rule or regulation about", "socket and data is temporarily stored in plaintext while the", "have connected to a chat server. Welcome! 
:: About Chat", "any computer as long as it has an internet connection.", "the project at: https://github.com/Ewpratten/chat :: Disclaimer While chatting, keep in", "broadcasting Now that's out of the way so, happy chatting!", "piece of server software written by <NAME> to allow people", "as it has an internet connection. (Even an arduino!). Check", "as long as it has an internet connection. (Even an", "data is sent to and from this server over a", "= \"\"\" --------------- BEGIN SESSION --------------- You have connected to", "it. All data is sent to and from this server", "follow it. All data is sent to and from this", "to eachother from any computer as long as it has", "greeting = \"\"\" --------------- BEGIN SESSION --------------- You have connected", "<NAME> to allow people to talk to eachother from any", "a small piece of server software written by <NAME> to", "While chatting, keep in mind that, if there is a", "--------------- You have connected to a chat server. Welcome! ::", "is sent to and from this server over a raw", "rule or regulation about privacy, this server does not follow", ":: Disclaimer While chatting, keep in mind that, if there", "an arduino!). Check out the project at: https://github.com/Ewpratten/chat :: Disclaimer", "regulation about privacy, this server does not follow it. All", "the server handles message broadcasting Now that's out of the", "software written by <NAME> to allow people to talk to", "is temporarily stored in plaintext while the server handles message", "in plaintext while the server handles message broadcasting Now that's", "message broadcasting Now that's out of the way so, happy", "plaintext while the server handles message broadcasting Now that's out", "long as it has an internet connection. (Even an arduino!).", "that, if there is a rule or regulation about privacy,", "to talk to eachother from any computer as long as", "to a chat server. Welcome! :: About Chat is a", "it has an internet connection. 
(Even an arduino!). Check out", "All data is sent to and from this server over", "\"\"\" --------------- BEGIN SESSION --------------- You have connected to a", "server handles message broadcasting Now that's out of the way", "Check out the project at: https://github.com/Ewpratten/chat :: Disclaimer While chatting,", "if there is a rule or regulation about privacy, this", "Chat is a small piece of server software written by", "a raw TCP socket and data is temporarily stored in", "not follow it. All data is sent to and from", "SESSION --------------- You have connected to a chat server. Welcome!", "arduino!). Check out the project at: https://github.com/Ewpratten/chat :: Disclaimer While", "raw TCP socket and data is temporarily stored in plaintext", "keep in mind that, if there is a rule or", "from any computer as long as it has an internet", "Disclaimer While chatting, keep in mind that, if there is", "this server does not follow it. All data is sent", "while the server handles message broadcasting Now that's out of", "that's out of the way so, happy chatting! --------------------------------------------- \"\"\"", "server. Welcome! :: About Chat is a small piece of", "connected to a chat server. Welcome! :: About Chat is", "stored in plaintext while the server handles message broadcasting Now", "eachother from any computer as long as it has an", "(Even an arduino!). Check out the project at: https://github.com/Ewpratten/chat ::", "Now that's out of the way so, happy chatting! ---------------------------------------------", "this server over a raw TCP socket and data is", "https://github.com/Ewpratten/chat :: Disclaimer While chatting, keep in mind that, if", "an internet connection. (Even an arduino!). 
Check out the project", "written by <NAME> to allow people to talk to eachother", "from this server over a raw TCP socket and data", "About Chat is a small piece of server software written", "in mind that, if there is a rule or regulation", "has an internet connection. (Even an arduino!). Check out the", "talk to eachother from any computer as long as it", "temporarily stored in plaintext while the server handles message broadcasting", "is a rule or regulation about privacy, this server does", "chatting, keep in mind that, if there is a rule", "BEGIN SESSION --------------- You have connected to a chat server.", "is a small piece of server software written by <NAME>", "connection. (Even an arduino!). Check out the project at: https://github.com/Ewpratten/chat", "handles message broadcasting Now that's out of the way so,", "privacy, this server does not follow it. All data is", "chat server. Welcome! :: About Chat is a small piece", "a chat server. Welcome! :: About Chat is a small", "to allow people to talk to eachother from any computer", "and from this server over a raw TCP socket and", "allow people to talk to eachother from any computer as" ]
[ "\"gj\": \"허\"} def decode_label(out): # out : (1, 32, 42)", "\"저\", \"wh\": \"조\", \"wn\": \"주\", \"ak\": \"마\", \"aj\": \"머\", \"ah\":", "outstr = '' for i in out_best: if i <", "\"부산 \", \"I\": \"울산 \", \"J\": \"대구 \", \"K\": \"경북", "if i < len(letters): outstr += letters[i] return outstr def", "\"sh\": \"노\", \"sn\": \"누\", \"fk\": \"라\", \"fj\": \"러\", \"fh\": \"로\",", "\", \"J\": \"대구 \", \"K\": \"경북 \", \"L\": \"경남 \",", "cv2 import itertools, os, time import numpy as np from", "argparse from keras import backend as K K.set_learning_phase(0) Region =", "- start) print(\"Time : \",total_time / total) print(\"ACC : \",", "=args.test_img test_imgs = os.listdir(args.test_img) total = 0 acc = 0", "30), (0,0,0), -1) # cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8,", "for i in out_best: if i < len(letters): outstr +=", "def label_to_hangul(label): # eng -> hangul region = label[0] two_num", "acc += 1 else: predOk = \"False\" total += 1", "for test_img in test_imgs: img = cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE)", "Model import get_Model from parameter import letters import argparse from", "predOk )) # cv2.rectangle(img, (0,0), (150, 30), (0,0,0), -1) #", "#cv2.imshow(\"q\", img) #if cv2.waitKey(0) == 27: # break #cv2.destroyAllWindows() end", "print(\"Time : \",total_time / total) print(\"ACC : \", acc /", "# break #cv2.destroyAllWindows() end = time.time() total_time = (end -", "\"qn\": \"부\", \"ek\": \"다\", \"ej\": \"더\", \"eh\": \"도\", \"en\": \"두\",", "file directory\", type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\", help=\"Test image directory\", type=str,", "file!\") test_dir =args.test_img test_imgs = os.listdir(args.test_img) total = 0 acc", "itertools.groupby(out_best)] # remove overlap value outstr = '' for i", "+= letters[i] return outstr def label_to_hangul(label): # eng -> hangul", "test_imgs = os.listdir(args.test_img) total = 0 acc = 0 letter_total", "for i in 
range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i] == test_img[i]: letter_acc", "import itertools, os, time import numpy as np from Model", "except: pass try: hangul = Hangul[hangul] except: pass return region", "four_num parser = argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\", help=\"weight file directory\", type=str,", "decode_label(net_out_value) for i in range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i] == test_img[i]:", "-> len = 32 out_best = [k for k, g", "as K K.set_learning_phase(0) Region = {\"A\": \"서울 \", \"B\": \"경기", "\"버\", \"qh\": \"보\", \"qn\": \"부\", \"ek\": \"다\", \"ej\": \"더\", \"eh\":", "\", \"I\": \"울산 \", \"J\": \"대구 \", \"K\": \"경북 \",", "backend as K K.set_learning_phase(0) Region = {\"A\": \"서울 \", \"B\":", "(1, 32, 42) out_best = list(np.argmax(out[0, 2:], axis=1)) # get", "\"sj\": \"너\", \"sh\": \"노\", \"sn\": \"누\", \"fk\": \"라\", \"fj\": \"러\",", ": (1, 32, 42) out_best = list(np.argmax(out[0, 2:], axis=1)) #", "out : (1, 32, 42) out_best = list(np.argmax(out[0, 2:], axis=1))", "\"True\" if pred_texts == test_img[0:-4]: acc += 1 else: predOk", "%s / True: %s / net_out_value: %s / ' %", "axis=-1) img_pred = np.expand_dims(img_pred, axis=0) net_out_value = model.predict(img_pred) pred_texts =", "\"울산 \", \"J\": \"대구 \", \"K\": \"경북 \", \"L\": \"경남", "\"K\": \"경북 \", \"L\": \"경남 \", \"M\": \"전남 \", \"N\":", "\"ek\": \"다\", \"ej\": \"더\", \"eh\": \"도\", \"en\": \"두\", \"rk\": \"가\",", "% (pred_texts, test_img[0:-4], predOk )) # cv2.rectangle(img, (0,0), (150, 30),", "try: region = Region[region] if region != 'Z' else ''", "\"사\", \"tj\": \"서\", \"th\": \"소\", \"tn\": \"수\", \"gj\": \"허\"} def", "= [k for k, g in itertools.groupby(out_best)] # remove overlap", "print('Predicted: %s / True: %s / net_out_value: %s / '", "import cv2 import itertools, os, time import numpy as np", "+ test_img, cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32) img_pred = cv2.resize(img_pred, (128,", "/ ' 
% (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s / True: %s", "in out_best: if i < len(letters): outstr += letters[i] return", "test_img[0:-4]: acc += 1 else: predOk = \"False\" total +=", "\"도\", \"en\": \"두\", \"rk\": \"가\", \"rj\": \"거\", \"rh\": \"고\", \"rn\":", "K.set_learning_phase(0) Region = {\"A\": \"서울 \", \"B\": \"경기 \", \"C\":", "True: %s / net_out_value: %s / ' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4])))", "\"ak\": \"마\", \"aj\": \"머\", \"ah\": \"모\", \"an\": \"무\", \"sk\": \"나\",", "\", \"B\": \"경기 \", \"C\": \"인천 \", \"D\": \"강원 \",", "\"N\": \"광주 \", \"O\": \"전북 \", \"P\": \"제주 \"} Hangul", "\", \"L\": \"경남 \", \"M\": \"전남 \", \"N\": \"광주 \",", "np.expand_dims(img_pred, axis=-1) img_pred = np.expand_dims(img_pred, axis=0) net_out_value = model.predict(img_pred) pred_texts", "# get max index -> len = 32 out_best =", "img) #if cv2.waitKey(0) == 27: # break #cv2.destroyAllWindows() end =", "= img.astype(np.float32) img_pred = cv2.resize(img_pred, (128, 64)) img_pred = (img_pred", "(pred_texts, test_img[0:-4], predOk )) # cv2.rectangle(img, (0,0), (150, 30), (0,0,0),", "= \"False\" total += 1 # print('Predicted: %s / True:", "\"tk\": \"사\", \"tj\": \"서\", \"th\": \"소\", \"tn\": \"수\", \"gj\": \"허\"}", "total_time = (end - start) print(\"Time : \",total_time / total)", "/ True: %s / predOk: %s ' % (pred_texts, test_img[0:-4],", "/ total) print(\"ACC : \", acc / total) print(\"letter ACC", "\"충북 \", \"H\": \"부산 \", \"I\": \"울산 \", \"J\": \"대구", "in range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i] == test_img[i]: letter_acc += 1", "pred_texts[i] == test_img[i]: letter_acc += 1 letter_total += max(len(pred_texts), len(test_img[0:-4]))", "Hangul = {\"dk\": \"아\", \"dj\": \"어\", \"dh\": \"오\", \"dn\": \"우\",", "\"자\", \"wj\": \"저\", \"wh\": \"조\", \"wn\": \"주\", \"ak\": \"마\", \"aj\":", "remove overlap value outstr = '' for i in out_best:", "Region[region] if region != 'Z' else '' 
except: pass try:", "np from Model import get_Model from parameter import letters import", "\"허\"} def decode_label(out): # out : (1, 32, 42) out_best", "two_num = label[1:3] hangul = label[3:5] four_num = label[5:] try:", "model = get_Model(training=False) try: model.load_weights(args.weight) print(\"...Previous weight data...\") except: raise", "# out : (1, 32, 42) out_best = list(np.argmax(out[0, 2:],", "Exception(\"No weight file!\") test_dir =args.test_img test_imgs = os.listdir(args.test_img) total =", "0 letter_total = 0 letter_acc = 0 start = time.time()", "\"dj\": \"어\", \"dh\": \"오\", \"dn\": \"우\", \"qk\": \"바\", \"qj\": \"버\",", "label[1:3] hangul = label[3:5] four_num = label[5:] try: region =", "Hangul[hangul] except: pass return region + two_num + hangul +", "\"경북 \", \"L\": \"경남 \", \"M\": \"전남 \", \"N\": \"광주", "max(len(pred_texts), len(test_img[0:-4])) predOk = \"True\" if pred_texts == test_img[0:-4]: acc", "1.0 img_pred = img_pred.T img_pred = np.expand_dims(img_pred, axis=-1) img_pred =", "= \"True\" if pred_texts == test_img[0:-4]: acc += 1 else:", "letter_total = 0 letter_acc = 0 start = time.time() for", "\"주\", \"ak\": \"마\", \"aj\": \"머\", \"ah\": \"모\", \"an\": \"무\", \"sk\":", "\"sk\": \"나\", \"sj\": \"너\", \"sh\": \"노\", \"sn\": \"누\", \"fk\": \"라\",", "\", \"M\": \"전남 \", \"N\": \"광주 \", \"O\": \"전북 \",", "%s / ' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s / True:", "= time.time() for test_img in test_imgs: img = cv2.imread(test_dir +", "letter_acc = 0 start = time.time() for test_img in test_imgs:", "if pred_texts == test_img[0:-4]: acc += 1 else: predOk =", "= list(np.argmax(out[0, 2:], axis=1)) # get max index -> len", "\"경기 \", \"C\": \"인천 \", \"D\": \"강원 \", \"E\": \"충남", "< len(letters): outstr += letters[i] return outstr def label_to_hangul(label): #", "default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\", help=\"Test image directory\", type=str, 
default=\"./DB/test/\") args =", "/ predOk: %s ' % (pred_texts, test_img[0:-4], predOk )) #", "= model.predict(img_pred) pred_texts = decode_label(net_out_value) for i in range(min(len(pred_texts), len(test_img[0:-4]))):", "for k, g in itertools.groupby(out_best)] # remove overlap value outstr", "\"L\": \"경남 \", \"M\": \"전남 \", \"N\": \"광주 \", \"O\":", "\"tn\": \"수\", \"gj\": \"허\"} def decode_label(out): # out : (1,", "\"마\", \"aj\": \"머\", \"ah\": \"모\", \"an\": \"무\", \"sk\": \"나\", \"sj\":", "pass try: hangul = Hangul[hangul] except: pass return region +", "\"제주 \"} Hangul = {\"dk\": \"아\", \"dj\": \"어\", \"dh\": \"오\",", "np.expand_dims(img_pred, axis=0) net_out_value = model.predict(img_pred) pred_texts = decode_label(net_out_value) for i", "\"th\": \"소\", \"tn\": \"수\", \"gj\": \"허\"} def decode_label(out): # out", "parameter import letters import argparse from keras import backend as", "{\"A\": \"서울 \", \"B\": \"경기 \", \"C\": \"인천 \", \"D\":", "hangul region = label[0] two_num = label[1:3] hangul = label[3:5]", "1 # print('Predicted: %s / True: %s / net_out_value: %s", "= 32 out_best = [k for k, g in itertools.groupby(out_best)]", "\"tj\": \"서\", \"th\": \"소\", \"tn\": \"수\", \"gj\": \"허\"} def decode_label(out):", "CRNN model model = get_Model(training=False) try: model.load_weights(args.weight) print(\"...Previous weight data...\")", "total = 0 acc = 0 letter_total = 0 letter_acc", "time.time() for test_img in test_imgs: img = cv2.imread(test_dir + test_img,", "axis=1)) # get max index -> len = 32 out_best", "data...\") except: raise Exception(\"No weight file!\") test_dir =args.test_img test_imgs =", "\"ej\": \"더\", \"eh\": \"도\", \"en\": \"두\", \"rk\": \"가\", \"rj\": \"거\",", "value outstr = '' for i in out_best: if i", "\"G\": \"충북 \", \"H\": \"부산 \", \"I\": \"울산 \", \"J\":", "# eng -> hangul region = label[0] two_num = label[1:3]", "pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow(\"q\", img) #if cv2.waitKey(0)", 
"import letters import argparse from keras import backend as K", "# cv2.rectangle(img, (0,0), (150, 30), (0,0,0), -1) # cv2.putText(img, pred_texts,", "img_pred = img_pred.T img_pred = np.expand_dims(img_pred, axis=-1) img_pred = np.expand_dims(img_pred,", "\"fj\": \"러\", \"fh\": \"로\", \"fn\": \"루\", \"tk\": \"사\", \"tj\": \"서\",", "= time.time() total_time = (end - start) print(\"Time : \",total_time", "import argparse from keras import backend as K K.set_learning_phase(0) Region", "\"fn\": \"루\", \"tk\": \"사\", \"tj\": \"서\", \"th\": \"소\", \"tn\": \"수\",", "\", \"P\": \"제주 \"} Hangul = {\"dk\": \"아\", \"dj\": \"어\",", "\"서\", \"th\": \"소\", \"tn\": \"수\", \"gj\": \"허\"} def decode_label(out): #", "default=\"./DB/test/\") args = parser.parse_args() # Get CRNN model model =", "(5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow(\"q\", img) #if cv2.waitKey(0) ==", "break #cv2.destroyAllWindows() end = time.time() total_time = (end - start)", "\"나\", \"sj\": \"너\", \"sh\": \"노\", \"sn\": \"누\", \"fk\": \"라\", \"fj\":", "parser.add_argument(\"-w\", \"--weight\", help=\"weight file directory\", type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\", help=\"Test", "cv2.resize(img_pred, (128, 64)) img_pred = (img_pred / 255.0) * 2.0", "\"qj\": \"버\", \"qh\": \"보\", \"qn\": \"부\", \"ek\": \"다\", \"ej\": \"더\",", "type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\", help=\"Test image directory\", type=str, default=\"./DB/test/\") args", "/ net_out_value: %s / ' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s", "net_out_value: %s / ' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s /", "region = label[0] two_num = label[1:3] hangul = label[3:5] four_num", "img_pred = cv2.resize(img_pred, (128, 64)) img_pred = (img_pred / 255.0)", "\"어\", \"dh\": \"오\", \"dn\": \"우\", \"qk\": \"바\", \"qj\": \"버\", \"qh\":", 
"k, g in itertools.groupby(out_best)] # remove overlap value outstr =", "out_best: if i < len(letters): outstr += letters[i] return outstr", "\"소\", \"tn\": \"수\", \"gj\": \"허\"} def decode_label(out): # out :", "'' for i in out_best: if i < len(letters): outstr", "img_pred = np.expand_dims(img_pred, axis=-1) img_pred = np.expand_dims(img_pred, axis=0) net_out_value =", "eng -> hangul region = label[0] two_num = label[1:3] hangul", "i in out_best: if i < len(letters): outstr += letters[i]", "# Get CRNN model model = get_Model(training=False) try: model.load_weights(args.weight) print(\"...Previous", "argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\", help=\"weight file directory\", type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\",", "region != 'Z' else '' except: pass try: hangul =", "#if cv2.waitKey(0) == 27: # break #cv2.destroyAllWindows() end = time.time()", "2.0 - 1.0 img_pred = img_pred.T img_pred = np.expand_dims(img_pred, axis=-1)", "\"무\", \"sk\": \"나\", \"sj\": \"너\", \"sh\": \"노\", \"sn\": \"누\", \"fk\":", "test_imgs: img = cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32)", "\"너\", \"sh\": \"노\", \"sn\": \"누\", \"fk\": \"라\", \"fj\": \"러\", \"fh\":", "\"dn\": \"우\", \"qk\": \"바\", \"qj\": \"버\", \"qh\": \"보\", \"qn\": \"부\",", "weight data...\") except: raise Exception(\"No weight file!\") test_dir =args.test_img test_imgs", "import get_Model from parameter import letters import argparse from keras", "help=\"Test image directory\", type=str, default=\"./DB/test/\") args = parser.parse_args() # Get", "= cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32) img_pred =", "= label[3:5] four_num = label[5:] try: region = Region[region] if", "= parser.parse_args() # Get CRNN model model = get_Model(training=False) try:", "\"fk\": \"라\", \"fj\": \"러\", \"fh\": \"로\", \"fn\": \"루\", \"tk\": \"사\",", "img = cv2.imread(test_dir + 
test_img, cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32) img_pred", "\"우\", \"qk\": \"바\", \"qj\": \"버\", \"qh\": \"보\", \"qn\": \"부\", \"ek\":", "\"an\": \"무\", \"sk\": \"나\", \"sj\": \"너\", \"sh\": \"노\", \"sn\": \"누\",", "= np.expand_dims(img_pred, axis=0) net_out_value = model.predict(img_pred) pred_texts = decode_label(net_out_value) for", "directory\", type=str, default=\"./DB/test/\") args = parser.parse_args() # Get CRNN model", "'' except: pass try: hangul = Hangul[hangul] except: pass return", "start = time.time() for test_img in test_imgs: img = cv2.imread(test_dir", "(img_pred / 255.0) * 2.0 - 1.0 img_pred = img_pred.T", "= 0 letter_acc = 0 start = time.time() for test_img", "= {\"A\": \"서울 \", \"B\": \"경기 \", \"C\": \"인천 \",", "= 0 acc = 0 letter_total = 0 letter_acc =", "as np from Model import get_Model from parameter import letters", "total) print(\"ACC : \", acc / total) print(\"letter ACC :", "raise Exception(\"No weight file!\") test_dir =args.test_img test_imgs = os.listdir(args.test_img) total", "<reponame>khayam-hafezi/CRNN-keras-persian import cv2 import itertools, os, time import numpy as", "cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow(\"q\", img) #if", "def decode_label(out): # out : (1, 32, 42) out_best =", "\"가\", \"rj\": \"거\", \"rh\": \"고\", \"rn\": \"구\", \"wk\": \"자\", \"wj\":", "+= max(len(pred_texts), len(test_img[0:-4])) predOk = \"True\" if pred_texts == test_img[0:-4]:", "\"qk\": \"바\", \"qj\": \"버\", \"qh\": \"보\", \"qn\": \"부\", \"ek\": \"다\",", "\"H\": \"부산 \", \"I\": \"울산 \", \"J\": \"대구 \", \"K\":", "from Model import get_Model from parameter import letters import argparse", "= label[1:3] hangul = label[3:5] four_num = label[5:] try: region", "\"F\": \"대전 \", \"G\": \"충북 \", \"H\": \"부산 \", \"I\":", ": \",total_time / total) print(\"ACC : \", acc / total)", "cv2.rectangle(img, (0,0), (150, 30), (0,0,0), -1) # cv2.putText(img, pred_texts, (5,", "\"로\", \"fn\": 
\"루\", \"tk\": \"사\", \"tj\": \"서\", \"th\": \"소\", \"tn\":", "20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow(\"q\", img) #if cv2.waitKey(0) == 27:", "0 acc = 0 letter_total = 0 letter_acc = 0", "try: model.load_weights(args.weight) print(\"...Previous weight data...\") except: raise Exception(\"No weight file!\")", "(label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s / True: %s / predOk: %s", "\"wj\": \"저\", \"wh\": \"조\", \"wn\": \"주\", \"ak\": \"마\", \"aj\": \"머\",", "\"서울 \", \"B\": \"경기 \", \"C\": \"인천 \", \"D\": \"강원", "+ hangul + four_num parser = argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\", help=\"weight", "cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32) img_pred = cv2.resize(img_pred, (128, 64)) img_pred", "\"고\", \"rn\": \"구\", \"wk\": \"자\", \"wj\": \"저\", \"wh\": \"조\", \"wn\":", "/ 255.0) * 2.0 - 1.0 img_pred = img_pred.T img_pred", "get_Model(training=False) try: model.load_weights(args.weight) print(\"...Previous weight data...\") except: raise Exception(\"No weight", "in itertools.groupby(out_best)] # remove overlap value outstr = '' for", "\"조\", \"wn\": \"주\", \"ak\": \"마\", \"aj\": \"머\", \"ah\": \"모\", \"an\":", "\"경남 \", \"M\": \"전남 \", \"N\": \"광주 \", \"O\": \"전북", "0 start = time.time() for test_img in test_imgs: img =", "\"대구 \", \"K\": \"경북 \", \"L\": \"경남 \", \"M\": \"전남", "= 0 start = time.time() for test_img in test_imgs: img", "os, time import numpy as np from Model import get_Model", "* 2.0 - 1.0 img_pred = img_pred.T img_pred = np.expand_dims(img_pred,", "\"E\": \"충남 \", \"F\": \"대전 \", \"G\": \"충북 \", \"H\":", "%s / True: %s / predOk: %s ' % (pred_texts,", "(255,255,255),2) #cv2.imshow(\"q\", img) #if cv2.waitKey(0) == 27: # break #cv2.destroyAllWindows()", "\"전북 \", \"P\": \"제주 \"} Hangul = {\"dk\": \"아\", \"dj\":", "get max index -> len = 32 out_best = [k", "= '' for i in out_best: if i < len(letters):", "letters[i] return outstr def 
label_to_hangul(label): # eng -> hangul region", "{\"dk\": \"아\", \"dj\": \"어\", \"dh\": \"오\", \"dn\": \"우\", \"qk\": \"바\",", "test_dir =args.test_img test_imgs = os.listdir(args.test_img) total = 0 acc =", "\"dh\": \"오\", \"dn\": \"우\", \"qk\": \"바\", \"qj\": \"버\", \"qh\": \"보\",", "\", \"D\": \"강원 \", \"E\": \"충남 \", \"F\": \"대전 \",", "args = parser.parse_args() # Get CRNN model model = get_Model(training=False)", "== 27: # break #cv2.destroyAllWindows() end = time.time() total_time =", "cv2.waitKey(0) == 27: # break #cv2.destroyAllWindows() end = time.time() total_time", "2:], axis=1)) # get max index -> len = 32", "\"qh\": \"보\", \"qn\": \"부\", \"ek\": \"다\", \"ej\": \"더\", \"eh\": \"도\",", "\", \"N\": \"광주 \", \"O\": \"전북 \", \"P\": \"제주 \"}", "\"I\": \"울산 \", \"J\": \"대구 \", \"K\": \"경북 \", \"L\":", "else '' except: pass try: hangul = Hangul[hangul] except: pass", "test_img[i]: letter_acc += 1 letter_total += max(len(pred_texts), len(test_img[0:-4])) predOk =", "total += 1 # print('Predicted: %s / True: %s /", "= Hangul[hangul] except: pass return region + two_num + hangul", "' % (pred_texts, test_img[0:-4], predOk )) # cv2.rectangle(img, (0,0), (150,", "return outstr def label_to_hangul(label): # eng -> hangul region =", "-> hangul region = label[0] two_num = label[1:3] hangul =", "= cv2.resize(img_pred, (128, 64)) img_pred = (img_pred / 255.0) *", "i < len(letters): outstr += letters[i] return outstr def label_to_hangul(label):", "\"모\", \"an\": \"무\", \"sk\": \"나\", \"sj\": \"너\", \"sh\": \"노\", \"sn\":", "\"O\": \"전북 \", \"P\": \"제주 \"} Hangul = {\"dk\": \"아\",", "= img_pred.T img_pred = np.expand_dims(img_pred, axis=-1) img_pred = np.expand_dims(img_pred, axis=0)", "= decode_label(net_out_value) for i in range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i] ==", "print(\"ACC : \", acc / total) print(\"letter ACC : \",", "\"wn\": \"주\", \"ak\": \"마\", \"aj\": \"머\", \"ah\": \"모\", \"an\": \"무\",", "len = 32 out_best = [k for k, g in", 
"img_pred.T img_pred = np.expand_dims(img_pred, axis=-1) img_pred = np.expand_dims(img_pred, axis=0) net_out_value", "letter_acc += 1 letter_total += max(len(pred_texts), len(test_img[0:-4])) predOk = \"True\"", "end = time.time() total_time = (end - start) print(\"Time :", "\"누\", \"fk\": \"라\", \"fj\": \"러\", \"fh\": \"로\", \"fn\": \"루\", \"tk\":", "cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow(\"q\", img) #if cv2.waitKey(0) == 27: #", "= (end - start) print(\"Time : \",total_time / total) print(\"ACC", "\"fh\": \"로\", \"fn\": \"루\", \"tk\": \"사\", \"tj\": \"서\", \"th\": \"소\",", "(0,0), (150, 30), (0,0,0), -1) # cv2.putText(img, pred_texts, (5, 20),", "'Z' else '' except: pass try: hangul = Hangul[hangul] except:", "\"인천 \", \"D\": \"강원 \", \"E\": \"충남 \", \"F\": \"대전", "\"wh\": \"조\", \"wn\": \"주\", \"ak\": \"마\", \"aj\": \"머\", \"ah\": \"모\",", "\"러\", \"fh\": \"로\", \"fn\": \"루\", \"tk\": \"사\", \"tj\": \"서\", \"th\":", "i in range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i] == test_img[i]: letter_acc +=", "two_num + hangul + four_num parser = argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\",", "\"M\": \"전남 \", \"N\": \"광주 \", \"O\": \"전북 \", \"P\":", "\"오\", \"dn\": \"우\", \"qk\": \"바\", \"qj\": \"버\", \"qh\": \"보\", \"qn\":", ")) # cv2.rectangle(img, (0,0), (150, 30), (0,0,0), -1) # cv2.putText(img,", "cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32) img_pred = cv2.resize(img_pred,", "\"거\", \"rh\": \"고\", \"rn\": \"구\", \"wk\": \"자\", \"wj\": \"저\", \"wh\":", "list(np.argmax(out[0, 2:], axis=1)) # get max index -> len =", "(end - start) print(\"Time : \",total_time / total) print(\"ACC :", "if region != 'Z' else '' except: pass try: hangul", "\"보\", \"qn\": \"부\", \"ek\": \"다\", \"ej\": \"더\", \"eh\": \"도\", \"en\":", "label_to_hangul(test_img[0:-4]))) print('Predicted: %s / True: %s / predOk: %s '", "len(letters): outstr += letters[i] return outstr def 
label_to_hangul(label): # eng", "= (img_pred / 255.0) * 2.0 - 1.0 img_pred =", "model.load_weights(args.weight) print(\"...Previous weight data...\") except: raise Exception(\"No weight file!\") test_dir", "four_num = label[5:] try: region = Region[region] if region !=", "test_img[0:-4], predOk )) # cv2.rectangle(img, (0,0), (150, 30), (0,0,0), -1)", "range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i] == test_img[i]: letter_acc += 1 letter_total", "region + two_num + hangul + four_num parser = argparse.ArgumentParser()", "+= 1 # print('Predicted: %s / True: %s / net_out_value:", "= label[0] two_num = label[1:3] hangul = label[3:5] four_num =", "\", \"C\": \"인천 \", \"D\": \"강원 \", \"E\": \"충남 \",", "parser.add_argument(\"-t\", \"--test_img\", help=\"Test image directory\", type=str, default=\"./DB/test/\") args = parser.parse_args()", "test_img, cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32) img_pred = cv2.resize(img_pred, (128, 64))", "from keras import backend as K K.set_learning_phase(0) Region = {\"A\":", "# print('Predicted: %s / True: %s / net_out_value: %s /", "# cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow(\"q\", img)", "\", \"O\": \"전북 \", \"P\": \"제주 \"} Hangul = {\"dk\":", "label[0] two_num = label[1:3] hangul = label[3:5] four_num = label[5:]", "= argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\", help=\"weight file directory\", type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\",", "# remove overlap value outstr = '' for i in", "\"sn\": \"누\", \"fk\": \"라\", \"fj\": \"러\", \"fh\": \"로\", \"fn\": \"루\",", "max index -> len = 32 out_best = [k for", "hangul = label[3:5] four_num = label[5:] try: region = Region[region]", "= Region[region] if region != 'Z' else '' except: pass", ": \", acc / total) print(\"letter ACC : \", letter_acc", "type=str, default=\"./DB/test/\") args = parser.parse_args() # Get CRNN model model", "Get CRNN model 
model = get_Model(training=False) try: model.load_weights(args.weight) print(\"...Previous weight", "32 out_best = [k for k, g in itertools.groupby(out_best)] #", "\", \"G\": \"충북 \", \"H\": \"부산 \", \"I\": \"울산 \",", "label_to_hangul(label): # eng -> hangul region = label[0] two_num =", "overlap value outstr = '' for i in out_best: if", "pred_texts = decode_label(net_out_value) for i in range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i]", "\"부\", \"ek\": \"다\", \"ej\": \"더\", \"eh\": \"도\", \"en\": \"두\", \"rk\":", "% (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s / True: %s / predOk:", "decode_label(out): # out : (1, 32, 42) out_best = list(np.argmax(out[0,", "(0,0,0), -1) # cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2)", "\"구\", \"wk\": \"자\", \"wj\": \"저\", \"wh\": \"조\", \"wn\": \"주\", \"ak\":", "\"아\", \"dj\": \"어\", \"dh\": \"오\", \"dn\": \"우\", \"qk\": \"바\", \"qj\":", "\"라\", \"fj\": \"러\", \"fh\": \"로\", \"fn\": \"루\", \"tk\": \"사\", \"tj\":", "/ True: %s / net_out_value: %s / ' % (label_to_hangul(pred_texts),", "\"en\": \"두\", \"rk\": \"가\", \"rj\": \"거\", \"rh\": \"고\", \"rn\": \"구\",", "\"D\": \"강원 \", \"E\": \"충남 \", \"F\": \"대전 \", \"G\":", "outstr def label_to_hangul(label): # eng -> hangul region = label[0]", "' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s / True: %s /", "region = Region[region] if region != 'Z' else '' except:", "1 letter_total += max(len(pred_texts), len(test_img[0:-4])) predOk = \"True\" if pred_texts", "from parameter import letters import argparse from keras import backend", "time.time() total_time = (end - start) print(\"Time : \",total_time /", "except: raise Exception(\"No weight file!\") test_dir =args.test_img test_imgs = os.listdir(args.test_img)", "\",total_time / total) print(\"ACC : \", acc / total) print(\"letter", "\"머\", \"ah\": \"모\", \"an\": \"무\", \"sk\": \"나\", \"sj\": \"너\", 
\"sh\":", "\"노\", \"sn\": \"누\", \"fk\": \"라\", \"fj\": \"러\", \"fh\": \"로\", \"fn\":", "image directory\", type=str, default=\"./DB/test/\") args = parser.parse_args() # Get CRNN", "parser.parse_args() # Get CRNN model model = get_Model(training=False) try: model.load_weights(args.weight)", "\"더\", \"eh\": \"도\", \"en\": \"두\", \"rk\": \"가\", \"rj\": \"거\", \"rh\":", "\"B\": \"경기 \", \"C\": \"인천 \", \"D\": \"강원 \", \"E\":", "%s ' % (pred_texts, test_img[0:-4], predOk )) # cv2.rectangle(img, (0,0),", "net_out_value = model.predict(img_pred) pred_texts = decode_label(net_out_value) for i in range(min(len(pred_texts),", "\"루\", \"tk\": \"사\", \"tj\": \"서\", \"th\": \"소\", \"tn\": \"수\", \"gj\":", "\", \"K\": \"경북 \", \"L\": \"경남 \", \"M\": \"전남 \",", "= os.listdir(args.test_img) total = 0 acc = 0 letter_total =", "0 letter_acc = 0 start = time.time() for test_img in", "img_pred = img.astype(np.float32) img_pred = cv2.resize(img_pred, (128, 64)) img_pred =", "import numpy as np from Model import get_Model from parameter", "= {\"dk\": \"아\", \"dj\": \"어\", \"dh\": \"오\", \"dn\": \"우\", \"qk\":", "-1) # cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow(\"q\",", "print('Predicted: %s / True: %s / predOk: %s ' %", "42) out_best = list(np.argmax(out[0, 2:], axis=1)) # get max index", "\"대전 \", \"G\": \"충북 \", \"H\": \"부산 \", \"I\": \"울산", "\"rn\": \"구\", \"wk\": \"자\", \"wj\": \"저\", \"wh\": \"조\", \"wn\": \"주\",", "keras import backend as K K.set_learning_phase(0) Region = {\"A\": \"서울", "pass return region + two_num + hangul + four_num parser", "[k for k, g in itertools.groupby(out_best)] # remove overlap value", "!= 'Z' else '' except: pass try: hangul = Hangul[hangul]", "(128, 64)) img_pred = (img_pred / 255.0) * 2.0 -", "\"wk\": \"자\", \"wj\": \"저\", \"wh\": \"조\", \"wn\": \"주\", \"ak\": \"마\",", "else: predOk = \"False\" total += 1 # print('Predicted: %s", "\"C\": \"인천 \", \"D\": \"강원 \", \"E\": \"충남 \", \"F\":", 
"outstr += letters[i] return outstr def label_to_hangul(label): # eng ->", "== test_img[i]: letter_acc += 1 letter_total += max(len(pred_texts), len(test_img[0:-4])) predOk", "out_best = [k for k, g in itertools.groupby(out_best)] # remove", "\"광주 \", \"O\": \"전북 \", \"P\": \"제주 \"} Hangul =", "try: hangul = Hangul[hangul] except: pass return region + two_num", "help=\"weight file directory\", type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\", help=\"Test image directory\",", "hangul = Hangul[hangul] except: pass return region + two_num +", "if pred_texts[i] == test_img[i]: letter_acc += 1 letter_total += max(len(pred_texts),", "predOk: %s ' % (pred_texts, test_img[0:-4], predOk )) # cv2.rectangle(img,", "acc = 0 letter_total = 0 letter_acc = 0 start", "\"False\" total += 1 # print('Predicted: %s / True: %s", "255.0) * 2.0 - 1.0 img_pred = img_pred.T img_pred =", "32, 42) out_best = list(np.argmax(out[0, 2:], axis=1)) # get max", "27: # break #cv2.destroyAllWindows() end = time.time() total_time = (end", "acc / total) print(\"letter ACC : \", letter_acc / letter_total)", "\"다\", \"ej\": \"더\", \"eh\": \"도\", \"en\": \"두\", \"rk\": \"가\", \"rj\":", "\", \"H\": \"부산 \", \"I\": \"울산 \", \"J\": \"대구 \",", "index -> len = 32 out_best = [k for k,", "+= 1 else: predOk = \"False\" total += 1 #", "test_img in test_imgs: img = cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE) img_pred", "img_pred = (img_pred / 255.0) * 2.0 - 1.0 img_pred", "img_pred = np.expand_dims(img_pred, axis=0) net_out_value = model.predict(img_pred) pred_texts = decode_label(net_out_value)", "\", \"F\": \"대전 \", \"G\": \"충북 \", \"H\": \"부산 \",", "letter_total += max(len(pred_texts), len(test_img[0:-4])) predOk = \"True\" if pred_texts ==", "axis=0) net_out_value = model.predict(img_pred) pred_texts = decode_label(net_out_value) for i in", "\"강원 \", \"E\": \"충남 \", \"F\": \"대전 \", \"G\": \"충북", "0.8, (255,255,255),2) #cv2.imshow(\"q\", img) #if 
cv2.waitKey(0) == 27: # break", "model model = get_Model(training=False) try: model.load_weights(args.weight) print(\"...Previous weight data...\") except:", "%s / predOk: %s ' % (pred_texts, test_img[0:-4], predOk ))", "\"rj\": \"거\", \"rh\": \"고\", \"rn\": \"구\", \"wk\": \"자\", \"wj\": \"저\",", "g in itertools.groupby(out_best)] # remove overlap value outstr = ''", "\"--weight\", help=\"weight file directory\", type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\", help=\"Test image", "\", \"E\": \"충남 \", \"F\": \"대전 \", \"G\": \"충북 \",", "in test_imgs: img = cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE) img_pred =", "pred_texts == test_img[0:-4]: acc += 1 else: predOk = \"False\"", "True: %s / predOk: %s ' % (pred_texts, test_img[0:-4], predOk", "directory\", type=str, default=\"models/weights.best.hdf5\") parser.add_argument(\"-t\", \"--test_img\", help=\"Test image directory\", type=str, default=\"./DB/test/\")", "model.predict(img_pred) pred_texts = decode_label(net_out_value) for i in range(min(len(pred_texts), len(test_img[0:-4]))): if", "hangul + four_num parser = argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\", help=\"weight file", "\"ah\": \"모\", \"an\": \"무\", \"sk\": \"나\", \"sj\": \"너\", \"sh\": \"노\",", "\"충남 \", \"F\": \"대전 \", \"G\": \"충북 \", \"H\": \"부산", "= label[5:] try: region = Region[region] if region != 'Z'", "itertools, os, time import numpy as np from Model import", "label[5:] try: region = Region[region] if region != 'Z' else", "weight file!\") test_dir =args.test_img test_imgs = os.listdir(args.test_img) total = 0", "\"수\", \"gj\": \"허\"} def decode_label(out): # out : (1, 32,", "len(test_img[0:-4]))): if pred_texts[i] == test_img[i]: letter_acc += 1 letter_total +=", "+= 1 letter_total += max(len(pred_texts), len(test_img[0:-4])) predOk = \"True\" if", "parser = argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\", help=\"weight file directory\", type=str, 
default=\"models/weights.best.hdf5\")", "+ four_num parser = argparse.ArgumentParser() parser.add_argument(\"-w\", \"--weight\", help=\"weight file directory\",", "len(test_img[0:-4])) predOk = \"True\" if pred_texts == test_img[0:-4]: acc +=", "except: pass return region + two_num + hangul + four_num", "64)) img_pred = (img_pred / 255.0) * 2.0 - 1.0", "= 0 letter_total = 0 letter_acc = 0 start =", "\", acc / total) print(\"letter ACC : \", letter_acc /", "\"rh\": \"고\", \"rn\": \"구\", \"wk\": \"자\", \"wj\": \"저\", \"wh\": \"조\",", "%s / net_out_value: %s / ' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted:", "= get_Model(training=False) try: model.load_weights(args.weight) print(\"...Previous weight data...\") except: raise Exception(\"No", "predOk = \"False\" total += 1 # print('Predicted: %s /", "\"} Hangul = {\"dk\": \"아\", \"dj\": \"어\", \"dh\": \"오\", \"dn\":", "os.listdir(args.test_img) total = 0 acc = 0 letter_total = 0", "letters import argparse from keras import backend as K K.set_learning_phase(0)", "out_best = list(np.argmax(out[0, 2:], axis=1)) # get max index ->", "- 1.0 img_pred = img_pred.T img_pred = np.expand_dims(img_pred, axis=-1) img_pred", "K K.set_learning_phase(0) Region = {\"A\": \"서울 \", \"B\": \"경기 \",", "\"전남 \", \"N\": \"광주 \", \"O\": \"전북 \", \"P\": \"제주", "== test_img[0:-4]: acc += 1 else: predOk = \"False\" total", "\"--test_img\", help=\"Test image directory\", type=str, default=\"./DB/test/\") args = parser.parse_args() #", "(150, 30), (0,0,0), -1) # cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX,", "start) print(\"Time : \",total_time / total) print(\"ACC : \", acc", "Region = {\"A\": \"서울 \", \"B\": \"경기 \", \"C\": \"인천", "import backend as K K.set_learning_phase(0) Region = {\"A\": \"서울 \",", "\"J\": \"대구 \", \"K\": \"경북 \", \"L\": \"경남 \", \"M\":", "\"두\", \"rk\": \"가\", \"rj\": \"거\", \"rh\": \"고\", \"rn\": \"구\", \"wk\":", "#cv2.destroyAllWindows() end = time.time() 
total_time = (end - start) print(\"Time", "\"P\": \"제주 \"} Hangul = {\"dk\": \"아\", \"dj\": \"어\", \"dh\":", "\"eh\": \"도\", \"en\": \"두\", \"rk\": \"가\", \"rj\": \"거\", \"rh\": \"고\",", "time import numpy as np from Model import get_Model from", "get_Model from parameter import letters import argparse from keras import", "= np.expand_dims(img_pred, axis=-1) img_pred = np.expand_dims(img_pred, axis=0) net_out_value = model.predict(img_pred)", "\"바\", \"qj\": \"버\", \"qh\": \"보\", \"qn\": \"부\", \"ek\": \"다\", \"ej\":", "print(\"...Previous weight data...\") except: raise Exception(\"No weight file!\") test_dir =args.test_img", "+ two_num + hangul + four_num parser = argparse.ArgumentParser() parser.add_argument(\"-w\",", "label[3:5] four_num = label[5:] try: region = Region[region] if region", "1 else: predOk = \"False\" total += 1 # print('Predicted:", "predOk = \"True\" if pred_texts == test_img[0:-4]: acc += 1", "img.astype(np.float32) img_pred = cv2.resize(img_pred, (128, 64)) img_pred = (img_pred /", "\"rk\": \"가\", \"rj\": \"거\", \"rh\": \"고\", \"rn\": \"구\", \"wk\": \"자\",", "return region + two_num + hangul + four_num parser =", "numpy as np from Model import get_Model from parameter import", "\"aj\": \"머\", \"ah\": \"모\", \"an\": \"무\", \"sk\": \"나\", \"sj\": \"너\"," ]
[ "Optional[List[str]] = None, scope=None, device=\"\", ): \"\"\" Convert pandas dataframe", "blank. Examples -------- >>> import pandas as pd >>> import", "return scope.Frame(res, device=device) def from_arrow_array(array, dtype=None, scope=None, device=\"\"): \"\"\" \"", "or Scope.default device = device or scope.device assert isinstance(table, pa.Table)", "cast # Skipping analyzing 'numpy': found module but no type", "dtype is not None: assert dt.is_struct(dtype) dtype = cast(dt.Struct, dtype)", "device=\"\"): \"\"\" Convert 1dim numpy array to a torcharrow column", "_from_numpy_nd(data, dtype, scope=None, device=\"\"): # adopt types if dtype is", "3],'b': [0.1, 0.2, None, 0.3]}) >>> gdf = ta.from_pandas_dataframe(pdf) >>>", "1: return _from_numpy_ma(array.data, array.mask, dtype, scope, device) elif isinstance(array, np.ndarray)", "cast(dt.Struct, dtype) res = {} for f in dtype.fields: #", "scope=None, device=\"\", ): \"\"\" Convert pandas dataframe to torcharrow dataframe", "Scope.default device = device or scope.device return from_numpy(series.to_numpy(), dtype, scope,", "torcharrow dataframe (drops indices). Parameters ---------- df : Pandas dataframe", "numpy array of type {data.dtype}\") def _from_numpy_nd(data, dtype, scope=None, device=\"\"):", "dt.typeof_np_dtype(data.dtype) # create column, only zero copy supported if dt.is_boolean_or_numerical(dtype):", "if dtype is not None: assert dt.is_struct(dtype) dtype = cast(dt.Struct,", "table.select(columns) if columns is not None else table for n", "also construct Dataframes! 
res[f.name] = from_pandas_series( pd.Series(df[f.name]), f.dtype, scope=scope )", "return from_numpy(series.to_numpy(), dtype, scope, device) def from_numpy(array, dtype, scope=None, device=\"\"):", "return not isinstance(s, str) def _from_numpy_ma(data, mask, dtype, scope=None, device=\"\"):", "# Skipping analyzing 'numpy': found module but no type hints", "------- --- --- 0 0 0.1 1 1 0.2 2", "= {} for f in dtype.fields: chunked_array = table.column(f.name) pydata", "pa.types.is_boolean(t): return dt.Boolean(nullable) if pa.types.is_int8(t): return dt.Int8(nullable) if pa.types.is_int16(t): return", "if pa.types.is_int32(t): return dt.Int32(nullable) if pa.types.is_int64(t): return dt.Int64(nullable) if pa.types.is_float32(t):", "= ta.from_pandas_dataframe(pdf) >>> gdf index a b ------- --- ---", "res[n] = from_pandas_series(pd.Series(df[n]), scope=scope) return scope.Frame(res, device=device) def from_arrow_array(array, dtype=None,", "from_pandas_series(series, dtype=None, scope=None, device=\"\"): \"\"\" \" Convert pandas series array", "= device or scope.device assert isinstance(table, pa.Table) if dtype is", "\"\"\" \" Convert arrow array to a torcharrow column. \"\"\"", "None or is_floating(dtype): # for i in series: # if", "dt.Boolean(nullable) if pa.types.is_int8(t): return dt.Int8(nullable) if pa.types.is_int16(t): return dt.Int16(nullable) if", "{data.dtype}\") def _from_numpy_nd(data, dtype, scope=None, device=\"\"): # adopt types if", "= pd.DataFrame({'a': [0, 1, 2, 3],'b': [0.1, 0.2, None, 0.3]})", "dtype=dtype, device=device) else: res = {} for n in df.columns:", "_arrowtype_to_dtype(t, nullable): if pa.types.is_boolean(t): return dt.Boolean(nullable) if pa.types.is_int8(t): return dt.Int8(nullable)", "device = device or scope.device if dtype is not None:", "found module but no type hints or library stubs import", "), ) return scope.DataFrame(res, device=device) def from_pandas_dataframe( df, dtype: Optional[dt.DType]", "Facebook, Inc. 
and its affiliates. from typing import List, Optional,", "= scope.Column(pydata, f.dtype) return scope.DataFrame(res, device=device) else: res = {}", "of type {data.dtype,}\") # def _column_without_nan(series, dtype): # if dtype", "= table.column(n) pydata = chunked_array.to_pylist() res[n] = scope.Column( pydata, dtype=_arrowtype_to_dtype(", "but no type hints or library stubs import numpy as", "df.columns: if columns is None or n in columns: res[n]", "column names to extract from df. scope : Scope or", "scope=scope ) return scope.Frame(res, dtype=dtype, device=device) else: res = {}", "but no type hints or library stubs import pandas as", "def _arrow_scalar_to_py(array): for i in array: yield i.as_py() def _pandatype_to_dtype(t,", "None: dtype = dt.typeof_np_dtype(data.dtype) if dtype is None: dtype =", "isinstance(table, pa.Table) if dtype is not None: assert dt.is_struct(dtype) dtype", "= {} table = table.select(columns) if columns is not None", "not None else table for n in table.column_names: chunked_array =", "from_numpy(array, dtype, scope=None, device=\"\"): \"\"\" Convert 1dim numpy array to", "return dt.Int32(nullable) if pa.types.is_int64(t): return dt.Int64(nullable) if pa.types.is_float32(t): return dt.Float32(nullable)", "chunked_array.to_pylist() res[f.name] = scope.Column(pydata, f.dtype) return scope.DataFrame(res, device=device) else: res", "or Scope.default device = device or scope.device if dtype is", "assert isinstance(array, pa.Array) pydata = _arrow_scalar_to_py(array) if dtype is not", "that Column shoud also construct Dataframes! 
res[f.name] = from_pandas_series( pd.Series(df[f.name]),", "dt from torcharrow import Scope def from_arrow_table( table, dtype: Optional[dt.DType]", "table.column(f.name) pydata = chunked_array.to_pylist() res[f.name] = scope.Column(pydata, f.dtype) return scope.DataFrame(res,", "Field('b', Float64(nullable=True))]), count: 4, null_count: 0 \"\"\" scope = scope", "mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask) else: raise TypeError(f\"cannot convert masked", "(drops indices). Parameters ---------- df : Pandas dataframe dtype :", "convert numpy array of type {data.dtype,}\") # def _column_without_nan(series, dtype):", "ignore import torcharrow.dtypes as dt from torcharrow import Scope def", "indices). Parameters ---------- df : Pandas dataframe dtype : dtype,", "numpy array of type {data.dtype,}\") # def _column_without_nan(series, dtype): #", "arrow table to a torcharrow dataframe. \"\"\" scope = scope", "scope._FullColumn(data, dtype=dtype, mask=mask) elif dt.is_string(dtype) or dtype == \"object\": assert", "copy supported if dt.is_boolean_or_numerical(dtype): mask = np.isnan(data) return scope._FullColumn(data, dtype=dtype,", "np.isnan([np.nan, np.nan, 3.])) # create column, only zero copy supported", "if np.any(mask): dtype = dtype.with_null() return scope._FullColumn(data, dtype=dtype, mask=mask) else:", "type: ignore import numpy.ma as ma # type: ignore #", "dtype, scope, device) else: raise TypeError(f\"cannot convert numpy array of", "scope=None, device=\"\"): # adopt types if dtype is None: dtype", "_is_not_str(s): return not isinstance(s, str) def _from_numpy_ma(data, mask, dtype, scope=None,", "assert dtype == dt.typeof_np_dtype(data.dtype).with_null() # TODO if not, adopt the", "return dt.Boolean(nullable) if pa.types.is_int8(t): return dt.Int8(nullable) if pa.types.is_int16(t): return dt.Int16(nullable)", "not None: assert not dt.is_struct(dtype) return scope.Column(pydata, dtype, device=device) else:", "if 
pa.types.is_struct(t): return _pandatype_to_dtype(t.to_pandas_dtype(), True) if pa.types.is_null(t): return dt.Void() if", "n in df.columns: if columns is None or n in", "dtype is None or is_floating(dtype): # for i in series:", "--- --- 0 0 0.1 1 1 0.2 2 2", "table.column(n).null_count > 0 ), ) return scope.DataFrame(res, device=device) def from_pandas_dataframe(", "[0.1, 0.2, None, 0.3]}) >>> gdf = ta.from_pandas_dataframe(pdf) >>> gdf", "elif dt.is_string(dtype): mask = np.vectorize(_is_not_str)(data) if np.any(mask): dtype = dtype.with_null()", "pdf = pd.DataFrame({'a': [0, 1, 2, 3],'b': [0.1, 0.2, None,", "_column_without_nan(series, dtype): # if dtype is None or is_floating(dtype): #", "== 1: return _from_numpy_nd(array, dtype, scope, device) else: raise TypeError(f\"cannot", "case # assert dtype == dt.typeof_np_dtype(data.dtype) # create column, only", "# if isinstance(i, float) and np.isnan(i): # yield None #", "type: ignore import torcharrow.dtypes as dt from torcharrow import Scope", "= scope or Scope.default device = device or scope.device return", "scope.Frame(res, device=device) def from_arrow_array(array, dtype=None, scope=None, device=\"\"): \"\"\" \" Convert", "Float64(nullable=True))]), count: 4, null_count: 0 \"\"\" scope = scope or", "copy). 
\"\"\" scope = scope or Scope.default device = device", "def _is_not_str(s): return not isinstance(s, str) def _from_numpy_ma(data, mask, dtype,", "else: assert dt.is_primitive(dtype) # TODO Check why teh following assert", "== \"object\": assert np.all(np.vectorize(_is_not_str)(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask) else:", "pa.Table) if dtype is not None: assert dt.is_struct(dtype) dtype =", "dt.typeof_nptype(t, nullable) def _arrowtype_to_dtype(t, nullable): if pa.types.is_boolean(t): return dt.Boolean(nullable) if", "import torcharrow.dtypes as dt from torcharrow import Scope def from_arrow_table(", "scope or Scope.default device = device or scope.device if isinstance(array,", "if pa.types.is_boolean(t): return dt.Boolean(nullable) if pa.types.is_int8(t): return dt.Int8(nullable) if pa.types.is_int16(t):", "table = table.select(columns) if columns is not None else table", "as pd >>> import torcharrow as ta >>> pdf =", "dtype, scope, device) def from_numpy(array, dtype, scope=None, device=\"\"): \"\"\" Convert", "return scope._FullColumn(data, dtype=dtype, mask=mask) elif dt.is_string(dtype): mask = np.vectorize(_is_not_str)(data) if", "dt.is_struct(dtype) dtype = cast(dt.Struct, dtype) res = {} for f", "device=\"\", ): \"\"\" \" Convert arrow table to a torcharrow", "_from_numpy_nd(array, dtype, scope, device) else: raise TypeError(f\"cannot convert numpy array", ">>> gdf = ta.from_pandas_dataframe(pdf) >>> gdf index a b -------", "arrow array to a torcharrow column. 
\"\"\" scope = scope", "np.isnan(data) return scope._FullColumn(data, dtype=dtype, mask=mask) elif dt.is_string(dtype): mask = np.vectorize(_is_not_str)(data)", "array.ndim == 1: return _from_numpy_ma(array.data, array.mask, dtype, scope, device) elif", "from_arrow_table( table, dtype: Optional[dt.DType] = None, columns: Optional[List[str]] = None,", "None, columns: Optional[List[str]] = None, scope=None, device=\"\", ): \"\"\" \"", "or \"\" Device to use, or default if blank. Examples", "= np.isnan([np.nan, np.nan, 3.])) # create column, only zero copy", "or Scope.default device = device or scope.device if isinstance(array, ma.core.MaskedArray)", "= dt.string else: assert dt.is_primitive(dtype) # TODO Check why teh", "as ta >>> pdf = pd.DataFrame({'a': [0, 1, 2, 3],'b':", "None or n in columns: res[n] = from_pandas_series(pd.Series(df[n]), scope=scope) return", "None for default scope. device : str or \"\" Device", "as ma # type: ignore # Skipping analyzing 'pandas': found", "dataframe. \"\"\" scope = scope or Scope.default device = device", "if pa.types.is_string(t): return dt.String(nullable) if pa.types.is_map(t): return dt.Map(t.item_type, t.key_type, nullable)", "in table.column_names: chunked_array = table.column(n) pydata = chunked_array.to_pylist() res[n] =", "and array.ndim == 1: return _from_numpy_nd(array, dtype, scope, device) else:", "(c) Facebook, Inc. and its affiliates. 
from typing import List,", "dt.is_boolean_or_numerical(dtype): assert not np.all(np.isnan(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask) elif", "{array.dtype}\") def _is_not_str(s): return not isinstance(s, str) def _from_numpy_ma(data, mask,", "assert dtype == dt.typeof_np_dtype(data.dtype) # create column, only zero copy", "array.ndim == 1: return _from_numpy_nd(array, dtype, scope, device) else: raise", "return scope._FullColumn(data, dtype=dtype, mask=mask) else: raise TypeError(f\"cannot convert masked numpy", "dtype) res = {} for f in dtype.fields: chunked_array =", "# mask = np.isnan([np.nan, np.nan, 3.])) # create column, only", "pa.types.is_list(t): return List(t.value_type, nullable) if pa.types.is_struct(t): return _pandatype_to_dtype(t.to_pandas_dtype(), True) if", "chunked_array.to_pylist() res[n] = scope.Column( pydata, dtype=_arrowtype_to_dtype( table.schema.field(n).type, table.column(n).null_count > 0", "no type hints or library stubs import pandas as pd", "assert not np.all(np.isnan(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask) elif dt.is_string(dtype)", "f.dtype) return scope.DataFrame(res, device=device) else: res = {} table =", "return scope.Frame(res, dtype=dtype, device=device) else: res = {} for n", "else: raise TypeError(f\"cannot convert numpy array of type {array.dtype}\") def", "float) and np.isnan(i): # yield None # else: # yield", "columns: Optional[List[str]] = None, scope=None, device=\"\", ): \"\"\" \" Convert", "# this shows that Column shoud also construct Dataframes! 
res[f.name]", "dtype.fields: # this shows that Column shoud also construct Dataframes!", ") return scope.Frame(res, dtype=dtype, device=device) else: res = {} for", "scope.device assert isinstance(table, pa.Table) if dtype is not None: assert", "dt.string else: assert dt.is_primitive(dtype) # TODO Check why teh following", "0.2, None, 0.3]}) >>> gdf = ta.from_pandas_dataframe(pdf) >>> gdf index", "else: res = {} for n in df.columns: if columns", "None else table for n in table.column_names: chunked_array = table.column(n)", "\"object\": assert np.all(np.vectorize(_is_not_str)(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask) else: raise", "= device or scope.device assert isinstance(array, pa.Array) pydata = _arrow_scalar_to_py(array)", "for i in array: yield i.as_py() def _pandatype_to_dtype(t, nullable): return", "np # type: ignore import numpy.ma as ma # type:", "a b ------- --- --- 0 0 0.1 1 1", "to force, if None will automatically infer. columns : array-like", "pa.types.is_int8(t): return dt.Int8(nullable) if pa.types.is_int16(t): return dt.Int16(nullable) if pa.types.is_int32(t): return", "convert masked numpy array of type {data.dtype}\") def _from_numpy_nd(data, dtype,", "pd >>> import torcharrow as ta >>> pdf = pd.DataFrame({'a':", "device) elif isinstance(array, np.ndarray) and array.ndim == 1: return _from_numpy_nd(array,", "dtype == \"object\": assert np.all(np.vectorize(_is_not_str)(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask)", "b ------- --- --- 0 0 0.1 1 1 0.2", "np.all(np.isnan(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask) elif dt.is_string(dtype) or dtype", "types if dtype is None: dtype = dt.typeof_np_dtype(data.dtype) if dtype", "scope=None, device=\"\", ): \"\"\" \" Convert arrow table to a", "{data.dtype,}\") # def _column_without_nan(series, dtype): # if dtype is None", "\"\"\" \" Convert arrow table to a 
torcharrow dataframe. \"\"\"", "mask=mask) elif dt.is_string(dtype) or dtype == \"object\": assert np.all(np.vectorize(_is_not_str)(ma.array(data, mask).compressed()))", "device or scope.device assert isinstance(array, pa.Array) pydata = _arrow_scalar_to_py(array) if", "None: dtype = dt.string else: assert dt.is_primitive(dtype) # TODO Check", "extract from df. scope : Scope or None Scope to", "types if dtype is None: dtype = dt.typeof_np_dtype(data.dtype).with_null() else: assert", "scope.Column( pydata, dtype=_arrowtype_to_dtype( table.schema.field(n).type, table.column(n).null_count > 0 ), ) return", "if dtype is None: dtype = dt.typeof_np_dtype(data.dtype).with_null() else: assert dt.is_primitive_type(dtype)", "not convert numpy array of type {data.dtype,}\") # def _column_without_nan(series,", "if dt.is_boolean_or_numerical(dtype): assert not np.all(np.isnan(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask)", "scope.device assert isinstance(array, pa.Array) pydata = _arrow_scalar_to_py(array) if dtype is", "for i in series: # if isinstance(i, float) and np.isnan(i):", "Convert 1dim numpy array to a torcharrow column (zero copy).", "to a torcharrow dataframe. 
\"\"\" scope = scope or Scope.default", "dtype=dtype, mask=mask) elif dt.is_string(dtype): mask = np.vectorize(_is_not_str)(data) if np.any(mask): dtype", "# type: ignore import pyarrow as pa # type: ignore", "\" Convert pandas series array to a torcharrow column (drops", "{} for n in df.columns: if columns is None or", "scope._FullColumn(data, dtype=dtype, mask=mask) elif dt.is_string(dtype): mask = np.vectorize(_is_not_str)(data) if np.any(mask):", "is_floating(dtype): # for i in series: # if isinstance(i, float)", "pandas as pd # type: ignore import pyarrow as pa", "elif isinstance(array, np.ndarray) and array.ndim == 1: return _from_numpy_nd(array, dtype,", "pa.types.is_string(t): return dt.String(nullable) if pa.types.is_map(t): return dt.Map(t.item_type, t.key_type, nullable) raise", "return _pandatype_to_dtype(t.to_pandas_dtype(), True) if pa.types.is_null(t): return dt.Void() if pa.types.is_string(t): return", "pa.types.is_float32(t): return dt.Float32(nullable) if pa.types.is_float64(t): return dt.Float64(nullable) if pa.types.is_list(t): return", "# Skipping analyzing 'pandas': found module but no type hints", "device=\"\", ): \"\"\" Convert pandas dataframe to torcharrow dataframe (drops", "str or \"\" Device to use, or default if blank.", ": Pandas dataframe dtype : dtype, default None Data type", "type to force, if None will automatically infer. columns :", "# TODO if not, adopt the type or? # Something", "dtype=None, scope=None, device=\"\"): \"\"\" \" Convert arrow array to a", "if dtype is None: dtype = dt.string else: assert dt.is_primitive(dtype)", "0 0 0.1 1 1 0.2 2 2 3 3", "type or? 
# Something like ma.array # np.array([np.nan, np.nan, 3.]).astype(np.int64),", "if isinstance(array, ma.core.MaskedArray) and array.ndim == 1: return _from_numpy_ma(array.data, array.mask,", "if pa.types.is_null(t): return dt.Void() if pa.types.is_string(t): return dt.String(nullable) if pa.types.is_map(t):", "default None Data type to force, if None will automatically", "dtype.fields: chunked_array = table.column(f.name) pydata = chunked_array.to_pylist() res[f.name] = scope.Column(pydata,", "if dt.is_boolean_or_numerical(dtype): mask = np.isnan(data) return scope._FullColumn(data, dtype=dtype, mask=mask) elif", "table for n in table.column_names: chunked_array = table.column(n) pydata =", "library stubs import pandas as pd # type: ignore import", "supported if dt.is_boolean_or_numerical(dtype): mask = np.isnan(data) return scope._FullColumn(data, dtype=dtype, mask=mask)", "'pandas': found module but no type hints or library stubs", "to use, or default if blank. Examples -------- >>> import", "only zero copy supported if dt.is_boolean_or_numerical(dtype): mask = np.isnan(data) return", "device=device) else: res = {} table = table.select(columns) if columns", "stubs import numpy as np # type: ignore import numpy.ma", "a torcharrow dataframe. \"\"\" scope = scope or Scope.default device", "isinstance(array, np.ndarray) and array.ndim == 1: return _from_numpy_nd(array, dtype, scope,", "else: return scope.Column( pydata, dtype=_arrowtype_to_dtype(array.type, array.null_count > 0), device=device, )", "--- 0 0 0.1 1 1 0.2 2 2 3", "raise TypeError(\"can not convert numpy array of type {data.dtype,}\") #", "if isinstance(i, float) and np.isnan(i): # yield None # else:", "Scope to use, or None for default scope. device :", "1, 2, 3],'b': [0.1, 0.2, None, 0.3]}) >>> gdf =", "Convert arrow array to a torcharrow column. \"\"\" scope =", "to use, or None for default scope. 
device : str", "adopt types if dtype is None: dtype = dt.typeof_np_dtype(data.dtype).with_null() else:", "None, 0.3]}) >>> gdf = ta.from_pandas_dataframe(pdf) >>> gdf index a", "scope.DataFrame(res, device=device) def from_pandas_dataframe( df, dtype: Optional[dt.DType] = None, columns:", "\" Convert arrow table to a torcharrow dataframe. \"\"\" scope", "0.3 dtype: Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count: 4, null_count: 0", "= chunked_array.to_pylist() res[n] = scope.Column( pydata, dtype=_arrowtype_to_dtype( table.schema.field(n).type, table.column(n).null_count >", "# for i in series: # yield i def _arrow_scalar_to_py(array):", "following assert isn't the case # assert dtype == dt.typeof_np_dtype(data.dtype)", "columns: Optional[List[str]] = None, scope=None, device=\"\", ): \"\"\" Convert pandas", "2 3 3 0.3 dtype: Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count:", "in series: # yield i def _arrow_scalar_to_py(array): for i in", "dtype is None: dtype = dt.typeof_np_dtype(data.dtype).with_null() else: assert dt.is_primitive_type(dtype) assert", "dt.Void() if pa.types.is_string(t): return dt.String(nullable) if pa.types.is_map(t): return dt.Map(t.item_type, t.key_type,", "import torcharrow as ta >>> pdf = pd.DataFrame({'a': [0, 1,", "f in dtype.fields: chunked_array = table.column(f.name) pydata = chunked_array.to_pylist() res[f.name]", "Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count: 4, null_count: 0 \"\"\" scope", "scope, device) def from_numpy(array, dtype, scope=None, device=\"\"): \"\"\" Convert 1dim", "np.ndarray) and array.ndim == 1: return _from_numpy_nd(array, dtype, scope, device)", "= chunked_array.to_pylist() res[f.name] = scope.Column(pydata, f.dtype) return scope.DataFrame(res, device=device) else:", "res[f.name] = scope.Column(pydata, f.dtype) return scope.DataFrame(res, device=device) else: res =", "to extract from df. 
scope : Scope or None Scope", "create column, only zero copy supported if dt.is_boolean_or_numerical(dtype): mask =", "from torcharrow import Scope def from_arrow_table( table, dtype: Optional[dt.DType] =", "force, if None will automatically infer. columns : array-like List", "assert dt.is_primitive_type(dtype) assert dtype == dt.typeof_np_dtype(data.dtype).with_null() # TODO if not,", "else: res = {} table = table.select(columns) if columns is", "= device or scope.device return from_numpy(series.to_numpy(), dtype, scope, device) def", "yield i def _arrow_scalar_to_py(array): for i in array: yield i.as_py()", "table, dtype: Optional[dt.DType] = None, columns: Optional[List[str]] = None, scope=None,", "else: # for i in series: # yield i def", "device=device) def from_pandas_dataframe( df, dtype: Optional[dt.DType] = None, columns: Optional[List[str]]", "Optional[dt.DType] = None, columns: Optional[List[str]] = None, scope=None, device=\"\", ):", "Convert pandas series array to a torcharrow column (drops indices).", "scope._FullColumn(data, dtype=dtype, mask=mask) else: raise TypeError(f\"cannot convert masked numpy array", "type: ignore import pyarrow as pa # type: ignore import", "ma.array # np.array([np.nan, np.nan, 3.]).astype(np.int64), # mask = np.isnan([np.nan, np.nan,", "= from_pandas_series(pd.Series(df[n]), scope=scope) return scope.Frame(res, device=device) def from_arrow_array(array, dtype=None, scope=None,", "0.1 1 1 0.2 2 2 3 3 0.3 dtype:", "def _column_without_nan(series, dtype): # if dtype is None or is_floating(dtype):", "ignore import numpy.ma as ma # type: ignore # Skipping", "type: ignore # Skipping analyzing 'pandas': found module but no", "return dt.String(nullable) if pa.types.is_map(t): return dt.Map(t.item_type, t.key_type, nullable) raise NotImplementedError(\"unsupported", "or Scope.default device = device or scope.device return from_numpy(series.to_numpy(), dtype,", "for f in dtype.fields: chunked_array = table.column(f.name) pydata = 
def from_pandas_dataframe(
    df,
    dtype: Optional[dt.DType] = None,
    columns: Optional[List[str]] = None,
    scope=None,
    device="",
):
    """
    Convert pandas dataframe to torcharrow dataframe (drops indices).

    Parameters
    ----------
    df : Pandas dataframe
    dtype : dtype, default None
        Data type to force, if None will automatically infer.
    columns : array-like
        List of column names to extract from df.
    scope : Scope or None
        Scope to use, or None for default scope.
    device : str or ""
        Device to use, or default if blank.

    Examples
    --------
    >>> import pandas as pd
    >>> import torcharrow as ta
    >>> pdf = pd.DataFrame({'a': [0, 1, 2, 3],'b': [0.1, 0.2, None, 0.3]})
    >>> gdf = ta.from_pandas_dataframe(pdf)
    >>> gdf
      index    a    b
    -------  ---  ---
          0    0  0.1
          1    1  0.2
          2    2
          3    3  0.3
    dtype: Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count: 4, null_count: 0
    """
    scope = scope or Scope.default
    device = device or scope.device

    if dtype is not None:
        # Caller pinned the schema: it must be a struct, one series per field.
        assert dt.is_struct(dtype)
        dtype = cast(dt.Struct, dtype)
        # this shows that Column shoud also construct Dataframes!
        data = {
            f.name: from_pandas_series(pd.Series(df[f.name]), f.dtype, scope=scope)
            for f in dtype.fields
        }
        return scope.Frame(data, dtype=dtype, device=device)

    # No dtype: take every dataframe column (optionally filtered by `columns`)
    # and let each series infer its own dtype.
    data = {
        name: from_pandas_series(pd.Series(df[name]), scope=scope)
        for name in df.columns
        if columns is None or name in columns
    }
    return scope.Frame(data, device=device)
def from_arrow_array(array, dtype=None, scope=None, device=""):
    """Convert an arrow array to a torcharrow column."""
    scope = scope or Scope.default
    device = device or scope.device
    assert isinstance(array, pa.Array)
    # Lazily unbox arrow scalars to plain Python values.
    pydata = _arrow_scalar_to_py(array)
    if dtype is None:
        # Infer dtype from the arrow type; nullable iff the array holds nulls.
        inferred = _arrowtype_to_dtype(array.type, array.null_count > 0)
        return scope.Column(pydata, dtype=inferred, device=device)
    # An explicit dtype may not be a struct for a flat arrow array.
    assert not dt.is_struct(dtype)
    return scope.Column(pydata, dtype, device=device)
def from_pandas_series(series, dtype=None, scope=None, device=""):
    """Convert a pandas series to a torcharrow column (drops indices)."""
    scope = scope or Scope.default
    device = device or scope.device
    # Only the underlying ndarray is handed off; the index is dropped.
    values = series.to_numpy()
    return from_numpy(values, dtype, scope, device)
\"\"\" scope = scope or Scope.default device =", "3.])) # create column, only zero copy supported if dt.is_boolean_or_numerical(dtype):", "0 ), ) return scope.DataFrame(res, device=device) def from_pandas_dataframe( df, dtype:", "pd.DataFrame({'a': [0, 1, 2, 3],'b': [0.1, 0.2, None, 0.3]}) >>>", "torcharrow import Scope def from_arrow_table( table, dtype: Optional[dt.DType] = None,", "or is_floating(dtype): # for i in series: # if isinstance(i,", "i.as_py() def _pandatype_to_dtype(t, nullable): return dt.typeof_nptype(t, nullable) def _arrowtype_to_dtype(t, nullable):", "= scope or Scope.default device = device or scope.device assert", "if pa.types.is_list(t): return List(t.value_type, nullable) if pa.types.is_struct(t): return _pandatype_to_dtype(t.to_pandas_dtype(), True)", "stubs import pandas as pd # type: ignore import pyarrow", "import pyarrow as pa # type: ignore import torcharrow.dtypes as", ": str or \"\" Device to use, or default if", "device=device, ) def from_pandas_series(series, dtype=None, scope=None, device=\"\"): \"\"\" \" Convert", "from_pandas_dataframe( df, dtype: Optional[dt.DType] = None, columns: Optional[List[str]] = None,", "scope=None, device=\"\"): \"\"\" Convert 1dim numpy array to a torcharrow", "np.all(np.vectorize(_is_not_str)(ma.array(data, mask).compressed())) return scope._FullColumn(data, dtype=dtype, mask=mask) else: raise TypeError(f\"cannot convert", "-------- >>> import pandas as pd >>> import torcharrow as", "def from_pandas_series(series, dtype=None, scope=None, device=\"\"): \"\"\" \" Convert pandas series", "i def _arrow_scalar_to_py(array): for i in array: yield i.as_py() def", "for n in table.column_names: chunked_array = table.column(n) pydata = chunked_array.to_pylist()", "index a b ------- --- --- 0 0 0.1 1", "dt.Float32(nullable) if pa.types.is_float64(t): return dt.Float64(nullable) if pa.types.is_list(t): return List(t.value_type, nullable)", "in array: yield i.as_py() def _pandatype_to_dtype(t, nullable): return 
dt.typeof_nptype(t, nullable)", "mask=mask) elif dt.is_string(dtype): mask = np.vectorize(_is_not_str)(data) if np.any(mask): dtype =", "scope : Scope or None Scope to use, or None", "np.vectorize(_is_not_str)(data) if np.any(mask): dtype = dtype.with_null() return scope._FullColumn(data, dtype=dtype, mask=mask)", "np.nan, 3.])) # create column, only zero copy supported if", "infer. columns : array-like List of column names to extract", "hints or library stubs import pandas as pd # type:", "isinstance(i, float) and np.isnan(i): # yield None # else: #", "for i in series: # yield i def _arrow_scalar_to_py(array): for", "for n in df.columns: if columns is None or n", "if pa.types.is_int64(t): return dt.Int64(nullable) if pa.types.is_float32(t): return dt.Float32(nullable) if pa.types.is_float64(t):", "pa.types.is_null(t): return dt.Void() if pa.types.is_string(t): return dt.String(nullable) if pa.types.is_map(t): return", "mask=mask) else: raise TypeError(\"can not convert numpy array of type", "array: yield i.as_py() def _pandatype_to_dtype(t, nullable): return dt.typeof_nptype(t, nullable) def", "convert numpy array of type {array.dtype}\") def _is_not_str(s): return not", "assert isinstance(table, pa.Table) if dtype is not None: assert dt.is_struct(dtype)", "for default scope. device : str or \"\" Device to", "in df.columns: if columns is None or n in columns:", "_from_numpy_ma(data, mask, dtype, scope=None, device=\"\"): # adopt types if dtype", "null_count: 0 \"\"\" scope = scope or Scope.default device =", "= np.isnan(data) return scope._FullColumn(data, dtype=dtype, mask=mask) elif dt.is_string(dtype): mask =", "in dtype.fields: chunked_array = table.column(f.name) pydata = chunked_array.to_pylist() res[f.name] =", "return _from_numpy_ma(array.data, array.mask, dtype, scope, device) elif isinstance(array, np.ndarray) and", "\" Convert arrow array to a torcharrow column. \"\"\" scope", "yield None # else: # yield i # else: #", "indices). 
\"\"\" scope = scope or Scope.default device = device", "= dt.typeof_np_dtype(data.dtype) if dtype is None: dtype = dt.string else:", "if dtype is None or is_floating(dtype): # for i in", "dtype: Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count: 4, null_count: 0 \"\"\"", "create column, only zero copy supported if dt.is_boolean_or_numerical(dtype): assert not", "# yield i def _arrow_scalar_to_py(array): for i in array: yield", "3.]).astype(np.int64), # mask = np.isnan([np.nan, np.nan, 3.])) # create column,", "1 1 0.2 2 2 3 3 0.3 dtype: Struct([Field('a',", "in dtype.fields: # this shows that Column shoud also construct", "dtype) res = {} for f in dtype.fields: # this", "dt.Int16(nullable) if pa.types.is_int32(t): return dt.Int32(nullable) if pa.types.is_int64(t): return dt.Int64(nullable) if", "{} table = table.select(columns) if columns is not None else", "masked numpy array of type {data.dtype}\") def _from_numpy_nd(data, dtype, scope=None,", "= scope.Column( pydata, dtype=_arrowtype_to_dtype( table.schema.field(n).type, table.column(n).null_count > 0 ), )", "or library stubs import numpy as np # type: ignore", "table.schema.field(n).type, table.column(n).null_count > 0 ), ) return scope.DataFrame(res, device=device) def", "columns: res[n] = from_pandas_series(pd.Series(df[n]), scope=scope) return scope.Frame(res, device=device) def from_arrow_array(array,", "assert not dt.is_struct(dtype) return scope.Column(pydata, dtype, device=device) else: return scope.Column(", "np.isnan(i): # yield None # else: # yield i #", "in series: # if isinstance(i, float) and np.isnan(i): # yield", "dtype = cast(dt.Struct, dtype) res = {} for f in", "will automatically infer. 
columns : array-like List of column names", "device=\"\"): \"\"\" \" Convert pandas series array to a torcharrow", "ignore # Skipping analyzing 'pandas': found module but no type", "type hints or library stubs import numpy as np #", "as pd # type: ignore import pyarrow as pa #", "Data type to force, if None will automatically infer. columns", "torcharrow column (zero copy). \"\"\" scope = scope or Scope.default", "scope, device) else: raise TypeError(f\"cannot convert numpy array of type", "i in array: yield i.as_py() def _pandatype_to_dtype(t, nullable): return dt.typeof_nptype(t,", "mask, dtype, scope=None, device=\"\"): # adopt types if dtype is", "adopt the type or? # Something like ma.array # np.array([np.nan,", "Copyright (c) Facebook, Inc. and its affiliates. from typing import", "yield i.as_py() def _pandatype_to_dtype(t, nullable): return dt.typeof_nptype(t, nullable) def _arrowtype_to_dtype(t,", "'numpy': found module but no type hints or library stubs", "dtype = dt.typeof_np_dtype(data.dtype).with_null() else: assert dt.is_primitive_type(dtype) assert dtype == dt.typeof_np_dtype(data.dtype).with_null()", "np.array([np.nan, np.nan, 3.]).astype(np.int64), # mask = np.isnan([np.nan, np.nan, 3.])) #", "scope.device if isinstance(array, ma.core.MaskedArray) and array.ndim == 1: return _from_numpy_ma(array.data,", "device=device) else: res = {} for n in df.columns: if", "# else: # for i in series: # yield i", "device = device or scope.device if isinstance(array, ma.core.MaskedArray) and array.ndim", "only zero copy supported if dt.is_boolean_or_numerical(dtype): assert not np.all(np.isnan(ma.array(data, mask).compressed()))", "and array.ndim == 1: return _from_numpy_ma(array.data, array.mask, dtype, scope, device)", "library stubs import numpy as np # type: ignore import", "from_numpy(series.to_numpy(), dtype, scope, device) def from_numpy(array, dtype, scope=None, device=\"\"): \"\"\"", "automatically infer. 
def _from_numpy_nd(data, dtype, scope=None, device=""):
    """Wrap a plain 1-dim numpy array as a torcharrow column (zero copy)."""
    # adopt types
    if dtype is None:
        dtype = dt.typeof_np_dtype(data.dtype)
        if dtype is None:
            # numpy dtype with no torcharrow mapping (e.g. object): assume strings.
            dtype = dt.string
    else:
        assert dt.is_primitive(dtype)
        # TODO Check why the following assert isn't the case
        # assert dtype == dt.typeof_np_dtype(data.dtype)

    # create column, only zero copy supported
    if dt.is_boolean_or_numerical(dtype):
        # NaN entries become nulls via the mask.
        mask = np.isnan(data)
        return scope._FullColumn(data, dtype=dtype, mask=mask)
    elif dt.is_string(dtype):
        # Non-string entries become nulls; promote dtype to nullable if any exist.
        mask = np.vectorize(_is_not_str)(data)
        if np.any(mask):
            dtype = dtype.with_null()
        return scope._FullColumn(data, dtype=dtype, mask=mask)
    else:
        # BUG FIX: this message was a plain literal ("... {data.dtype,}") — the
        # dtype was never interpolated and a stray comma leaked into the text.
        # Use an f-string, matching the sibling converters.
        raise TypeError(f"cannot convert numpy array of type {data.dtype}")
res[f.name] = from_pandas_series( pd.Series(df[f.name]), f.dtype, scope=scope ) return", "_arrow_scalar_to_py(array) if dtype is not None: assert not dt.is_struct(dtype) return", "return scope._FullColumn(data, dtype=dtype, mask=mask) else: raise TypeError(\"can not convert numpy", "Scope.default device = device or scope.device if dtype is not", "array.null_count > 0), device=device, ) def from_pandas_series(series, dtype=None, scope=None, device=\"\"):", "dt.is_primitive_type(dtype) assert dtype == dt.typeof_np_dtype(data.dtype).with_null() # TODO if not, adopt", "0.2 2 2 3 3 0.3 dtype: Struct([Field('a', int64), Field('b',", "default scope. device : str or \"\" Device to use,", "pa # type: ignore import torcharrow.dtypes as dt from torcharrow", "from_pandas_series(pd.Series(df[n]), scope=scope) return scope.Frame(res, device=device) def from_arrow_array(array, dtype=None, scope=None, device=\"\"):", "dtype, default None Data type to force, if None will", "for f in dtype.fields: # this shows that Column shoud", "return scope.Column(pydata, dtype, device=device) else: return scope.Column( pydata, dtype=_arrowtype_to_dtype(array.type, array.null_count", "numpy.ma as ma # type: ignore # Skipping analyzing 'pandas':", "a torcharrow column (drops indices). \"\"\" scope = scope or", "raise TypeError(f\"cannot convert numpy array of type {array.dtype}\") def _is_not_str(s):", "import numpy as np # type: ignore import numpy.ma as", "assert dt.is_struct(dtype) dtype = cast(dt.Struct, dtype) res = {} for", "torcharrow column. \"\"\" scope = scope or Scope.default device =", "to a torcharrow column (drops indices). \"\"\" scope = scope", "scope.Frame(res, dtype=dtype, device=device) else: res = {} for n in", "Optional[List[str]] = None, scope=None, device=\"\", ): \"\"\" \" Convert arrow", "use, or None for default scope. 
def _pandatype_to_dtype(t, nullable):
    """Map a pandas/numpy scalar type *t* to the equivalent torcharrow dtype."""
    # Delegates entirely to the dtypes module; kept as a named seam so the
    # arrow/pandas paths share one conversion entry point.
    return dt.typeof_nptype(t, nullable)
def _arrowtype_to_dtype(t, nullable):
    """Translate a pyarrow DataType *t* into the matching torcharrow dtype.

    `nullable` propagates whether the originating arrow data contained nulls.
    Raises NotImplementedError for arrow types with no mapping.
    """
    if pa.types.is_boolean(t):
        return dt.Boolean(nullable)
    if pa.types.is_int8(t):
        return dt.Int8(nullable)
    if pa.types.is_int16(t):
        return dt.Int16(nullable)
    if pa.types.is_int32(t):
        return dt.Int32(nullable)
    if pa.types.is_int64(t):
        return dt.Int64(nullable)
    if pa.types.is_float32(t):
        return dt.Float32(nullable)
    if pa.types.is_float64(t):
        return dt.Float64(nullable)
    if pa.types.is_list(t):
        # BUG FIX: this previously called bare `List(...)`, which resolved to
        # typing.List (imported at file top) — not a dtype constructor — and
        # failed at runtime. Use the dtypes module's constructor like every
        # other branch. TODO confirm whether t.value_type should itself be
        # converted via _arrowtype_to_dtype before being passed on.
        return dt.List(t.value_type, nullable)
    if pa.types.is_struct(t):
        return _pandatype_to_dtype(t.to_pandas_dtype(), True)
    if pa.types.is_null(t):
        return dt.Void()
    if pa.types.is_string(t):
        return dt.String(nullable)
    if pa.types.is_map(t):
        # NOTE(review): arrow MapType exposes key_type/item_type; args are
        # passed here as (item, key) — verify dt.Map's parameter order.
        return dt.Map(t.item_type, t.key_type, nullable)
    raise NotImplementedError("unsupported case")
res[f.name] = from_pandas_series(", "# adopt types if dtype is None: dtype = dt.typeof_np_dtype(data.dtype).with_null()", "TODO Check why teh following assert isn't the case #", "0 0.1 1 1 0.2 2 2 3 3 0.3", "else: raise TypeError(f\"cannot convert masked numpy array of type {data.dtype}\")", "0 \"\"\" scope = scope or Scope.default device = device", "dtype = dt.typeof_np_dtype(data.dtype) if dtype is None: dtype = dt.string", "{} for f in dtype.fields: chunked_array = table.column(f.name) pydata =", "is not None: assert not dt.is_struct(dtype) return scope.Column(pydata, dtype, device=device)", "type {data.dtype,}\") # def _column_without_nan(series, dtype): # if dtype is", "\"\"\" Convert pandas dataframe to torcharrow dataframe (drops indices). Parameters", "1dim numpy array to a torcharrow column (zero copy). \"\"\"", "this shows that Column shoud also construct Dataframes! res[f.name] =", "f in dtype.fields: # this shows that Column shoud also", "device) else: raise TypeError(f\"cannot convert numpy array of type {array.dtype}\")", "i in series: # yield i def _arrow_scalar_to_py(array): for i", "scope = scope or Scope.default device = device or scope.device", "return scope.Column( pydata, dtype=_arrowtype_to_dtype(array.type, array.null_count > 0), device=device, ) def", "# yield None # else: # yield i # else:", "ta >>> pdf = pd.DataFrame({'a': [0, 1, 2, 3],'b': [0.1,", "dtype = dt.string else: assert dt.is_primitive(dtype) # TODO Check why", "= from_pandas_series( pd.Series(df[f.name]), f.dtype, scope=scope ) return scope.Frame(res, dtype=dtype, device=device)", "ignore import pyarrow as pa # type: ignore import torcharrow.dtypes", "n in columns: res[n] = from_pandas_series(pd.Series(df[n]), scope=scope) return scope.Frame(res, device=device)", "column (zero copy). \"\"\" scope = scope or Scope.default device", "use, or default if blank. 
Examples -------- >>> import pandas", "else table for n in table.column_names: chunked_array = table.column(n) pydata", "numpy array to a torcharrow column (zero copy). \"\"\" scope", "if pa.types.is_float32(t): return dt.Float32(nullable) if pa.types.is_float64(t): return dt.Float64(nullable) if pa.types.is_list(t):", ": array-like List of column names to extract from df.", "# TODO Check why teh following assert isn't the case", ">>> import torcharrow as ta >>> pdf = pd.DataFrame({'a': [0,", "== dt.typeof_np_dtype(data.dtype) # create column, only zero copy supported if", "dt.Int8(nullable) if pa.types.is_int16(t): return dt.Int16(nullable) if pa.types.is_int32(t): return dt.Int32(nullable) if", "if columns is not None else table for n in", "if None will automatically infer. columns : array-like List of", "column, only zero copy supported if dt.is_boolean_or_numerical(dtype): mask = np.isnan(data)", "pa.types.is_int16(t): return dt.Int16(nullable) if pa.types.is_int32(t): return dt.Int32(nullable) if pa.types.is_int64(t): return", "pydata, dtype=_arrowtype_to_dtype( table.schema.field(n).type, table.column(n).null_count > 0 ), ) return scope.DataFrame(res,", "a torcharrow column. 
\"\"\" scope = scope or Scope.default device", "dtype=dtype, mask=mask) else: raise TypeError(f\"cannot convert masked numpy array of", "_pandatype_to_dtype(t, nullable): return dt.typeof_nptype(t, nullable) def _arrowtype_to_dtype(t, nullable): if pa.types.is_boolean(t):", "n in table.column_names: chunked_array = table.column(n) pydata = chunked_array.to_pylist() res[n]", "# type: ignore import torcharrow.dtypes as dt from torcharrow import", "device : str or \"\" Device to use, or default", "copy supported if dt.is_boolean_or_numerical(dtype): assert not np.all(np.isnan(ma.array(data, mask).compressed())) return scope._FullColumn(data,", "3 0.3 dtype: Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count: 4, null_count:", "as np # type: ignore import numpy.ma as ma #", "return scope.DataFrame(res, device=device) else: res = {} table = table.select(columns)", "or None Scope to use, or None for default scope.", "from df. scope : Scope or None Scope to use,", "analyzing 'pandas': found module but no type hints or library", "isinstance(array, ma.core.MaskedArray) and array.ndim == 1: return _from_numpy_ma(array.data, array.mask, dtype,", "pandas series array to a torcharrow column (drops indices). \"\"\"", "int64), Field('b', Float64(nullable=True))]), count: 4, null_count: 0 \"\"\" scope =", "\"\" Device to use, or default if blank. Examples --------", "import Scope def from_arrow_table( table, dtype: Optional[dt.DType] = None, columns:", "def from_pandas_dataframe( df, dtype: Optional[dt.DType] = None, columns: Optional[List[str]] =" ]
[ "the same variable scope as in the # train job.", "not run_eval_loop: return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)], eval_ops=image_write_ops, max_number_of_evaluations=FLAGS.max_number_of_evaluations)", "2.0 (the \"License\"); # you may not use this file", "# For unit testing, use `run_eval_loop=False`. if not run_eval_loop: return", "absolute_import from __future__ import division from __future__ import print_function from", "# Copyright 2017 The TensorFlow Authors. All Rights Reserved. #", "under the License. # ============================================================================== \"\"\"Evaluates a TFGAN trained compression", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "'Location of data.') # Compression-specific flags. flags.DEFINE_integer('batch_size', 32, 'The number", "patch.') flags.DEFINE_integer('model_depth', 64, 'Number of filters for compression model') def", "where the model was written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where", "tf.app.flags.FLAGS flags = tf.app.flags flags.DEFINE_string('master', '', 'Name of the TensorFlow", "'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.') # Compression-specific flags. flags.DEFINE_integer('batch_size',", "tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)], eval_ops=image_write_ops, max_number_of_evaluations=FLAGS.max_number_of_evaluations) if __name__ ==", "use this file except in compliance with the License. 
#", "networks from research.gan.image_compression import summaries FLAGS = tf.app.flags.FLAGS flags =", "'%s/%s' % (FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0])) # For unit testing, use", "is_training=False) summaries.add_reconstruction_summaries(images, reconstructions, prebinary) # Visualize losses. pixel_loss_per_example = tf.reduce_mean(", "written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where the results are saved", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "License. # You may obtain a copy of the License", "'The number of bits to produce per patch.') flags.DEFINE_integer('model_depth', 64,", "in the # train job. with tf.variable_scope('generator'): reconstructions, _, prebinary", "tf.abs(images - reconstructions), axis=[1, 2, 3]) pixel_loss = tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist',", "The TensorFlow Authors. All Rights Reserved. # # Licensed under", "under the License is distributed on an \"AS IS\" BASIS,", "# Compression-specific flags. flags.DEFINE_integer('batch_size', 32, 'The number of images in", "License for the specific language governing permissions and # limitations", "research.gan.image_compression import networks from research.gan.image_compression import summaries FLAGS = tf.app.flags.FLAGS", "tensorflow as tf from research.gan.image_compression import data_provider from research.gan.image_compression import", "============================================================================== \"\"\"Evaluates a TFGAN trained compression model.\"\"\" from __future__ import", "Reserved. 
# # Licensed under the Apache License, Version 2.0", "model.\"\"\" from __future__ import absolute_import from __future__ import division from", "image_write_ops = tf.write_file( '%s/%s' % (FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0])) # For", "on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The number of bits to produce per", "governing permissions and # limitations under the License. # ==============================================================================", "For unit testing, use `run_eval_loop=False`. if not run_eval_loop: return tf.contrib.training.evaluate_repeatedly(", "TensorFlow Authors. All Rights Reserved. # # Licensed under the", "pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss) # Create ops to write images to", "tf.variable_scope('generator'): reconstructions, _, prebinary = networks.compression_model( images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False)", "64, 'Number of filters for compression model') def main(_, run_eval_loop=True):", "from __future__ import absolute_import from __future__ import division from __future__", "in compliance with the License. # You may obtain a", "software # distributed under the License is distributed on an", "pixel_loss_per_example = tf.reduce_mean( tf.abs(images - reconstructions), axis=[1, 2, 3]) pixel_loss", "times to run evaluation. If `None`, run ' 'forever.') flags.DEFINE_string('dataset_dir',", "and # limitations under the License. 
# ============================================================================== \"\"\"Evaluates a", "return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)], eval_ops=image_write_ops, max_number_of_evaluations=FLAGS.max_number_of_evaluations) if __name__", "where the results are saved to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of", "compression model') def main(_, run_eval_loop=True): with tf.name_scope('inputs'): images = data_provider.provide_data(", "'Number of times to run evaluation. If `None`, run '", "model was written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where the results", "# Visualize losses. pixel_loss_per_example = tf.reduce_mean( tf.abs(images - reconstructions), axis=[1,", "limitations under the License. # ============================================================================== \"\"\"Evaluates a TFGAN trained", "= data_provider.provide_data( 'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) # In order for", "to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where the results are saved to.')", "'/tmp/compression/', 'Directory where the results are saved to.') flags.DEFINE_integer('max_number_of_evaluations', None,", "flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.') # Compression-specific flags. flags.DEFINE_integer('batch_size', 32,", "FLAGS = tf.app.flags.FLAGS flags = tf.app.flags flags.DEFINE_string('master', '', 'Name of", "of times to run evaluation. If `None`, run ' 'forever.')", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "summaries.add_reconstruction_summaries(images, reconstructions, prebinary) # Visualize losses. 
pixel_loss_per_example = tf.reduce_mean( tf.abs(images", "# limitations under the License. # ============================================================================== \"\"\"Evaluates a TFGAN", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "32, 'The number of images in each batch.') flags.DEFINE_integer('patch_size', 32,", "flags.DEFINE_integer('bits_per_patch', 1230, 'The number of bits to produce per patch.')", "uint8_images = data_provider.float_image_to_uint8(images) uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions) uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions)", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "to in writing, software # distributed under the License is", "run_eval_loop: return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)], eval_ops=image_write_ops, max_number_of_evaluations=FLAGS.max_number_of_evaluations) if", "# See the License for the specific language governing permissions", "permissions and # limitations under the License. # ============================================================================== \"\"\"Evaluates", "unit testing, use `run_eval_loop=False`. if not run_eval_loop: return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir,", "language governing permissions and # limitations under the License. 
#", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "flags.DEFINE_integer('batch_size', 32, 'The number of images in each batch.') flags.DEFINE_integer('patch_size',", "with the License. # You may obtain a copy of", "2, 3]) pixel_loss = tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss) #", "to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where the model was written", "images to disk. uint8_images = data_provider.float_image_to_uint8(images) uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions) uint8_reshaped", "as tf from research.gan.image_compression import data_provider from research.gan.image_compression import networks", "num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images, reconstructions, prebinary) # Visualize losses. pixel_loss_per_example", "research.gan.image_compression import summaries FLAGS = tf.app.flags.FLAGS flags = tf.app.flags flags.DEFINE_string('master',", "FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)], eval_ops=image_write_ops, max_number_of_evaluations=FLAGS.max_number_of_evaluations) if __name__ == '__main__':", "tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss) # Create ops to write", "of filters for compression model') def main(_, run_eval_loop=True): with tf.name_scope('inputs'):", "compliance with the License. # You may obtain a copy", "All Rights Reserved. 
# # Licensed under the Apache License,", "agreed to in writing, software # distributed under the License", "# Create ops to write images to disk. uint8_images =", "distributed under the License is distributed on an \"AS IS\"", "the License. # ============================================================================== \"\"\"Evaluates a TFGAN trained compression model.\"\"\"", "= data_provider.float_image_to_uint8(reconstructions) uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops = tf.write_file( '%s/%s'", "of the patches to train on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The number", "express or implied. # See the License for the specific", "from research.gan.image_compression import summaries FLAGS = tf.app.flags.FLAGS flags = tf.app.flags", "except in compliance with the License. # You may obtain", "= tf.app.flags flags.DEFINE_string('master', '', 'Name of the TensorFlow master to", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "if not run_eval_loop: return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)], eval_ops=image_write_ops,", "use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where the model was written to.')", "_, prebinary = networks.compression_model( images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images, reconstructions,", "flags. 
flags.DEFINE_integer('batch_size', 32, 'The number of images in each batch.')", "writing, software # distributed under the License is distributed on", "you may not use this file except in compliance with", "import networks from research.gan.image_compression import summaries FLAGS = tf.app.flags.FLAGS flags", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "import print_function from absl import app import tensorflow as tf", "the model was written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where the", "Visualize losses. pixel_loss_per_example = tf.reduce_mean( tf.abs(images - reconstructions), axis=[1, 2,", "depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images, reconstructions, prebinary) # Visualize losses. pixel_loss_per_example =", "from __future__ import print_function from absl import app import tensorflow", "# train job. with tf.variable_scope('generator'): reconstructions, _, prebinary = networks.compression_model(", "'The number of images in each batch.') flags.DEFINE_integer('patch_size', 32, 'The", "a TFGAN trained compression model.\"\"\" from __future__ import absolute_import from", "CONDITIONS OF ANY KIND, either express or implied. # See", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "model') def main(_, run_eval_loop=True): with tf.name_scope('inputs'): images = data_provider.provide_data( 'validation',", "master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)], eval_ops=image_write_ops, max_number_of_evaluations=FLAGS.max_number_of_evaluations) if __name__ == '__main__': app.run(_)", "reconstructions, prebinary) # Visualize losses. pixel_loss_per_example = tf.reduce_mean( tf.abs(images -", "use `run_eval_loop=False`. 
if not run_eval_loop: return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir),", "same variable scope as in the # train job. with", "for compression model') def main(_, run_eval_loop=True): with tf.name_scope('inputs'): images =", "None, 'Number of times to run evaluation. If `None`, run", "OR CONDITIONS OF ANY KIND, either express or implied. #", "= tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss) # Create ops to", "the License is distributed on an \"AS IS\" BASIS, #", "evaluation. If `None`, run ' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location of", "In order for variables to load, use the same variable", "to disk. uint8_images = data_provider.float_image_to_uint8(images) uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions) uint8_reshaped =", "with tf.name_scope('inputs'): images = data_provider.provide_data( 'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) #", "flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of times to run evaluation. 
If `None`,", "order for variables to load, use the same variable scope", "results are saved to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of times to", "uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops = tf.write_file( '%s/%s' % (FLAGS.eval_dir,", "tf.app.flags flags.DEFINE_string('master', '', 'Name of the TensorFlow master to use.')", "variables to load, use the same variable scope as in", "law or agreed to in writing, software # distributed under", "= summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops = tf.write_file( '%s/%s' % (FLAGS.eval_dir, 'compression.png'),", "'Directory where the results are saved to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number", "bits to produce per patch.') flags.DEFINE_integer('model_depth', 64, 'Number of filters", "compression model.\"\"\" from __future__ import absolute_import from __future__ import division", "prebinary) # Visualize losses. pixel_loss_per_example = tf.reduce_mean( tf.abs(images - reconstructions),", "= tf.reduce_mean( tf.abs(images - reconstructions), axis=[1, 2, 3]) pixel_loss =", "may obtain a copy of the License at # #", "uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions) uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops = tf.write_file(", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "TFGAN trained compression model.\"\"\" from __future__ import absolute_import from __future__", "may not use this file except in compliance with the", "Compression-specific flags. 
flags.DEFINE_integer('batch_size', 32, 'The number of images in each", "flags.DEFINE_integer('patch_size', 32, 'The size of the patches to train on.')", "produce per patch.') flags.DEFINE_integer('model_depth', 64, 'Number of filters for compression", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "this file except in compliance with the License. # You", "import absolute_import from __future__ import division from __future__ import print_function", "2017 The TensorFlow Authors. All Rights Reserved. # # Licensed", "run_eval_loop=True): with tf.name_scope('inputs'): images = data_provider.provide_data( 'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size)", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "data.') # Compression-specific flags. flags.DEFINE_integer('batch_size', 32, 'The number of images", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "filters for compression model') def main(_, run_eval_loop=True): with tf.name_scope('inputs'): images", "prebinary = networks.compression_model( images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images, reconstructions, prebinary)", "= data_provider.float_image_to_uint8(images) uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions) uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops", "from __future__ import division from __future__ import print_function from absl", "ops to write images to disk. 
uint8_images = data_provider.float_image_to_uint8(images) uint8_reconstructions", "'', 'Name of the TensorFlow master to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/',", "pixel_loss = tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss) # Create ops", "master to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where the model was", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "'Name of the TensorFlow master to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory", "3]) pixel_loss = tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss) # Create", "was written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where the results are", "# ============================================================================== \"\"\"Evaluates a TFGAN trained compression model.\"\"\" from __future__", "'Number of filters for compression model') def main(_, run_eval_loop=True): with", "\"\"\"Evaluates a TFGAN trained compression model.\"\"\" from __future__ import absolute_import", "__future__ import division from __future__ import print_function from absl import", "import app import tensorflow as tf from research.gan.image_compression import data_provider", "flags.DEFINE_integer('model_depth', 64, 'Number of filters for compression model') def main(_,", "are saved to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of times to run", "the results are saved to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of times", "'The size of the patches to train on.') flags.DEFINE_integer('bits_per_patch', 1230,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "division from __future__ import 
print_function from absl import app import", "or implied. # See the License for the specific language", "Rights Reserved. # # Licensed under the Apache License, Version", "absl import app import tensorflow as tf from research.gan.image_compression import", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops = tf.write_file( '%s/%s' % (FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0]))", "# In order for variables to load, use the same", "'/tmp/compression/', 'Directory where the model was written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/',", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "research.gan.image_compression import data_provider from research.gan.image_compression import networks from research.gan.image_compression import", "to produce per patch.') flags.DEFINE_integer('model_depth', 64, 'Number of filters for", "write images to disk. uint8_images = data_provider.float_image_to_uint8(images) uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions)", "patches to train on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The number of bits", "disk. 
uint8_images = data_provider.float_image_to_uint8(images) uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions) uint8_reshaped = summaries.stack_images(uint8_images,", "images = data_provider.provide_data( 'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) # In order", "(the \"License\"); # you may not use this file except", "tf.reduce_mean( tf.abs(images - reconstructions), axis=[1, 2, 3]) pixel_loss = tf.reduce_mean(pixel_loss_per_example)", "# you may not use this file except in compliance", "def main(_, run_eval_loop=True): with tf.name_scope('inputs'): images = data_provider.provide_data( 'validation', FLAGS.batch_size,", "tf.image.encode_png(uint8_reshaped[0])) # For unit testing, use `run_eval_loop=False`. if not run_eval_loop:", "# # Unless required by applicable law or agreed to", "tf.summary.scalar('pixel_l1_loss', pixel_loss) # Create ops to write images to disk.", "as in the # train job. with tf.variable_scope('generator'): reconstructions, _,", "to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of times to run evaluation. If", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Version 2.0 (the \"License\"); # you may not use this", "in each batch.') flags.DEFINE_integer('patch_size', 32, 'The size of the patches", "of data.') # Compression-specific flags. flags.DEFINE_integer('batch_size', 32, 'The number of", "(FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0])) # For unit testing, use `run_eval_loop=False`. 
if", "data_provider.float_image_to_uint8(images) uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions) uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops =", "__future__ import absolute_import from __future__ import division from __future__ import", "the TensorFlow master to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where the", "data_provider.float_image_to_uint8(reconstructions) uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions) image_write_ops = tf.write_file( '%s/%s' %", "implied. # See the License for the specific language governing", "import summaries FLAGS = tf.app.flags.FLAGS flags = tf.app.flags flags.DEFINE_string('master', '',", "load, use the same variable scope as in the #", "job. with tf.variable_scope('generator'): reconstructions, _, prebinary = networks.compression_model( images, num_bits=FLAGS.bits_per_patch,", "pixel_loss) # Create ops to write images to disk. uint8_images", "`None`, run ' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.') #", "under the Apache License, Version 2.0 (the \"License\"); # you", "number of images in each batch.') flags.DEFINE_integer('patch_size', 32, 'The size", "by applicable law or agreed to in writing, software #", "run evaluation. 
If `None`, run ' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location", "flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where the model was written to.') flags.DEFINE_string('eval_dir',", "main(_, run_eval_loop=True): with tf.name_scope('inputs'): images = data_provider.provide_data( 'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir,", "= tf.write_file( '%s/%s' % (FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0])) # For unit", "to load, use the same variable scope as in the", "per patch.') flags.DEFINE_integer('model_depth', 64, 'Number of filters for compression model')", "train on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The number of bits to produce", "'compression.png'), tf.image.encode_png(uint8_reshaped[0])) # For unit testing, use `run_eval_loop=False`. if not", "import division from __future__ import print_function from absl import app", "networks.compression_model( images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images, reconstructions, prebinary) # Visualize", "' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.') # Compression-specific flags.", "app import tensorflow as tf from research.gan.image_compression import data_provider from", "to run evaluation. If `None`, run ' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata',", "'testdata', 'Location of data.') # Compression-specific flags. 
flags.DEFINE_integer('batch_size', 32, 'The", "tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss) # Create ops to write images", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "tf.name_scope('inputs'): images = data_provider.provide_data( 'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) # In", "images in each batch.') flags.DEFINE_integer('patch_size', 32, 'The size of the", "Create ops to write images to disk. uint8_images = data_provider.float_image_to_uint8(images)", "Unless required by applicable law or agreed to in writing,", "from research.gan.image_compression import networks from research.gan.image_compression import summaries FLAGS =", "train job. with tf.variable_scope('generator'): reconstructions, _, prebinary = networks.compression_model( images,", "import tensorflow as tf from research.gan.image_compression import data_provider from research.gan.image_compression", "License. # ============================================================================== \"\"\"Evaluates a TFGAN trained compression model.\"\"\" from", "flags = tf.app.flags flags.DEFINE_string('master', '', 'Name of the TensorFlow master", "print_function from absl import app import tensorflow as tf from", "patch_size=FLAGS.patch_size) # In order for variables to load, use the", "reconstructions, _, prebinary = networks.compression_model( images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images,", "= networks.compression_model( images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images, reconstructions, prebinary) #", "the specific language governing permissions and # limitations under the", "`run_eval_loop=False`. 
if not run_eval_loop: return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master, hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir), tf.contrib.training.StopAfterNEvalsHook(1)],", "summaries FLAGS = tf.app.flags.FLAGS flags = tf.app.flags flags.DEFINE_string('master', '', 'Name", "applicable law or agreed to in writing, software # distributed", "size of the patches to train on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The", "1230, 'The number of bits to produce per patch.') flags.DEFINE_integer('model_depth',", "of bits to produce per patch.') flags.DEFINE_integer('model_depth', 64, 'Number of", "in writing, software # distributed under the License is distributed", "number of bits to produce per patch.') flags.DEFINE_integer('model_depth', 64, 'Number", "FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) # In order for variables to load,", "images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth, is_training=False) summaries.add_reconstruction_summaries(images, reconstructions, prebinary) # Visualize losses.", "'Directory where the model was written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory", "use the same variable scope as in the # train", "dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) # In order for variables to load, use", "batch.') flags.DEFINE_integer('patch_size', 32, 'The size of the patches to train", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "the # train job. with tf.variable_scope('generator'): reconstructions, _, prebinary =", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "to train on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The number of bits to", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "Authors. All Rights Reserved. 
# # Licensed under the Apache", "= tf.app.flags.FLAGS flags = tf.app.flags flags.DEFINE_string('master', '', 'Name of the", "the License for the specific language governing permissions and #", "32, 'The size of the patches to train on.') flags.DEFINE_integer('bits_per_patch',", "TensorFlow master to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where the model", "Apache License, Version 2.0 (the \"License\"); # you may not", "each batch.') flags.DEFINE_integer('patch_size', 32, 'The size of the patches to", "either express or implied. # See the License for the", "run ' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.') # Compression-specific", "Copyright 2017 The TensorFlow Authors. All Rights Reserved. # #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "axis=[1, 2, 3]) pixel_loss = tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss', pixel_loss)", "reconstructions), axis=[1, 2, 3]) pixel_loss = tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example) tf.summary.scalar('pixel_l1_loss',", "variable scope as in the # train job. with tf.variable_scope('generator'):", "the patches to train on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The number of", "testing, use `run_eval_loop=False`. 
if not run_eval_loop: return tf.contrib.training.evaluate_repeatedly( FLAGS.checkpoint_dir, master=FLAGS.master,", "data_provider.provide_data( 'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) # In order for variables", "data_provider from research.gan.image_compression import networks from research.gan.image_compression import summaries FLAGS", "for variables to load, use the same variable scope as", "- reconstructions), axis=[1, 2, 3]) pixel_loss = tf.reduce_mean(pixel_loss_per_example) tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example)", "flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where the results are saved to.') flags.DEFINE_integer('max_number_of_evaluations',", "If `None`, run ' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.')", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "% (FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0])) # For unit testing, use `run_eval_loop=False`.", "'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir, patch_size=FLAGS.patch_size) # In order for variables to", "from research.gan.image_compression import data_provider from research.gan.image_compression import networks from research.gan.image_compression", "import data_provider from research.gan.image_compression import networks from research.gan.image_compression import summaries", "of images in each batch.') flags.DEFINE_integer('patch_size', 32, 'The size of", "tf.write_file( '%s/%s' % (FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0])) # For unit testing,", "uint8_reconstructions) image_write_ops = tf.write_file( '%s/%s' % (FLAGS.eval_dir, 'compression.png'), tf.image.encode_png(uint8_reshaped[0])) #", "\"License\"); # you may not use this file except in", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "__future__ import print_function from absl import app import 
tensorflow as", "of the TensorFlow master to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where", "# distributed under the License is distributed on an \"AS", "trained compression model.\"\"\" from __future__ import absolute_import from __future__ import", "# Unless required by applicable law or agreed to in", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "scope as in the # train job. with tf.variable_scope('generator'): reconstructions,", "to write images to disk. uint8_images = data_provider.float_image_to_uint8(images) uint8_reconstructions =", "with tf.variable_scope('generator'): reconstructions, _, prebinary = networks.compression_model( images, num_bits=FLAGS.bits_per_patch, depth=FLAGS.model_depth,", "You may obtain a copy of the License at #", "losses. pixel_loss_per_example = tf.reduce_mean( tf.abs(images - reconstructions), axis=[1, 2, 3])", "from absl import app import tensorflow as tf from research.gan.image_compression", "flags.DEFINE_string('master', '', 'Name of the TensorFlow master to use.') flags.DEFINE_string('checkpoint_dir',", "saved to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of times to run evaluation.", "the Apache License, Version 2.0 (the \"License\"); # you may", "tf from research.gan.image_compression import data_provider from research.gan.image_compression import networks from" ]
[ "os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON'] def dump_query_results(): \"\"\" This is a", "given query so we can supply users with custom tables.", "metrics = os.environ['QUERY_ON'] def dump_query_results(): \"\"\" This is a simple", "It can be called simply with the bin shell script.", "shell script. Read the README at the top level for", "os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON'] def dump_query_results(): \"\"\"", "users with custom tables. Note that the SQL query itself", "def dump_query_results(): \"\"\" This is a simple SQL table dump", "query itself and column headers portion need to be changed", "sql_host = os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON'] def dump_query_results(): \"\"\" This", "want to change the query/results. Otherwise it is good to", "query so we can supply users with custom tables. Note", "(row_values) in cursor: temp_string = \"\" for i in range(len(row_values)", "row_values[-1] is not None: temp_string += str(row_values[-1]) print(temp_string) return 1", "in cursor: temp_string = \"\" for i in range(len(row_values) -", "TO MATCH QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values = list() for", "good to go. It can be called simply with the", "MATCH QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values = list() for (row_values)", "= db_connection.cursor() query = \"use \"+metrics cursor.execute(query) #CHANGE QUERY HERE", "we can supply users with custom tables. Note that the", "db_connection.cursor() query = \"use \"+metrics cursor.execute(query) #CHANGE QUERY HERE query", "temp_string = \"\" for i in range(len(row_values) - 1): if", "change the query/results. Otherwise it is good to go. 
It", "signup_date, last_signin_date from user_info order by signup_date\" #CHANGE COLUMN HEADERS", "QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values = list() for (row_values) in", "1): if row_values[i] is not None: temp_string += str(row_values[i]) temp_string", "dump_query_results(): \"\"\" This is a simple SQL table dump of", "signup_date\" #CHANGE COLUMN HEADERS HERE TO MATCH QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\")", "simply with the bin shell script. Read the README at", "= sql_host,#\"mysql1\", #\"localhost\", user = \"metrics\", #\"root\", passwd = <PASSWORD>,", "a given query so we can supply users with custom", "the top level for an example. \"\"\" #connect to mysql", "user_info order by signup_date\" #CHANGE COLUMN HEADERS HERE TO MATCH", "level for an example. \"\"\" #connect to mysql db_connection =", "os import mysql.connector as mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host =", "last_signin_date from user_info order by signup_date\" #CHANGE COLUMN HEADERS HERE", "\"metrics\" #\"datacamp\" ) cursor = db_connection.cursor() query = \"use \"+metrics", "user = \"metrics\", #\"root\", passwd = <PASSWORD>, database = \"metrics\"", "go. It can be called simply with the bin shell", "= list() for (row_values) in cursor: temp_string = \"\" for", "table dump of a given query so we can supply", "#\"root\", passwd = <PASSWORD>, database = \"metrics\" #\"datacamp\" ) cursor", "query = \"use \"+metrics cursor.execute(query) #CHANGE QUERY HERE query =", "called simply with the bin shell script. Read the README", "be called simply with the bin shell script. 
Read the", "#CHANGE QUERY HERE query = \"select username, display_name, email, orcid,", "= os.environ['QUERY_ON'] def dump_query_results(): \"\"\" This is a simple SQL", "mysql db_connection = mysql.connect( host = sql_host,#\"mysql1\", #\"localhost\", user =", "passwd = <PASSWORD>, database = \"metrics\" #\"datacamp\" ) cursor =", "for (row_values) in cursor: temp_string = \"\" for i in", "kb_internal_user, institution, country, signup_date, last_signin_date from user_info order by signup_date\"", "an example. \"\"\" #connect to mysql db_connection = mysql.connect( host", "from user_info order by signup_date\" #CHANGE COLUMN HEADERS HERE TO", "#connect to mysql db_connection = mysql.connect( host = sql_host,#\"mysql1\", #\"localhost\",", "- 1): if row_values[i] is not None: temp_string += str(row_values[i])", "by signup_date\" #CHANGE COLUMN HEADERS HERE TO MATCH QUERY HEADERS", "is not None: temp_string += str(row_values[-1]) print(temp_string) return 1 dump_query_results()", "country, signup_date, last_signin_date from user_info order by signup_date\" #CHANGE COLUMN", "of a given query so we can supply users with", "Note that the SQL query itself and column headers portion", "import os import mysql.connector as mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host", "in range(len(row_values) - 1): if row_values[i] is not None: temp_string", "email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date from user_info order", "= mysql.connect( host = sql_host,#\"mysql1\", #\"localhost\", user = \"metrics\", #\"root\",", "README at the top level for an example. 
\"\"\" #connect", "if row_values[-1] is not None: temp_string += str(row_values[-1]) print(temp_string) return", "portion need to be changed if you want to change", "= \"metrics\" #\"datacamp\" ) cursor = db_connection.cursor() query = \"use", "is not None: temp_string += str(row_values[i]) temp_string += \"\\t\" if", "\"\"\" #connect to mysql db_connection = mysql.connect( host = sql_host,#\"mysql1\",", "is good to go. It can be called simply with", "metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON'] def", "sql_host,#\"mysql1\", #\"localhost\", user = \"metrics\", #\"root\", passwd = <PASSWORD>, database", "#\"datacamp\" ) cursor = db_connection.cursor() query = \"use \"+metrics cursor.execute(query)", "row_values = list() for (row_values) in cursor: temp_string = \"\"", "HERE TO MATCH QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values = list()", "HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values = list() for (row_values) in cursor:", "the bin shell script. Read the README at the top", "list() for (row_values) in cursor: temp_string = \"\" for i", "as mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST'] metrics =", "bin shell script. Read the README at the top level", "custom tables. 
Note that the SQL query itself and column", "i in range(len(row_values) - 1): if row_values[i] is not None:", "\"\\t\" if row_values[-1] is not None: temp_string += str(row_values[-1]) print(temp_string)", "to be changed if you want to change the query/results.", "mysql.connect( host = sql_host,#\"mysql1\", #\"localhost\", user = \"metrics\", #\"root\", passwd", "import mysql.connector as mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST']", "it is good to go. It can be called simply", "= \"use \"+metrics cursor.execute(query) #CHANGE QUERY HERE query = \"select", "= os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON'] def dump_query_results():", "with custom tables. Note that the SQL query itself and", "be changed if you want to change the query/results. Otherwise", "the SQL query itself and column headers portion need to", "institution, country, signup_date, last_signin_date from user_info order by signup_date\" #CHANGE", "<PASSWORD>, database = \"metrics\" #\"datacamp\" ) cursor = db_connection.cursor() query", "cursor = db_connection.cursor() query = \"use \"+metrics cursor.execute(query) #CHANGE QUERY", "+= \"\\t\" if row_values[-1] is not None: temp_string += str(row_values[-1])", "str(row_values[i]) temp_string += \"\\t\" if row_values[-1] is not None: temp_string", "itself and column headers portion need to be changed if", "with the bin shell script. Read the README at the", "None: temp_string += str(row_values[i]) temp_string += \"\\t\" if row_values[-1] is", "to change the query/results. 
Otherwise it is good to go.", "cursor: temp_string = \"\" for i in range(len(row_values) - 1):", "\"metrics\", #\"root\", passwd = <PASSWORD>, database = \"metrics\" #\"datacamp\" )", "column headers portion need to be changed if you want", "= \"select username, display_name, email, orcid, kb_internal_user, institution, country, signup_date,", "\"\"\" This is a simple SQL table dump of a", "query/results. Otherwise it is good to go. It can be", "= <PASSWORD>, database = \"metrics\" #\"datacamp\" ) cursor = db_connection.cursor()", "cursor.execute(query) row_values = list() for (row_values) in cursor: temp_string =", ") cursor = db_connection.cursor() query = \"use \"+metrics cursor.execute(query) #CHANGE", "for i in range(len(row_values) - 1): if row_values[i] is not", "\"use \"+metrics cursor.execute(query) #CHANGE QUERY HERE query = \"select username,", "This is a simple SQL table dump of a given", "= os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON'] def dump_query_results(): \"\"\" This is", "to mysql db_connection = mysql.connect( host = sql_host,#\"mysql1\", #\"localhost\", user", "range(len(row_values) - 1): if row_values[i] is not None: temp_string +=", "you want to change the query/results. Otherwise it is good", "script. Read the README at the top level for an", "HEADERS HERE TO MATCH QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values =", "temp_string += str(row_values[i]) temp_string += \"\\t\" if row_values[-1] is not", "Read the README at the top level for an example.", "mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON']", "#!/usr/local/bin/python import os import mysql.connector as mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD']", "at the top level for an example. 
\"\"\" #connect to", "db_connection = mysql.connect( host = sql_host,#\"mysql1\", #\"localhost\", user = \"metrics\",", "\"+metrics cursor.execute(query) #CHANGE QUERY HERE query = \"select username, display_name,", "HERE query = \"select username, display_name, email, orcid, kb_internal_user, institution,", "database = \"metrics\" #\"datacamp\" ) cursor = db_connection.cursor() query =", "= \"metrics\", #\"root\", passwd = <PASSWORD>, database = \"metrics\" #\"datacamp\"", "temp_string += \"\\t\" if row_values[-1] is not None: temp_string +=", "to go. It can be called simply with the bin", "the README at the top level for an example. \"\"\"", "simple SQL table dump of a given query so we", "for an example. \"\"\" #connect to mysql db_connection = mysql.connect(", "top level for an example. \"\"\" #connect to mysql db_connection", "that the SQL query itself and column headers portion need", "headers portion need to be changed if you want to", "is a simple SQL table dump of a given query", "SQL table dump of a given query so we can", "if row_values[i] is not None: temp_string += str(row_values[i]) temp_string +=", "not None: temp_string += str(row_values[i]) temp_string += \"\\t\" if row_values[-1]", "dump of a given query so we can supply users", "the query/results. Otherwise it is good to go. It can", "COLUMN HEADERS HERE TO MATCH QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values", "= \"\" for i in range(len(row_values) - 1): if row_values[i]", "host = sql_host,#\"mysql1\", #\"localhost\", user = \"metrics\", #\"root\", passwd =", "\"\" for i in range(len(row_values) - 1): if row_values[i] is", "username, display_name, email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date from", "Otherwise it is good to go. 
It can be called", "#\"localhost\", user = \"metrics\", #\"root\", passwd = <PASSWORD>, database =", "cursor.execute(query) #CHANGE QUERY HERE query = \"select username, display_name, email,", "+= str(row_values[i]) temp_string += \"\\t\" if row_values[-1] is not None:", "order by signup_date\" #CHANGE COLUMN HEADERS HERE TO MATCH QUERY", "#CHANGE COLUMN HEADERS HERE TO MATCH QUERY HEADERS print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query)", "row_values[i] is not None: temp_string += str(row_values[i]) temp_string += \"\\t\"", "and column headers portion need to be changed if you", "SQL query itself and column headers portion need to be", "supply users with custom tables. Note that the SQL query", "so we can supply users with custom tables. Note that", "can supply users with custom tables. Note that the SQL", "<reponame>CheyenneNS/metrics #!/usr/local/bin/python import os import mysql.connector as mysql metrics_mysql_password =", "tables. Note that the SQL query itself and column headers", "changed if you want to change the query/results. Otherwise it", "QUERY HERE query = \"select username, display_name, email, orcid, kb_internal_user,", "print(\"username\\tdisplay_name\\temail\\torcid\\tkb_internal_user\\tinstitution\\tcountry\\tsignup_date\\tlast_signin_date\") cursor.execute(query) row_values = list() for (row_values) in cursor: temp_string", "example. \"\"\" #connect to mysql db_connection = mysql.connect( host =", "\"select username, display_name, email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date", "query = \"select username, display_name, email, orcid, kb_internal_user, institution, country,", "if you want to change the query/results. 
Otherwise it is", "orcid, kb_internal_user, institution, country, signup_date, last_signin_date from user_info order by", "display_name, email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date from user_info", "need to be changed if you want to change the", "can be called simply with the bin shell script. Read", "a simple SQL table dump of a given query so", "os.environ['QUERY_ON'] def dump_query_results(): \"\"\" This is a simple SQL table", "mysql.connector as mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST'] metrics" ]
[ "here DNS specific? def __init__(self, devices, parameters): super().__init__(WindowsDNSForcePublicDNSServersDisrupter, devices, parameters)", "from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO: Make the", "capture here DNS specific? def __init__(self, devices, parameters): super().__init__(WindowsDNSForcePublicDNSServersDisrupter, devices,", "desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO: Make the packet", "the packet capture here DNS specific? def __init__(self, devices, parameters):", "TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO: Make the packet capture here DNS specific?", "import WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO: Make the packet capture", "TODO: Make the packet capture here DNS specific? def __init__(self,", "desktop_local_tests.local_packet_capture_test_case_with_disrupter import LocalPacketCaptureTestCaseWithDisrupter from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): #", "Make the packet capture here DNS specific? 
def __init__(self, devices,", "WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO: Make the packet capture here", "LocalPacketCaptureTestCaseWithDisrupter from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO: Make", "<reponame>UAEKondaya1/expressvpn_leak_testing from desktop_local_tests.local_packet_capture_test_case_with_disrupter import LocalPacketCaptureTestCaseWithDisrupter from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter class", "packet capture here DNS specific? def __init__(self, devices, parameters): super().__init__(WindowsDNSForcePublicDNSServersDisrupter,", "class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO: Make the packet capture here DNS", "from desktop_local_tests.local_packet_capture_test_case_with_disrupter import LocalPacketCaptureTestCaseWithDisrupter from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter):", "# TODO: Make the packet capture here DNS specific? def", "import LocalPacketCaptureTestCaseWithDisrupter from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter): # TODO:" ]
[ "we'll upload only 2 images in the GPU per frame.", "loader Cache.register('kv.loader', limit=500, timeout=60) class ProxyImage(Image): '''Image returned by the", "Image from kivy.compat import PY2 from collections import deque from", "file is local, or _load_urllib() if the file is on", "= filename.split(':', 1)[0] if proto == 'smb': try: # note:", "a default loading image for using a such loader:: from", "imdata.source = filename except Exception: Logger.exception('Failed to load image <%s>'", "(None, False): # found image, if data is not here,", "super(LoaderThreadPool, self).start() self.pool = _ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0) def stop(self): super(LoaderThreadPool,", "image') self._max_upload_per_frame = num def _get_max_upload_per_frame(self): return self._max_upload_per_frame max_upload_per_frame =", "have activated mipmap=True too, then the GPU must calculate the", "try: proto = filename.split(':', 1)[0] except: #if blank filename then", "image processing per image') self._max_upload_per_frame = num def _get_max_upload_per_frame(self): return", "pygame as our first choice for loader # from kivy.compat", "use pygame as our first choice for loader # from", "self._start_wanted = False # in pause mode, don't unqueue anything.", "client = ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs) self._client.append((filename, client)) if data is", "'' suffix = '.%s' % (filename.split('.')[-1]) _out_osfd, _out_filename = tempfile.mkstemp(", "in memory, so it will take times. If you have", "_set_num_workers) '''Number of workers to use while loading. (used only", "self._resume_cond = threading.Condition() self._q_load = deque() self._q_done = deque() self._client", "least 2 workers') self._num_workers = num def _get_num_workers(self): return self._num_workers", "Loader and specific implementation. 
By default, Loader will be the", "_get_loading_image(self): if not self._loading_image: loading_png_fn = join(kivy_data_dir, 'images', 'image-loading.gif') self._loading_image", "self._trigger_update() def _load_local(self, filename, kwargs): '''(internal) Loading a local file'''", "then return return if load_callback is not None: data =", "is really the first time self._q_load.appendleft({ 'filename': filename, 'load_callback': load_callback,", "= False self.tasks.join() class LoaderThreadPool(LoaderBase): def __init__(self): super(LoaderThreadPool, self).__init__() self.pool", "Loader.error_image = 'error.png' .. versionchanged:: 1.6.0 Not readonly anymore. '''", "self.tasks.join() class LoaderThreadPool(LoaderBase): def __init__(self): super(LoaderThreadPool, self).__init__() self.pool = None", "Clock.unschedule(self.run) self.pool.stop() def run(self, *largs): while self._running: try: parameters =", "self._loading_image def _set_loading_image(self, image): if isinstance(image, basestring): self._loading_image = ImageLoader.load(filename=image)", "'.%s' % (filename.split('.')[-1]) _out_osfd, _out_filename = tempfile.mkstemp( prefix='kivyloader', suffix=suffix) if", "the upload time can be consequent, and can stuck the", "faster, but the user will not been able to use", "super(LoaderThreadPool, self).stop() Clock.unschedule(self.run) self.pool.stop() def run(self, *largs): while self._running: try:", "__init__(self, num_threads): super(_ThreadPool, self).__init__() self.running = True self.tasks = queue.Queue()", "self._client.append((filename, client)) if data is None: # if data is", "recent changes to CoreImage, we must keep data otherwise, #", "so it will take times. 
If you have activated mipmap=True", "return import tempfile data = fd = _out_osfd = None", "proxyImage): if proxyImage.image.texture: self.image.texture = proxyImage.image.texture def build(self): proxyImage =", "join( 'atlas://data/images/defaulttheme/image-missing') self._error_image = ImageLoader.load(filename=error_png_fn) return self._error_image def _set_error_image(self, image):", "Image from kivy.loader import Loader class TestApp(App): def _image_loaded(self, proxyImage):", "if proxyImage.image.texture: self.image.texture = proxyImage.image.texture def build(self): proxyImage = Loader.image(\"myPic.jpg\")", "kargs = self.tasks.get() try: func(*args, **kargs) except Exception as e:", "otherwise the data is occasionaly not loaded from smb.SMBHandler import", "loop for the loader.''' pass def stop(self): '''Stop the loader", "if data not in (None, False): # found image, if", "'''Image returned by the Loader.image() function. :Properties: `loaded`: bool, default", "used for error. You can change it by doing:: Loader.error_image", "from samba shares fd = urllib_request.build_opener(SMBHandler).open(filename) else: # read from", "num_workers = property(_get_num_workers, _set_num_workers) '''Number of workers to use while", "file when remote file not found or download error try:", "if fd: fd.close() if _out_osfd: close(_out_osfd) if _out_filename != '':", "fd = _out_osfd = None try: _out_filename = '' suffix", "write to local filename write(_out_osfd, idata) close(_out_osfd) _out_osfd = None", "None else: # # Try to use pygame as our", "images you're gonna to load. Take a look at the", "== 'smb': try: # note: it's important to load SMBHandler", "using the Loader. A ProxyImage is returned with a loading", "image uploads in GPU to do per frames. ''' __all__", "versionadded:: 1.6.0 ''' self._paused = False self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release() def", "uploads in GPU to do per frames. ''' __all__ =", "to load. 
Take a look at the parameters: - :data:`Loader.num_workers`", "= self._q_done.pop() except IndexError: return # create the image image", "update client for c_filename, client in self._client[:]: if filename !=", "= Loader.image('mysprite.png') You can also load image from url:: image", "fd = urllib_request.build_opener(SMBHandler).open(filename) else: # read from internet fd =", "client)) self._trigger_update() def image(self, filename, load_callback=None, post_callback=None, **kwargs): '''Load a", "self.tasks = queue.Queue() for _ in range(num_threads): _Worker(self, self.tasks) def", "by the thread. Will call _load_local() if the file is", "self._running = False self._start_wanted = False self._trigger_update = Clock.create_trigger(self._update) def", "file, and pass it to _load_local()''' if PY2: import urllib2", "os import write, close, unlink, environ import threading # Register", "to 1 or 2. If you get ride of that", "load PySMB: make sure it is installed') return import tempfile", "Thread class _Worker(Thread): '''Thread executing tasks from a given tasks", "load image <%s>' % filename) # close file when remote", "'''Load a image using the Loader. A ProxyImage is returned", "def stop(self): self.running = False self.tasks.join() class LoaderThreadPool(LoaderBase): def __init__(self):", "1)[0] except: #if blank filename then return return if load_callback", "could increase the number of workers, then all the images", "only at the beginning. Once the loader is started, the", "queue ''' def __init__(self, num_threads): super(_ThreadPool, self).__init__() self.running = True", "url:: image = Loader.image('http://mysite.com/test.png') If you want to change the", "get ride of that (or reduce it a lot), take", "self._trigger_update() def image(self, filename, load_callback=None, post_callback=None, **kwargs): '''Load a image", "image for using a such loader:: from kivy import *", "loader, after a :meth:`pause`. .. 
versionadded:: 1.6.0 ''' self._paused =", "First download it, save it to a temporary file, and", "func, args, kargs = self.tasks.get() try: func(*args, **kargs) except Exception", "of workers to use while loading. (used only if the", "'loading.png' .. versionchanged:: 1.6.0 Not readonly anymore. ''' def _get_error_image(self):", "2 self._paused = False self._resume_cond = threading.Condition() self._q_load = deque()", "= image client.loaded = True client.dispatch('on_load') self._client.remove((c_filename, client)) self._trigger_update() def", "'LoaderBase', 'ProxyImage') from kivy import kivy_data_dir from kivy.logger import Logger", "or download error try: close(_out_osfd) except OSError: pass return self.error_image", "pass it to _load_local()''' if PY2: import urllib2 as urllib_request", "implementation # if 'KIVY_DOC' in environ: Loader = None else:", "loading_image = property(_get_loading_image, _set_loading_image) '''Image used for loading. You can", "post_callback=None, **kwargs): '''Load a image using the Loader. A ProxyImage", "already cached :Events: `on_load` Fired when the image is loaded", "self._num_workers = num def _get_num_workers(self): return self._num_workers num_workers = property(_get_num_workers,", "Loader.image() function. :Properties: `loaded`: bool, default to False It can", "order to cancel all background loading, call *Loader.stop()*. ''' data", "_image_loaded(self, proxyImage): if proxyImage.image.texture: self.image.texture = proxyImage.image.texture def build(self): proxyImage", "Full-HD images, the upload time can be consequent, and can", "'''Image used for loading. You can change it by doing::", "this big images too, in real time. Then it can", "lot), take a look at the DDS format. .. 
versionadded::", "in data._data: imdata.source = filename except Exception: Logger.exception('Failed to load", "= kwargs['filename'] load_callback = kwargs['load_callback'] post_callback = kwargs['post_callback'] try: proto", "image from url:: image = Loader.image('http://mysite.com/test.png') If you want to", "= kwargs['load_callback'] post_callback = kwargs['post_callback'] try: proto = filename.split(':', 1)[0]", "App from kivy.uix.image import Image from kivy.loader import Loader class", "return for x in range(self.max_upload_per_frame): try: filename, data = self._q_done.pop()", "======================== This is the Asynchronous Loader. You can use it", "self._q_load = deque() self._q_done = deque() self._client = [] self._running", "application during the upload. If you want a smooth experience,", "doing:: Loader.error_image = 'error.png' .. versionchanged:: 1.6.0 Not readonly anymore.", "to change the default loading image, you can do:: Loader.loading_image", "is started, the setting has no impact:: from kivy.loader import", "20, and loading many full-hd images was blocking completly the", "urllib_request else: import urllib.request as urllib_request proto = filename.split(':', 1)[0]", "the image is loaded and changed ''' __events__ = ('on_load',", "collections import deque from time import sleep from os.path import", "False): Cache.append('kv.loader', filename, False) self._start_wanted = True self._trigger_update() else: #", "load_callback = kwargs['load_callback'] post_callback = kwargs['post_callback'] try: proto = filename.split(':',", "'https', 'ftp', 'smb'): data = self._load_urllib(filename, kwargs['kwargs']) else: data =", "loader -------------------------------- .. versionadded:: 1.6.0 You can now tweak the", "* image = Loader.image('mysprite.png') You can also load image from", "don't unqueue anything. 
if self._paused: self._trigger_update() return for x in", "% (filename.split('.')[-1]) _out_osfd, _out_filename = tempfile.mkstemp( prefix='kivyloader', suffix=suffix) if proto", "matter of fact, a Full-HD RGB image will take ~6MB", "If you get ride of that (or reduce it a", "update client.image = image client.loaded = True client.dispatch('on_load') self._client.remove((c_filename, client))", "a :meth:`pause`. .. versionadded:: 1.6.0 ''' self._paused = False self._resume_cond.acquire()", "if filename != c_filename: continue # got one client to", "can also load image from url:: image = Loader.image('http://mysite.com/test.png') If", "''' while len(self._q_done) >= ( self.max_upload_per_frame * self._num_workers): sleep(0.1) self._wait_for_resume()", "parameters = self._q_load.pop() except: return self.pool.add_task(self._load, parameters) Loader = LoaderThreadPool()", "let the default. As matter of fact, a Full-HD RGB", "def _load_local(self, filename, kwargs): '''(internal) Loading a local file''' #", "for loader # from kivy.compat import queue from threading import", "if not self._running: self.start() self._start_wanted = False # in pause", "Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image = Image() return self.image TestApp().run() In order", "import threading # Register a cache for loader Cache.register('kv.loader', limit=500,", "import ImageLoader, Image from kivy.compat import PY2 from collections import", "Loader.loading_image = 'loading.png' .. versionchanged:: 1.6.0 Not readonly anymore. '''", "# Loader implementation # if 'KIVY_DOC' in environ: Loader =", "self._error_image = ImageLoader.load(filename=error_png_fn) return self._error_image def _set_error_image(self, image): if isinstance(image,", "a loading image. You can use it as follows:: from", "beginning. 
Once the loader is started, the setting has no", "self._client = [] self._running = False self._start_wanted = False self._trigger_update", "= filename.split(':', 1)[0] except: #if blank filename then return return", "By default, we'll upload only 2 images in the GPU", "self._running and self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release() def _load(self, kwargs): '''(internal)", "kivy.clock import Clock from kivy.cache import Cache from kivy.core.image import", "file not found or download error try: close(_out_osfd) except OSError:", "is the Asynchronous Loader. You can use it to load", "at the beginning. Once the loader is started, the setting", "and loading many full-hd images was blocking completly the application.", "is occasionaly not loaded from smb.SMBHandler import SMBHandler except ImportError:", "Logger.warning( 'Loader: can not load PySMB: make sure it is", "of the images you're gonna to load. Take a look", "Loader = None else: # # Try to use pygame", "setting has no impact:: from kivy.loader import Loader Loader.num_workers =", "proxyImage.bind(on_load=self._image_loaded) self.image = Image() return self.image TestApp().run() In order to", "default to False It can be True if the image", "Loader implementation # if 'KIVY_DOC' in environ: Loader = None", "**kwargs) client = ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs) self._client.append((filename, client)) if data", "if isinstance(image, basestring): self._error_image = ImageLoader.load(filename=image) else: self._error_image = image", "proxyImage = Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image = Image() return self.image TestApp().run()", "blocking completly the application. .. versionadded:: 1.6.0 ''' def _set_max_upload_per_frame(self,", "kivy import kivy_data_dir from kivy.logger import Logger from kivy.clock import", "versionchanged:: 1.6.0 Not readonly anymore. 
''' def start(self): '''Start the", "= True def run(self, *largs): '''Main loop for the loader.'''", "def image(self, filename, load_callback=None, post_callback=None, **kwargs): '''Load a image using", "Loading function, called by the thread. Will call _load_local() if", "data._data: imdata.source = filename except Exception: Logger.exception('Failed to load image", "pass def _set_num_workers(self, num): if num < 2: raise Exception('Must", "for using a such loader:: from kivy import * image", "Asynchronous data loader ======================== This is the Asynchronous Loader. You", "**kwargs): '''Load a image using the Loader. A ProxyImage is", "__del__(self): try: Clock.unschedule(self._update) except Exception: pass def _set_num_workers(self, num): if", "data = load_callback(filename) elif proto in ('http', 'https', 'ftp', 'smb'):", "by doing:: Loader.loading_image = 'loading.png' .. versionchanged:: 1.6.0 Not readonly", "at the parameters: - :data:`Loader.num_workers` - define the number of", "tempfile data = fd = _out_osfd = None try: _out_filename", "Asynchronous Loader. 
You can use it to load an image", "urllib2 as urllib_request else: import urllib.request as urllib_request proto =", "data = fd = _out_osfd = None try: _out_filename =", "image) # update client for c_filename, client in self._client[:]: if", "1.6.0 ''' def _get_loading_image(self): if not self._loading_image: loading_png_fn = join(kivy_data_dir,", "kwargs.get('loaded') def on_load(self): pass class LoaderBase(object): '''Common base for Loader", "join(kivy_data_dir, 'images', 'image-loading.gif') self._loading_image = ImageLoader.load(filename=loading_png_fn) return self._loading_image def _set_loading_image(self,", "the first time self._q_load.appendleft({ 'filename': filename, 'load_callback': load_callback, 'post_callback': post_callback,", "proxyImage.image.texture def build(self): proxyImage = Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image = Image()", "to update client.image = image client.loaded = True client.dispatch('on_load') self._client.remove((c_filename,", "if data are not yet available. You must specify a", "the DDS format. .. 
versionadded:: 1.6.0 ''' def _get_loading_image(self): if", "except IndexError: return # create the image image = data", "self._client.remove((c_filename, client)) self._trigger_update() def image(self, filename, load_callback=None, post_callback=None, **kwargs): '''Load", "is None: # if data is None, this is really", "data not in (None, False): # found image, if data", "sleep from os.path import join from os import write, close,", "'Loader: can not load PySMB: make sure it is installed')", "download error try: close(_out_osfd) except OSError: pass return self.error_image finally:", "def pause(self): '''Pause the loader, can be useful during interactions", "super(LoaderThreadPool, self).__init__() self.pool = None def start(self): super(LoaderThreadPool, self).start() self.pool", "image(self, filename, load_callback=None, post_callback=None, **kwargs): '''Load a image using the", "for c_filename, client in self._client[:]: if filename != c_filename: continue", "per image') self._max_upload_per_frame = num def _get_max_upload_per_frame(self): return self._max_upload_per_frame max_upload_per_frame", "do:: Loader.loading_image = Image('another_loading.png') Tweaking the asynchronous loader -------------------------------- ..", "depending of the images you're gonna to load. 
Take a", "image = Loader.image('mysprite.png') You can also load image from url::", "FIXME create a clean API for that for imdata in", "return ImageLoader.load(filename, keep_data=True, **kwargs) def _load_urllib(self, filename, kwargs): '''(internal) Loading", "If you are uploading many tiny images, you can easily", "to local filename write(_out_osfd, idata) close(_out_osfd) _out_osfd = None #", "= False self._start_wanted = False self._trigger_update = Clock.create_trigger(self._update) def __del__(self):", "filename) # close file when remote file not found or", "'atlas://data/images/defaulttheme/image-missing') self._error_image = ImageLoader.load(filename=error_png_fn) return self._error_image def _set_error_image(self, image): if", "self).stop() Clock.unschedule(self.run) self.pool.stop() def run(self, *largs): while self._running: try: parameters", "and pass to the client''' # want to start it", "use it, even if data are not yet available. You", "filename.split(':', 1)[0] except: #if blank filename then return return if", "num < 2: raise Exception('Must have at least 2 workers')", "can be useful during interactions .. versionadded:: 1.6.0 ''' self._paused", "anything. if self._paused: self._trigger_update() return for x in range(self.max_upload_per_frame): try:", "found image, if data is not here, need to reload.", "less than 25 FPS. ''' def __init__(self): self._loading_image = None", "property(_get_max_upload_per_frame, _set_max_upload_per_frame) '''Number of image to upload per frame. By", "import urllib.request as urllib_request proto = filename.split(':', 1)[0] if proto", "are loading multiples Full-HD images, the upload time can be", "= False # in pause mode, don't unqueue anything. 
if", "the loader.''' pass def stop(self): '''Stop the loader thread/process''' self._running", "number was 20, and loading many full-hd images was blocking", "from a queue ''' def __init__(self, num_threads): super(_ThreadPool, self).__init__() self.running", "limit=500, timeout=60) class ProxyImage(Image): '''Image returned by the Loader.image() function.", "for error. You can change it by doing:: Loader.error_image =", "for giving a smooth user experience. You could increase the", "You must specify a default loading image for using a", "images will be loaded faster, but the user will not", "data otherwise, # we might be unable to recreate the", "activated mipmap=True too, then the GPU must calculate the mipmap", "our first choice for loader # from kivy.compat import queue", "num def _get_num_workers(self): return self._num_workers num_workers = property(_get_num_workers, _set_num_workers) '''Number", "self._resume_cond.wait(0.25) self._resume_cond.release() def _load(self, kwargs): '''(internal) Loading function, called by", "or more. 
If you are loading multiples Full-HD images, the", "for imdata in data._data: imdata.source = filename except Exception: Logger.exception('Failed", "consuming tasks from a queue ''' def __init__(self, num_threads): super(_ThreadPool,", "range(num_threads): _Worker(self, self.tasks) def add_task(self, func, *args, **kargs): '''Add a", "filename, image) # update client for c_filename, client in self._client[:]:", "default loading image, you can do:: Loader.loading_image = Image('another_loading.png') Tweaking", "SMBHandler every time # otherwise the data is occasionaly not", "e: print(e) self.tasks.task_done() class _ThreadPool(object): '''Pool of threads consuming tasks", "at least 1 image processing per image') self._max_upload_per_frame = num", "# we might be unable to recreate the texture afterwise.", "use it to load an image and use it, even", "* self._num_workers): sleep(0.1) self._wait_for_resume() filename = kwargs['filename'] load_callback = kwargs['load_callback']", "'''Start the loader thread/process''' self._running = True def run(self, *largs):", "return if load_callback is not None: data = load_callback(filename) elif", "def _set_num_workers(self, num): if num < 2: raise Exception('Must have", "import Clock from kivy.cache import Cache from kivy.core.image import ImageLoader,", "or more performance, depending of the images you're gonna to", "all background loading, call *Loader.stop()*. 
''' data = Cache.get('kv.loader', filename)", "from a given tasks queue ''' def __init__(self, pool, tasks):", "temporary file, and pass it to _load_local()''' if PY2: import", "self._load_local(filename, kwargs['kwargs']) if post_callback: data = post_callback(data) self._q_done.appendleft((filename, data)) self._trigger_update()", "impact:: from kivy.loader import Loader Loader.num_workers = 4 The default", "continue # got one client to update client.image = image", "= queue.Queue() for _ in range(num_threads): _Worker(self, self.tasks) def add_task(self,", "self._running: self.start() self._start_wanted = False # in pause mode, don't", "property(_get_num_workers, _set_num_workers) '''Number of workers to use while loading. (used", "of image to upload per frame. By default, we'll upload", "kivy.uix.image import Image from kivy.loader import Loader class TestApp(App): def", "it ? if self._start_wanted: if not self._running: self.start() self._start_wanted =", "super(_ThreadPool, self).__init__() self.running = True self.tasks = queue.Queue() for _", "< 2: raise Exception('Must have at least 2 workers') self._num_workers", "can change it by doing:: Loader.error_image = 'error.png' .. versionchanged::", "images too, in real time. Then it can be smart", "have less than 25 FPS. ''' def __init__(self): self._loading_image =", "data = Cache.get('kv.loader', filename) if data not in (None, False):", "remote file not found or download error try: close(_out_osfd) except", "on_load(self): pass class LoaderBase(object): '''Common base for Loader and specific", "loaded=True, **kwargs) client = ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs) self._client.append((filename, client)) if", "for loading. You can change it by doing:: Loader.loading_image =", "func, *args, **kargs): '''Add a task to the queue '''", "base for Loader and specific implementation. 
By default, Loader will", "pool, tasks): Thread.__init__(self) self.tasks = tasks self.daemon = True self.pool", "post_callback, 'kwargs': kwargs}) if not kwargs.get('nocache', False): Cache.append('kv.loader', filename, False)", "if the file is local, or _load_urllib() if the file", "self._loading_image = None self._error_image = None self._num_workers = 2 self._max_upload_per_frame", "client.image = image client.loaded = True client.dispatch('on_load') self._client.remove((c_filename, client)) self._trigger_update()", "loaded, and pass to the client''' # want to start", "can now tweak the loader to have a better user", "'post_callback': post_callback, 'kwargs': kwargs}) if not kwargs.get('nocache', False): Cache.append('kv.loader', filename,", "to load image <%s>' % filename) # close file when", "then all the images will be loaded faster, but the", "threads to start for loading images - :data:`Loader.max_upload_per_frame` - define", "call *Loader.stop()*. ''' data = Cache.get('kv.loader', filename) if data not", "load data data = self._load_local(_out_filename, kwargs) # FIXME create a", "False) super(ProxyImage, self).__init__(arg, **kwargs) self.loaded = kwargs.get('loaded') def on_load(self): pass", "Register a cache for loader Cache.register('kv.loader', limit=500, timeout=60) class ProxyImage(Image):", "# With recent changes to CoreImage, we must keep data", "_out_filename = tempfile.mkstemp( prefix='kivyloader', suffix=suffix) if proto == 'smb': #", "fd = None # write to local filename write(_out_osfd, idata)", "LoaderThreadPool(LoaderBase): def __init__(self): super(LoaderThreadPool, self).__init__() self.pool = None def start(self):", "take a look at the DDS format. .. versionadded:: 1.6.0", "try: parameters = self._q_load.pop() except: return self.pool.add_task(self._load, parameters) Loader =", "clean API for that for imdata in data._data: imdata.source =", "image will take ~6MB in memory, so it will take", "can stuck the application during the upload. 
If you want", "called by the thread. Will call _load_local() if the file", "for loading images - :data:`Loader.max_upload_per_frame` - define the maximum image", "isinstance(image, basestring): self._error_image = ImageLoader.load(filename=image) else: self._error_image = image error_image", "client to update client.image = image client.loaded = True client.dispatch('on_load')", "This setting impact the loader only at the beginning. Once", "mipmap=True too, then the GPU must calculate the mipmap of", "you can do:: Loader.loading_image = Image('another_loading.png') Tweaking the asynchronous loader", "else: import urllib.request as urllib_request proto = filename.split(':', 1)[0] if", "images, you can easily increase this parameter to 10, or", "unqueue anything. if self._paused: self._trigger_update() return for x in range(self.max_upload_per_frame):", "used for loading. You can change it by doing:: Loader.loading_image", "= True self.pool = pool self.start() def run(self): while self.pool.running:", "have at least 2 workers') self._num_workers = num def _get_num_workers(self):", "''' Asynchronous data loader ======================== This is the Asynchronous Loader.", "if the file is on Internet ''' while len(self._q_done) >=", "not load PySMB: make sure it is installed') return import", "to use pygame as our first choice for loader #", "def _set_error_image(self, image): if isinstance(image, basestring): self._error_image = ImageLoader.load(filename=image) else:", "an image and use it, even if data are not", "_out_osfd = None # load data data = self._load_local(_out_filename, kwargs)", "and use it, even if data are not yet available.", "not None: data = load_callback(filename) elif proto in ('http', 'https',", "while self._running: try: parameters = self._q_load.pop() except: return self.pool.add_task(self._load, parameters)", "experience or more performance, depending of the images you're gonna", "def __init__(self): self._loading_image = None self._error_image = 
None self._num_workers =", "time can be consequent, and can stuck the application during", "specific implementation. By default, Loader will be the best available", "= '.%s' % (filename.split('.')[-1]) _out_osfd, _out_filename = tempfile.mkstemp( prefix='kivyloader', suffix=suffix)", "big images too, in real time. Then it can be", "create a clean API for that for imdata in data._data:", "otherwise, # we might be unable to recreate the texture", "look at the parameters: - :data:`Loader.num_workers` - define the number", "unlink, environ import threading # Register a cache for loader", "self._load_urllib(filename, kwargs['kwargs']) else: data = self._load_local(filename, kwargs['kwargs']) if post_callback: data", "None: # if data is None, this is really the", "def _get_loading_image(self): if not self._loading_image: loading_png_fn = join(kivy_data_dir, 'images', 'image-loading.gif')", "TestApp().run() In order to cancel all background loading, call *Loader.stop()*.", "# in pause mode, don't unqueue anything. if self._paused: self._trigger_update()", "want to start it ? if self._start_wanted: if not self._running:", "to reduce the :data:`max_upload_per_frame` to 1 or 2. If you", "per frame. By default, we'll upload only 2 images in", "from kivy.loader import Loader Loader.num_workers = 4 The default value", "is on Internet ''' while len(self._q_done) >= ( self.max_upload_per_frame *", "from kivy.compat import queue from threading import Thread class _Worker(Thread):", "a data is loaded, and pass to the client''' #", "sure it is installed') return import tempfile data = fd", "Loader.num_workers = 4 The default value is 2 for giving", "for _ in range(num_threads): _Worker(self, self.tasks) def add_task(self, func, *args,", "texture afterwise. 
return ImageLoader.load(filename, keep_data=True, **kwargs) def _load_urllib(self, filename, kwargs):", "urllib_request proto = filename.split(':', 1)[0] if proto == 'smb': try:", "= ('Loader', 'LoaderBase', 'ProxyImage') from kivy import kivy_data_dir from kivy.logger", "is local, or _load_urllib() if the file is on Internet", "in ('http', 'https', 'ftp', 'smb'): data = self._load_urllib(filename, kwargs['kwargs']) else:", "self._start_wanted = True self._trigger_update() else: # already queued for loading", "_out_osfd = None try: _out_filename = '' suffix = '.%s'", "as urllib_request else: import urllib.request as urllib_request proto = filename.split(':',", "ProxyImage(data) if not image.nocache: Cache.append('kv.loader', filename, image) # update client", "need to reload. return ProxyImage(data, loading_image=self.loading_image, loaded=True, **kwargs) client =", "queued for loading pass return client # # Loader implementation", "memory, so it will take times. If you have activated", "1 / 25.s or each frame if we have less", "if we have less than 25 FPS. 
''' def __init__(self):", "OSError: pass return self.error_image finally: if fd: fd.close() if _out_osfd:", "can use it as follows:: from kivy.app import App from", "loading_png_fn = join(kivy_data_dir, 'images', 'image-loading.gif') self._loading_image = ImageLoader.load(filename=loading_png_fn) return self._loading_image", "proxyImage.image.texture: self.image.texture = proxyImage.image.texture def build(self): proxyImage = Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded)", "self._error_image = ImageLoader.load(filename=image) else: self._error_image = image error_image = property(_get_error_image,", "the images will be loaded faster, but the user will", "_get_num_workers(self): return self._num_workers num_workers = property(_get_num_workers, _set_num_workers) '''Number of workers", "fd = urllib_request.urlopen(filename) idata = fd.read() fd.close() fd = None", "1)[0] if proto == 'smb': try: # note: it's important", "be unable to recreate the texture afterwise. return ImageLoader.load(filename, keep_data=True,", "self._wait_for_resume() filename = kwargs['filename'] load_callback = kwargs['load_callback'] post_callback = kwargs['post_callback']", "'ftp', 'smb'): data = self._load_urllib(filename, kwargs['kwargs']) else: data = self._load_local(filename,", "# Try to use pygame as our first choice for", "`loaded`: bool, default to False It can be True if", "kwargs): '''(internal) Loading a network file. 
First download it, save", "If you want to change the default loading image, you", "False self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release() def _wait_for_resume(self): while self._running and self._paused:", "You can use it as follows:: from kivy.app import App", "kivy.core.image import ImageLoader, Image from kivy.compat import PY2 from collections", "2: raise Exception('Must have at least 2 workers') self._num_workers =", "data # ProxyImage(data) if not image.nocache: Cache.append('kv.loader', filename, image) #", "are not yet available. You must specify a default loading", "in (None, False): # found image, if data is not", "In order to cancel all background loading, call *Loader.stop()*. '''", "_set_max_upload_per_frame(self, num): if num is not None and num <", "Not readonly anymore. ''' def start(self): '''Start the loader thread/process'''", "be True if the image is already cached :Events: `on_load`", "= kwargs.get('loaded') def on_load(self): pass class LoaderBase(object): '''Common base for", "increase the number of workers, then all the images will", "basestring): self._loading_image = ImageLoader.load(filename=image) else: self._loading_image = image loading_image =", "load_callback is not None: data = load_callback(filename) elif proto in", "= False self._resume_cond = threading.Condition() self._q_load = deque() self._q_done =", "= Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image = Image() return self.image TestApp().run() In", "we might be unable to recreate the texture afterwise. return", "Internet ''' while len(self._q_done) >= ( self.max_upload_per_frame * self._num_workers): sleep(0.1)", "filename != c_filename: continue # got one client to update", "returned by the Loader.image() function. 
:Properties: `loaded`: bool, default to", "stop(self): super(LoaderThreadPool, self).stop() Clock.unschedule(self.run) self.pool.stop() def run(self, *largs): while self._running:", "a look at the parameters: - :data:`Loader.num_workers` - define the", "self._q_done = deque() self._client = [] self._running = False self._start_wanted", "else: # already queued for loading pass return client #", "resume(self): '''Resume the loader, after a :meth:`pause`. .. versionadded:: 1.6.0", "self).__init__(arg, **kwargs) self.loaded = kwargs.get('loaded') def on_load(self): pass class LoaderBase(object):", "self._loading_image: loading_png_fn = join(kivy_data_dir, 'images', 'image-loading.gif') self._loading_image = ImageLoader.load(filename=loading_png_fn) return", "samba shares fd = urllib_request.build_opener(SMBHandler).open(filename) else: # read from internet", "threading import Thread class _Worker(Thread): '''Thread executing tasks from a", "the file is local, or _load_urllib() if the file is", "change it by doing:: Loader.loading_image = 'loading.png' .. versionchanged:: 1.6.0", "= urllib_request.urlopen(filename) idata = fd.read() fd.close() fd = None #", "= load_callback(filename) elif proto in ('http', 'https', 'ftp', 'smb'): data", "from collections import deque from time import sleep from os.path", "default loading image for using a such loader:: from kivy", "Once the loader is started, the setting has no impact::", "data)) self._trigger_update() def _load_local(self, filename, kwargs): '''(internal) Loading a local", "ImageLoader, Image from kivy.compat import PY2 from collections import deque", "client # # Loader implementation # if 'KIVY_DOC' in environ:", "implementation. The _update() function is called every 1 / 25.s", "the loader, after a :meth:`pause`. .. versionadded:: 1.6.0 ''' self._paused", "time import sleep from os.path import join from os import", "to upload per frame. 
By default, we'll upload only 2", "def _get_max_upload_per_frame(self): return self._max_upload_per_frame max_upload_per_frame = property(_get_max_upload_per_frame, _set_max_upload_per_frame) '''Number of", "not None and num < 1: raise Exception('Must have at", "= tasks self.daemon = True self.pool = pool self.start() def", "from os.path import join from os import write, close, unlink,", "# FIXME create a clean API for that for imdata", "= True self._trigger_update() else: # already queued for loading pass", "application. .. versionadded:: 1.6.0 ''' def _set_max_upload_per_frame(self, num): if num", "range(self.max_upload_per_frame): try: filename, data = self._q_done.pop() except IndexError: return #", "frame. If you are uploading many tiny images, you can", "image client.loaded = True client.dispatch('on_load') self._client.remove((c_filename, client)) self._trigger_update() def image(self,", "= deque() self._client = [] self._running = False self._start_wanted =", "self._max_upload_per_frame = 2 self._paused = False self._resume_cond = threading.Condition() self._q_load", "in range(self.max_upload_per_frame): try: filename, data = self._q_done.pop() except IndexError: return", "# want to start it ? if self._start_wanted: if not", "is not here, need to reload. return ProxyImage(data, loading_image=self.loading_image, loaded=True,", "= self.tasks.get() try: func(*args, **kargs) except Exception as e: print(e)", "not been able to use the application while loading. 
Prior", "close file when remote file not found or download error", "self._load_local(_out_filename, kwargs) # FIXME create a clean API for that", "'images', 'image-loading.gif') self._loading_image = ImageLoader.load(filename=loading_png_fn) return self._loading_image def _set_loading_image(self, image):", "kwargs): '''(internal) Loading a local file''' # With recent changes", "**kargs) except Exception as e: print(e) self.tasks.task_done() class _ThreadPool(object): '''Pool", "c_filename: continue # got one client to update client.image =", "if not self._loading_image: loading_png_fn = join(kivy_data_dir, 'images', 'image-loading.gif') self._loading_image =", "a temporary file, and pass it to _load_local()''' if PY2:", "can be smart to reduce the :data:`max_upload_per_frame` to 1 or", "__init__(self): self._loading_image = None self._error_image = None self._num_workers = 2", "If you want a smooth experience, let the default. As", "function. :Properties: `loaded`: bool, default to False It can be", "= None else: # # Try to use pygame as", "self.error_image finally: if fd: fd.close() if _out_osfd: close(_out_osfd) if _out_filename", "= Cache.get('kv.loader', filename) if data not in (None, False): #", "it can be smart to reduce the :data:`max_upload_per_frame` to 1", "the Loader.image() function. :Properties: `loaded`: bool, default to False It", "to do per frames. 
''' __all__ = ('Loader', 'LoaderBase', 'ProxyImage')", "loading images - :data:`Loader.max_upload_per_frame` - define the maximum image uploads", "join from os import write, close, unlink, environ import threading", "def on_load(self): pass class LoaderBase(object): '''Common base for Loader and", "from kivy.app import App from kivy.uix.image import Image from kivy.loader", "while self.pool.running: func, args, kargs = self.tasks.get() try: func(*args, **kargs)", "= fd.read() fd.close() fd = None # write to local", "already queued for loading pass return client # # Loader", "'''Resume the loader, after a :meth:`pause`. .. versionadded:: 1.6.0 '''", "image <%s>' % filename) # close file when remote file", "it.). This setting impact the loader only at the beginning.", "for loading pass return client # # Loader implementation #", "image = data # ProxyImage(data) if not image.nocache: Cache.append('kv.loader', filename,", "data is None: # if data is None, this is", "loader to have a better user experience or more performance,", "You could increase the number of workers, then all the", "each frame if we have less than 25 FPS. '''", "choice for loader # from kivy.compat import queue from threading", "parameters) Loader = LoaderThreadPool() Logger.info('Loader: using a thread pool of", "- :data:`Loader.num_workers` - define the number of threads to start", "in the GPU per frame. If you are uploading many", "filename, 'load_callback': load_callback, 'post_callback': post_callback, 'kwargs': kwargs}) if not kwargs.get('nocache',", "to recreate the texture afterwise. return ImageLoader.load(filename, keep_data=True, **kwargs) def", ".. versionadded:: 1.6.0 You can now tweak the loader to", "be the best available loader implementation. 
The _update() function is", "False self.tasks.join() class LoaderThreadPool(LoaderBase): def __init__(self): super(LoaderThreadPool, self).__init__() self.pool =", "image, you can do:: Loader.loading_image = Image('another_loading.png') Tweaking the asynchronous", "if the loader implementation support it.). This setting impact the", "self._num_workers = 2 self._max_upload_per_frame = 2 self._paused = False self._resume_cond", "when remote file not found or download error try: close(_out_osfd)", "# note: it's important to load SMBHandler every time #", "implementation support it.). This setting impact the loader only at", "return data def _update(self, *largs): '''(internal) Check if a data", "ImportError: Logger.warning( 'Loader: can not load PySMB: make sure it", "urllib_request.urlopen(filename) idata = fd.read() fd.close() fd = None # write", "- define the number of threads to start for loading", "ImageLoader.load(filename=image) else: self._error_image = image error_image = property(_get_error_image, _set_error_image) '''Image", "image is already cached :Events: `on_load` Fired when the image", "_set_loading_image) '''Image used for loading. You can change it by", "on Internet ''' while len(self._q_done) >= ( self.max_upload_per_frame * self._num_workers):", "self.daemon = True self.pool = pool self.start() def run(self): while", "queue ''' self.tasks.put((func, args, kargs)) def stop(self): self.running = False", "1.6.0, the default number was 20, and loading many full-hd", "= image error_image = property(_get_error_image, _set_error_image) '''Image used for error.", "= [] self._running = False self._start_wanted = False self._trigger_update =", "of that (or reduce it a lot), take a look", "kwargs['post_callback'] try: proto = filename.split(':', 1)[0] except: #if blank filename", "build(self): proxyImage = Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image = Image() return self.image", "the upload. 
If you want a smooth experience, let the", "= image loading_image = property(_get_loading_image, _set_loading_image) '''Image used for loading.", "use while loading. (used only if the loader implementation support", "stop(self): '''Stop the loader thread/process''' self._running = False def pause(self):", "the loader is started, the setting has no impact:: from", "if num < 2: raise Exception('Must have at least 2", "error_image = property(_get_error_image, _set_error_image) '''Image used for error. You can", "Cache.append('kv.loader', filename, False) self._start_wanted = True self._trigger_update() else: # already", "write, close, unlink, environ import threading # Register a cache", "x in range(self.max_upload_per_frame): try: filename, data = self._q_done.pop() except IndexError:", "anymore. ''' def _get_error_image(self): if not self._error_image: error_png_fn = join(", "environ: Loader = None else: # # Try to use", "try: _out_filename = '' suffix = '.%s' % (filename.split('.')[-1]) _out_osfd,", "want to change the default loading image, you can do::", "at least 2 workers') self._num_workers = num def _get_num_workers(self): return", "not self._error_image: error_png_fn = join( 'atlas://data/images/defaulttheme/image-missing') self._error_image = ImageLoader.load(filename=error_png_fn) return", "pause(self): '''Pause the loader, can be useful during interactions ..", "self.image.texture = proxyImage.image.texture def build(self): proxyImage = Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image", "def run(self, *largs): while self._running: try: parameters = self._q_load.pop() except:", "image = Loader.image('http://mysite.com/test.png') If you want to change the default", "''' def _get_error_image(self): if not self._error_image: error_png_fn = join( 'atlas://data/images/defaulttheme/image-missing')", "if 'KIVY_DOC' in environ: Loader = None else: # #", "# # Try to use pygame as our first choice", "the image is already cached 
:Events: `on_load` Fired when the", "= 2 self._max_upload_per_frame = 2 self._paused = False self._resume_cond =", "'image-loading.gif') self._loading_image = ImageLoader.load(filename=loading_png_fn) return self._loading_image def _set_loading_image(self, image): if", "can change it by doing:: Loader.loading_image = 'loading.png' .. versionchanged::", "loading_image=self.loading_image, **kwargs) self._client.append((filename, client)) if data is None: # if", "def __init__(self): super(LoaderThreadPool, self).__init__() self.pool = None def start(self): super(LoaderThreadPool,", "`on_load` Fired when the image is loaded and changed '''", "threading.Condition() self._q_load = deque() self._q_done = deque() self._client = []", "import Loader class TestApp(App): def _image_loaded(self, proxyImage): if proxyImage.image.texture: self.image.texture", "def run(self, *largs): '''Main loop for the loader.''' pass def", "loader:: from kivy import * image = Loader.image('mysprite.png') You can", "doing:: Loader.loading_image = 'loading.png' .. versionchanged:: 1.6.0 Not readonly anymore.", "to _load_local()''' if PY2: import urllib2 as urllib_request else: import", "workers') self._num_workers = num def _get_num_workers(self): return self._num_workers num_workers =", "FPS. ''' def __init__(self): self._loading_image = None self._error_image = None", "loader implementation support it.). This setting impact the loader only", "A ProxyImage is returned with a loading image. You can", "import App from kivy.uix.image import Image from kivy.loader import Loader", "or _load_urllib() if the file is on Internet ''' while", "maximum image uploads in GPU to do per frames. '''", "'''Pool of threads consuming tasks from a queue ''' def", "= kwargs['post_callback'] try: proto = filename.split(':', 1)[0] except: #if blank", "useful during interactions .. 
versionadded:: 1.6.0 ''' self._paused = True", "urllib_request.build_opener(SMBHandler).open(filename) else: # read from internet fd = urllib_request.urlopen(filename) idata", "print(e) self.tasks.task_done() class _ThreadPool(object): '''Pool of threads consuming tasks from", "you want to change the default loading image, you can", "not in (None, False): # found image, if data is", "'''Thread executing tasks from a given tasks queue ''' def", "= None try: _out_filename = '' suffix = '.%s' %", "time # otherwise the data is occasionaly not loaded from", "kivy.compat import PY2 from collections import deque from time import", "self.start() self._start_wanted = False # in pause mode, don't unqueue", "= None def start(self): super(LoaderThreadPool, self).start() self.pool = _ThreadPool(self._num_workers) Clock.schedule_interval(self.run,", "smb.SMBHandler import SMBHandler except ImportError: Logger.warning( 'Loader: can not load", "load image from url:: image = Loader.image('http://mysite.com/test.png') If you want", "kwargs['load_callback'] post_callback = kwargs['post_callback'] try: proto = filename.split(':', 1)[0] except:", "close, unlink, environ import threading # Register a cache for", "the Loader. A ProxyImage is returned with a loading image.", "will take times. If you have activated mipmap=True too, then", "# Register a cache for loader Cache.register('kv.loader', limit=500, timeout=60) class", "0) def stop(self): super(LoaderThreadPool, self).stop() Clock.unschedule(self.run) self.pool.stop() def run(self, *largs):", "= ImageLoader.load(filename=error_png_fn) return self._error_image def _set_error_image(self, image): if isinstance(image, basestring):", "if num is not None and num < 1: raise", "full-hd images was blocking completly the application. .. versionadded:: 1.6.0", "'''Image used for error. You can change it by doing::", "loader implementation. The _update() function is called every 1 /", "per frames. 
''' __all__ = ('Loader', 'LoaderBase', 'ProxyImage') from kivy", "you are loading multiples Full-HD images, the upload time can", "the client''' # want to start it ? if self._start_wanted:", "is returned with a loading image. You can use it", "loader # from kivy.compat import queue from threading import Thread", "2 workers') self._num_workers = num def _get_num_workers(self): return self._num_workers num_workers", "*args, **kargs): '''Add a task to the queue ''' self.tasks.put((func,", "return self.image TestApp().run() In order to cancel all background loading,", "2 self._max_upload_per_frame = 2 self._paused = False self._resume_cond = threading.Condition()", "__events__ = ('on_load', ) def __init__(self, arg, **kwargs): kwargs.setdefault('loaded', False)", "filename then return return if load_callback is not None: data", "self.image TestApp().run() In order to cancel all background loading, call", ">= ( self.max_upload_per_frame * self._num_workers): sleep(0.1) self._wait_for_resume() filename = kwargs['filename']", "self._loading_image = ImageLoader.load(filename=image) else: self._loading_image = image loading_image = property(_get_loading_image,", "is loaded and changed ''' __events__ = ('on_load', ) def", "kwargs['kwargs']) if post_callback: data = post_callback(data) self._q_done.appendleft((filename, data)) self._trigger_update() def", "else: # # Try to use pygame as our first", "kivy import * image = Loader.image('mysprite.png') You can also load", "ImageLoader.load(filename=error_png_fn) return self._error_image def _set_error_image(self, image): if isinstance(image, basestring): self._error_image", "def _get_num_workers(self): return self._num_workers num_workers = property(_get_num_workers, _set_num_workers) '''Number of", "the mipmap of this big images too, in real time.", "= Image() return self.image TestApp().run() In order to cancel all", "= pool self.start() def run(self): while self.pool.running: func, args, kargs", "are uploading many tiny 
images, you can easily increase this", "You can change it by doing:: Loader.loading_image = 'loading.png' ..", "1 or 2. If you get ride of that (or", "after a :meth:`pause`. .. versionadded:: 1.6.0 ''' self._paused = False", "PY2: import urllib2 as urllib_request else: import urllib.request as urllib_request", "data is occasionaly not loaded from smb.SMBHandler import SMBHandler except", "Loader class TestApp(App): def _image_loaded(self, proxyImage): if proxyImage.image.texture: self.image.texture =", "IndexError: return # create the image image = data #", "= deque() self._q_done = deque() self._client = [] self._running =", "'''Common base for Loader and specific implementation. By default, Loader", "data = self._q_done.pop() except IndexError: return # create the image", "self._num_workers): sleep(0.1) self._wait_for_resume() filename = kwargs['filename'] load_callback = kwargs['load_callback'] post_callback", "task to the queue ''' self.tasks.put((func, args, kargs)) def stop(self):", "(filename.split('.')[-1]) _out_osfd, _out_filename = tempfile.mkstemp( prefix='kivyloader', suffix=suffix) if proto ==", ":Events: `on_load` Fired when the image is loaded and changed", "('Loader', 'LoaderBase', 'ProxyImage') from kivy import kivy_data_dir from kivy.logger import", "self._max_upload_per_frame = num def _get_max_upload_per_frame(self): return self._max_upload_per_frame max_upload_per_frame = property(_get_max_upload_per_frame,", "it, even if data are not yet available. You must", "client''' # want to start it ? 
if self._start_wanted: if", "# ProxyImage(data) if not image.nocache: Cache.append('kv.loader', filename, image) # update", "pass return self.error_image finally: if fd: fd.close() if _out_osfd: close(_out_osfd)", "make sure it is installed') return import tempfile data =", "Will call _load_local() if the file is local, or _load_urllib()", "= self._q_load.pop() except: return self.pool.add_task(self._load, parameters) Loader = LoaderThreadPool() Logger.info('Loader:", "*largs): '''(internal) Check if a data is loaded, and pass", "'''(internal) Check if a data is loaded, and pass to", "times. If you have activated mipmap=True too, then the GPU", "must specify a default loading image for using a such", "_ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0) def stop(self): super(LoaderThreadPool, self).stop() Clock.unschedule(self.run) self.pool.stop() def", "def __init__(self, arg, **kwargs): kwargs.setdefault('loaded', False) super(ProxyImage, self).__init__(arg, **kwargs) self.loaded", "self._trigger_update() else: # already queued for loading pass return client", "will take ~6MB in memory, so it will take times.", "as follows:: from kivy.app import App from kivy.uix.image import Image", "ride of that (or reduce it a lot), take a", "data loader ======================== This is the Asynchronous Loader. You can", "calculate the mipmap of this big images too, in real", "image using the Loader. A ProxyImage is returned with a", "num): if num is not None and num < 1:", "ProxyImage is returned with a loading image. 
You can use", "super(ProxyImage, self).__init__(arg, **kwargs) self.loaded = kwargs.get('loaded') def on_load(self): pass class", "= proxyImage.image.texture def build(self): proxyImage = Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image =", "as urllib_request proto = filename.split(':', 1)[0] if proto == 'smb':", "self._running = True def run(self, *largs): '''Main loop for the", "Thread.__init__(self) self.tasks = tasks self.daemon = True self.pool = pool", "'': unlink(_out_filename) return data def _update(self, *largs): '''(internal) Check if", "afterwise. return ImageLoader.load(filename, keep_data=True, **kwargs) def _load_urllib(self, filename, kwargs): '''(internal)", "threading # Register a cache for loader Cache.register('kv.loader', limit=500, timeout=60)", "**kwargs) self._client.append((filename, client)) if data is None: # if data", "# update client for c_filename, client in self._client[:]: if filename", "the image image = data # ProxyImage(data) if not image.nocache:", "property(_get_error_image, _set_error_image) '''Image used for error. You can change it", "that (or reduce it a lot), take a look at", "= tempfile.mkstemp( prefix='kivyloader', suffix=suffix) if proto == 'smb': # read", "**kwargs) self.loaded = kwargs.get('loaded') def on_load(self): pass class LoaderBase(object): '''Common", "you can easily increase this parameter to 10, or more.", "completly the application. .. versionadded:: 1.6.0 ''' def _set_max_upload_per_frame(self, num):", "Loading a network file. 
First download it, save it to", "a clean API for that for imdata in data._data: imdata.source", "data = post_callback(data) self._q_done.appendleft((filename, data)) self._trigger_update() def _load_local(self, filename, kwargs):", "_set_loading_image(self, image): if isinstance(image, basestring): self._loading_image = ImageLoader.load(filename=image) else: self._loading_image", "tweak the loader to have a better user experience or", "= self._load_local(_out_filename, kwargs) # FIXME create a clean API for", "smooth experience, let the default. As matter of fact, a", "__init__(self): super(LoaderThreadPool, self).__init__() self.pool = None def start(self): super(LoaderThreadPool, self).start()", "client for c_filename, client in self._client[:]: if filename != c_filename:", "define the number of threads to start for loading images", "recreate the texture afterwise. return ImageLoader.load(filename, keep_data=True, **kwargs) def _load_urllib(self,", "1.6.0 ''' self._paused = True def resume(self): '''Resume the loader,", "upload time can be consequent, and can stuck the application", "'''Add a task to the queue ''' self.tasks.put((func, args, kargs))", "= True def resume(self): '''Resume the loader, after a :meth:`pause`.", "_ThreadPool(object): '''Pool of threads consuming tasks from a queue '''", "_wait_for_resume(self): while self._running and self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release() def _load(self,", "real time. Then it can be smart to reduce the", "True if the image is already cached :Events: `on_load` Fired", "if self._paused: self._trigger_update() return for x in range(self.max_upload_per_frame): try: filename,", "have a better user experience or more performance, depending of", "file. 
First download it, save it to a temporary file,", "important to load SMBHandler every time # otherwise the data", "first choice for loader # from kivy.compat import queue from", "= ImageLoader.load(filename=loading_png_fn) return self._loading_image def _set_loading_image(self, image): if isinstance(image, basestring):", "client.dispatch('on_load') self._client.remove((c_filename, client)) self._trigger_update() def image(self, filename, load_callback=None, post_callback=None, **kwargs):", "queue.Queue() for _ in range(num_threads): _Worker(self, self.tasks) def add_task(self, func,", ".. versionadded:: 1.6.0 ''' def _set_max_upload_per_frame(self, num): if num is", "loading many full-hd images was blocking completly the application. ..", "2 images in the GPU per frame. If you are", "try: close(_out_osfd) except OSError: pass return self.error_image finally: if fd:", "self._running = False def pause(self): '''Pause the loader, can be", "''' def _get_loading_image(self): if not self._loading_image: loading_png_fn = join(kivy_data_dir, 'images',", "# read from internet fd = urllib_request.urlopen(filename) idata = fd.read()", "!= '': unlink(_out_filename) return data def _update(self, *largs): '''(internal) Check", "not kwargs.get('nocache', False): Cache.append('kv.loader', filename, False) self._start_wanted = True self._trigger_update()", "= 4 The default value is 2 for giving a", "**kwargs) def _load_urllib(self, filename, kwargs): '''(internal) Loading a network file.", "def _wait_for_resume(self): while self._running and self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release() def", "False self._resume_cond = threading.Condition() self._q_load = deque() self._q_done = deque()", "if the image is already cached :Events: `on_load` Fired when", "*largs): '''Main loop for the loader.''' pass def stop(self): '''Stop", "the loader, can be useful during interactions .. 
versionadded:: 1.6.0", "if data is None, this is really the first time", "can not load PySMB: make sure it is installed') return", "self._start_wanted = False self._trigger_update = Clock.create_trigger(self._update) def __del__(self): try: Clock.unschedule(self._update)", "if load_callback is not None: data = load_callback(filename) elif proto", "os.path import join from os import write, close, unlink, environ", "elif proto in ('http', 'https', 'ftp', 'smb'): data = self._load_urllib(filename,", "installed') return import tempfile data = fd = _out_osfd =", "a queue ''' def __init__(self, num_threads): super(_ThreadPool, self).__init__() self.running =", "*Loader.stop()*. ''' data = Cache.get('kv.loader', filename) if data not in", "more performance, depending of the images you're gonna to load.", "You can now tweak the loader to have a better", "25 FPS. ''' def __init__(self): self._loading_image = None self._error_image =", "self).__init__() self.running = True self.tasks = queue.Queue() for _ in", "not found or download error try: close(_out_osfd) except OSError: pass", "= _out_osfd = None try: _out_filename = '' suffix =", "for loader Cache.register('kv.loader', limit=500, timeout=60) class ProxyImage(Image): '''Image returned by", "= ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs) self._client.append((filename, client)) if data is None:", "during interactions .. versionadded:: 1.6.0 ''' self._paused = True def", "or each frame if we have less than 25 FPS.", "experience, let the default. As matter of fact, a Full-HD", "unlink(_out_filename) return data def _update(self, *largs): '''(internal) Check if a", "can be True if the image is already cached :Events:", "experience. You could increase the number of workers, then all", "def _update(self, *largs): '''(internal) Check if a data is loaded,", "Loader.loading_image = Image('another_loading.png') Tweaking the asynchronous loader -------------------------------- .. 
versionadded::", "kivy.loader import Loader Loader.num_workers = 4 The default value is", "close(_out_osfd) if _out_filename != '': unlink(_out_filename) return data def _update(self,", "import SMBHandler except ImportError: Logger.warning( 'Loader: can not load PySMB:", "Cache from kivy.core.image import ImageLoader, Image from kivy.compat import PY2", "the loader thread/process''' self._running = True def run(self, *largs): '''Main", "to 1.6.0, the default number was 20, and loading many", "user experience or more performance, depending of the images you're", "class LoaderThreadPool(LoaderBase): def __init__(self): super(LoaderThreadPool, self).__init__() self.pool = None def", "= True self.tasks = queue.Queue() for _ in range(num_threads): _Worker(self,", "raise Exception('Must have at least 2 workers') self._num_workers = num", "self._paused: self._trigger_update() return for x in range(self.max_upload_per_frame): try: filename, data", "def _image_loaded(self, proxyImage): if proxyImage.image.texture: self.image.texture = proxyImage.image.texture def build(self):", "application while loading. Prior to 1.6.0, the default number was", "_update(self, *largs): '''(internal) Check if a data is loaded, and", "to the queue ''' self.tasks.put((func, args, kargs)) def stop(self): self.running", "pass class LoaderBase(object): '''Common base for Loader and specific implementation.", ".. versionchanged:: 1.6.0 Not readonly anymore. ''' def _get_error_image(self): if", "mipmap of this big images too, in real time. Then", "num def _get_max_upload_per_frame(self): return self._max_upload_per_frame max_upload_per_frame = property(_get_max_upload_per_frame, _set_max_upload_per_frame) '''Number", "def build(self): proxyImage = Loader.image(\"myPic.jpg\") proxyImage.bind(on_load=self._image_loaded) self.image = Image() return", "it by doing:: Loader.loading_image = 'loading.png' .. 
versionchanged:: 1.6.0 Not", "data is loaded, and pass to the client''' # want", "is already cached :Events: `on_load` Fired when the image is", "= property(_get_loading_image, _set_loading_image) '''Image used for loading. You can change", "too, in real time. Then it can be smart to", "'''(internal) Loading function, called by the thread. Will call _load_local()", "'''Pause the loader, can be useful during interactions .. versionadded::", "be loaded faster, but the user will not been able", "Loader.image('mysprite.png') You can also load image from url:: image =", "Clock.unschedule(self._update) except Exception: pass def _set_num_workers(self, num): if num <", "self.loaded = kwargs.get('loaded') def on_load(self): pass class LoaderBase(object): '''Common base", "if not image.nocache: Cache.append('kv.loader', filename, image) # update client for", "the application. .. versionadded:: 1.6.0 ''' def _set_max_upload_per_frame(self, num): if", "while loading. (used only if the loader implementation support it.).", "**kwargs): kwargs.setdefault('loaded', False) super(ProxyImage, self).__init__(arg, **kwargs) self.loaded = kwargs.get('loaded') def", "PySMB: make sure it is installed') return import tempfile data", "self.pool.stop() def run(self, *largs): while self._running: try: parameters = self._q_load.pop()", "kargs)) def stop(self): self.running = False self.tasks.join() class LoaderThreadPool(LoaderBase): def", "been able to use the application while loading. Prior to", "GPU must calculate the mipmap of this big images too,", "Loader = LoaderThreadPool() Logger.info('Loader: using a thread pool of {}", "a local file''' # With recent changes to CoreImage, we", "''' def _set_max_upload_per_frame(self, num): if num is not None and", "by doing:: Loader.error_image = 'error.png' .. versionchanged:: 1.6.0 Not readonly", "self.pool.running: func, args, kargs = self.tasks.get() try: func(*args, **kargs) except", "loading. 
You can change it by doing:: Loader.loading_image = 'loading.png'", "Tweaking the asynchronous loader -------------------------------- .. versionadded:: 1.6.0 You can", "the number of threads to start for loading images -", "data def _update(self, *largs): '''(internal) Check if a data is", "''' __events__ = ('on_load', ) def __init__(self, arg, **kwargs): kwargs.setdefault('loaded',", ".. versionchanged:: 1.6.0 Not readonly anymore. ''' def start(self): '''Start", "least 1 image processing per image') self._max_upload_per_frame = num def", "API for that for imdata in data._data: imdata.source = filename", "then the GPU must calculate the mipmap of this big", "if not self._error_image: error_png_fn = join( 'atlas://data/images/defaulttheme/image-missing') self._error_image = ImageLoader.load(filename=error_png_fn)", "save it to a temporary file, and pass it to", "# otherwise the data is occasionaly not loaded from smb.SMBHandler", "is None, this is really the first time self._q_load.appendleft({ 'filename':", "data is None, this is really the first time self._q_load.appendleft({", "ImageLoader.load(filename, keep_data=True, **kwargs) def _load_urllib(self, filename, kwargs): '''(internal) Loading a", "but the user will not been able to use the", "class _ThreadPool(object): '''Pool of threads consuming tasks from a queue", "2 for giving a smooth user experience. You could increase", "client.loaded = True client.dispatch('on_load') self._client.remove((c_filename, client)) self._trigger_update() def image(self, filename,", "of threads to start for loading images - :data:`Loader.max_upload_per_frame` -", "4 The default value is 2 for giving a smooth", "kwargs): '''(internal) Loading function, called by the thread. 
Will call", "Clock.create_trigger(self._update) def __del__(self): try: Clock.unschedule(self._update) except Exception: pass def _set_num_workers(self,", "LoaderThreadPool() Logger.info('Loader: using a thread pool of {} workers'.format( Loader.num_workers))", "environ import threading # Register a cache for loader Cache.register('kv.loader',", "from smb.SMBHandler import SMBHandler except ImportError: Logger.warning( 'Loader: can not", "proto == 'smb': # read from samba shares fd =", "_load_urllib(self, filename, kwargs): '''(internal) Loading a network file. First download", "data are not yet available. You must specify a default", "DDS format. .. versionadded:: 1.6.0 ''' def _get_loading_image(self): if not", "versionchanged:: 1.6.0 Not readonly anymore. ''' def _get_error_image(self): if not", "image is loaded and changed ''' __events__ = ('on_load', )", "many tiny images, you can easily increase this parameter to", "it to a temporary file, and pass it to _load_local()'''", "kwargs) # FIXME create a clean API for that for", "from threading import Thread class _Worker(Thread): '''Thread executing tasks from", "= Image('another_loading.png') Tweaking the asynchronous loader -------------------------------- .. versionadded:: 1.6.0", "_Worker(Thread): '''Thread executing tasks from a given tasks queue '''", "not yet available. You must specify a default loading image", "will not been able to use the application while loading.", "self._paused = False self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release() def _wait_for_resume(self): while self._running", "loaded faster, but the user will not been able to", "- :data:`Loader.max_upload_per_frame` - define the maximum image uploads in GPU", "import tempfile data = fd = _out_osfd = None try:", "implementation. 
By default, Loader will be the best available loader", "local filename write(_out_osfd, idata) close(_out_osfd) _out_osfd = None # load", "fd.close() if _out_osfd: close(_out_osfd) if _out_filename != '': unlink(_out_filename) return", "might be unable to recreate the texture afterwise. return ImageLoader.load(filename,", "one client to update client.image = image client.loaded = True", "/ 25.s or each frame if we have less than", "Exception: pass def _set_num_workers(self, num): if num < 2: raise", "return self._num_workers num_workers = property(_get_num_workers, _set_num_workers) '''Number of workers to", "self).start() self.pool = _ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0) def stop(self): super(LoaderThreadPool, self).stop()", "False): # found image, if data is not here, need", "the file is on Internet ''' while len(self._q_done) >= (", "image): if isinstance(image, basestring): self._loading_image = ImageLoader.load(filename=image) else: self._loading_image =", "if data is None: # if data is None, this", "self._trigger_update = Clock.create_trigger(self._update) def __del__(self): try: Clock.unschedule(self._update) except Exception: pass", "load SMBHandler every time # otherwise the data is occasionaly", "Clock.schedule_interval(self.run, 0) def stop(self): super(LoaderThreadPool, self).stop() Clock.unschedule(self.run) self.pool.stop() def run(self,", "'ProxyImage') from kivy import kivy_data_dir from kivy.logger import Logger from", "user will not been able to use the application while", "ImageLoader.load(filename=image) else: self._loading_image = image loading_image = property(_get_loading_image, _set_loading_image) '''Image", "returned with a loading image. 
You can use it as", "# write to local filename write(_out_osfd, idata) close(_out_osfd) _out_osfd =", "deque() self._q_done = deque() self._client = [] self._running = False", "not self._running: self.start() self._start_wanted = False # in pause mode,", "self.image = Image() return self.image TestApp().run() In order to cancel", "suffix = '.%s' % (filename.split('.')[-1]) _out_osfd, _out_filename = tempfile.mkstemp( prefix='kivyloader',", "really the first time self._q_load.appendleft({ 'filename': filename, 'load_callback': load_callback, 'post_callback':", "= property(_get_error_image, _set_error_image) '''Image used for error. You can change", "Exception as e: print(e) self.tasks.task_done() class _ThreadPool(object): '''Pool of threads", "False # in pause mode, don't unqueue anything. if self._paused:", "tasks queue ''' def __init__(self, pool, tasks): Thread.__init__(self) self.tasks =", "return return if load_callback is not None: data = load_callback(filename)", "False def pause(self): '''Pause the loader, can be useful during", "# if data is None, this is really the first", "download it, save it to a temporary file, and pass", "number of threads to start for loading images - :data:`Loader.max_upload_per_frame`", "class LoaderBase(object): '''Common base for Loader and specific implementation. By", "True self.pool = pool self.start() def run(self): while self.pool.running: func,", "it to _load_local()''' if PY2: import urllib2 as urllib_request else:", "function, called by the thread. Will call _load_local() if the", "except Exception: pass def _set_num_workers(self, num): if num < 2:", "impact the loader only at the beginning. Once the loader", "return client # # Loader implementation # if 'KIVY_DOC' in", "Image() return self.image TestApp().run() In order to cancel all background", "#if blank filename then return return if load_callback is not", "setting impact the loader only at the beginning. Once the", "loader, can be useful during interactions .. 
versionadded:: 1.6.0 '''", "upload only 2 images in the GPU per frame. If", ":data:`max_upload_per_frame` to 1 or 2. If you get ride of", "kwargs['kwargs']) else: data = self._load_local(filename, kwargs['kwargs']) if post_callback: data =", "import Thread class _Worker(Thread): '''Thread executing tasks from a given", "and pass it to _load_local()''' if PY2: import urllib2 as", ":data:`Loader.num_workers` - define the number of threads to start for", "= Clock.create_trigger(self._update) def __del__(self): try: Clock.unschedule(self._update) except Exception: pass def", "keep data otherwise, # we might be unable to recreate", "finally: if fd: fd.close() if _out_osfd: close(_out_osfd) if _out_filename !=", "to 10, or more. If you are loading multiples Full-HD", "( self.max_upload_per_frame * self._num_workers): sleep(0.1) self._wait_for_resume() filename = kwargs['filename'] load_callback", "in environ: Loader = None else: # # Try to", "= ImageLoader.load(filename=image) else: self._error_image = image error_image = property(_get_error_image, _set_error_image)", "self._start_wanted: if not self._running: self.start() self._start_wanted = False # in", "be consequent, and can stuck the application during the upload.", "''' data = Cache.get('kv.loader', filename) if data not in (None,", "loader thread/process''' self._running = True def run(self, *largs): '''Main loop", "else: data = self._load_local(filename, kwargs['kwargs']) if post_callback: data = post_callback(data)", "to use while loading. (used only if the loader implementation", "self._error_image = None self._num_workers = 2 self._max_upload_per_frame = 2 self._paused", "self.tasks.task_done() class _ThreadPool(object): '''Pool of threads consuming tasks from a", "user experience. You could increase the number of workers, then", "a network file. 
First download it, save it to a", "pass return client # # Loader implementation # if 'KIVY_DOC'", "num < 1: raise Exception('Must have at least 1 image", "and can stuck the application during the upload. If you", "True self._trigger_update() else: # already queued for loading pass return", "for x in range(self.max_upload_per_frame): try: filename, data = self._q_done.pop() except", "'''Number of image to upload per frame. By default, we'll", "from time import sleep from os.path import join from os", "from kivy.logger import Logger from kivy.clock import Clock from kivy.cache", "changed ''' __events__ = ('on_load', ) def __init__(self, arg, **kwargs):", "= None self._num_workers = 2 self._max_upload_per_frame = 2 self._paused =", "to reload. return ProxyImage(data, loading_image=self.loading_image, loaded=True, **kwargs) client = ProxyImage(self.loading_image,", "image error_image = property(_get_error_image, _set_error_image) '''Image used for error. You", "executing tasks from a given tasks queue ''' def __init__(self,", "start(self): super(LoaderThreadPool, self).start() self.pool = _ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0) def stop(self):", "loader is started, the setting has no impact:: from kivy.loader", "_update() function is called every 1 / 25.s or each", "post_callback = kwargs['post_callback'] try: proto = filename.split(':', 1)[0] except: #if", "def add_task(self, func, *args, **kargs): '''Add a task to the", "data is not here, need to reload. 
return ProxyImage(data, loading_image=self.loading_image,", "''' def __init__(self, num_threads): super(_ThreadPool, self).__init__() self.running = True self.tasks", "args, kargs = self.tasks.get() try: func(*args, **kargs) except Exception as", "if _out_osfd: close(_out_osfd) if _out_filename != '': unlink(_out_filename) return data", "a such loader:: from kivy import * image = Loader.image('mysprite.png')", "args, kargs)) def stop(self): self.running = False self.tasks.join() class LoaderThreadPool(LoaderBase):", "note: it's important to load SMBHandler every time # otherwise", "load. Take a look at the parameters: - :data:`Loader.num_workers` -", "and changed ''' __events__ = ('on_load', ) def __init__(self, arg,", "# read from samba shares fd = urllib_request.build_opener(SMBHandler).open(filename) else: #", "from kivy.compat import PY2 from collections import deque from time", "number of workers, then all the images will be loaded", "local, or _load_urllib() if the file is on Internet '''", "if self._start_wanted: if not self._running: self.start() self._start_wanted = False #", "write(_out_osfd, idata) close(_out_osfd) _out_osfd = None # load data data", "try: filename, data = self._q_done.pop() except IndexError: return # create", "default, Loader will be the best available loader implementation. The", "will be loaded faster, but the user will not been", "idata = fd.read() fd.close() fd = None # write to", "('http', 'https', 'ftp', 'smb'): data = self._load_urllib(filename, kwargs['kwargs']) else: data", "1.6.0 Not readonly anymore. 
''' def start(self): '''Start the loader", "= data # ProxyImage(data) if not image.nocache: Cache.append('kv.loader', filename, image)", ":data:`Loader.max_upload_per_frame` - define the maximum image uploads in GPU to", "post_callback(data) self._q_done.appendleft((filename, data)) self._trigger_update() def _load_local(self, filename, kwargs): '''(internal) Loading", "file is on Internet ''' while len(self._q_done) >= ( self.max_upload_per_frame", "num): if num < 2: raise Exception('Must have at least", "start(self): '''Start the loader thread/process''' self._running = True def run(self,", "filename.split(':', 1)[0] if proto == 'smb': try: # note: it's", "best available loader implementation. The _update() function is called every", "or 2. If you get ride of that (or reduce", "% filename) # close file when remote file not found", "_load_local(self, filename, kwargs): '''(internal) Loading a local file''' # With", "self._max_upload_per_frame max_upload_per_frame = property(_get_max_upload_per_frame, _set_max_upload_per_frame) '''Number of image to upload", "change it by doing:: Loader.error_image = 'error.png' .. versionchanged:: 1.6.0", "every time # otherwise the data is occasionaly not loaded", "the :data:`max_upload_per_frame` to 1 or 2. If you get ride", "def run(self): while self.pool.running: func, args, kargs = self.tasks.get() try:", "a image using the Loader. A ProxyImage is returned with", "the thread. Will call _load_local() if the file is local,", "'error.png' .. versionchanged:: 1.6.0 Not readonly anymore. ''' def start(self):", "load_callback(filename) elif proto in ('http', 'https', 'ftp', 'smb'): data =", "default. 
As matter of fact, a Full-HD RGB image will", "= property(_get_num_workers, _set_num_workers) '''Number of workers to use while loading.", "1 image processing per image') self._max_upload_per_frame = num def _get_max_upload_per_frame(self):", "<%s>' % filename) # close file when remote file not", "= LoaderThreadPool() Logger.info('Loader: using a thread pool of {} workers'.format(", "pool self.start() def run(self): while self.pool.running: func, args, kargs =", "It can be True if the image is already cached", "in GPU to do per frames. ''' __all__ = ('Loader',", "False self._trigger_update = Clock.create_trigger(self._update) def __del__(self): try: Clock.unschedule(self._update) except Exception:", "isinstance(image, basestring): self._loading_image = ImageLoader.load(filename=image) else: self._loading_image = image loading_image", "a task to the queue ''' self.tasks.put((func, args, kargs)) def", "available loader implementation. The _update() function is called every 1", "this is really the first time self._q_load.appendleft({ 'filename': filename, 'load_callback':", "proto = filename.split(':', 1)[0] except: #if blank filename then return", "basestring): self._error_image = ImageLoader.load(filename=image) else: self._error_image = image error_image =", "this parameter to 10, or more. If you are loading", "it's important to load SMBHandler every time # otherwise the", "return self._error_image def _set_error_image(self, image): if isinstance(image, basestring): self._error_image =", "can easily increase this parameter to 10, or more. If", "is not None and num < 1: raise Exception('Must have", "be smart to reduce the :data:`max_upload_per_frame` to 1 or 2.", "to cancel all background loading, call *Loader.stop()*. ''' data =", "pass to the client''' # want to start it ?", "__init__(self, pool, tasks): Thread.__init__(self) self.tasks = tasks self.daemon = True", "to a temporary file, and pass it to _load_local()''' if", "for Loader and specific implementation. 
By default, Loader will be", "it to load an image and use it, even if", "idata) close(_out_osfd) _out_osfd = None # load data data =", "the beginning. Once the loader is started, the setting has", "Logger from kivy.clock import Clock from kivy.cache import Cache from", "_out_osfd, _out_filename = tempfile.mkstemp( prefix='kivyloader', suffix=suffix) if proto == 'smb':", "deque() self._client = [] self._running = False self._start_wanted = False", "tasks from a given tasks queue ''' def __init__(self, pool,", "kwargs['filename'] load_callback = kwargs['load_callback'] post_callback = kwargs['post_callback'] try: proto =", "Cache.append('kv.loader', filename, image) # update client for c_filename, client in", "images in the GPU per frame. If you are uploading", "GPU per frame. If you are uploading many tiny images,", "error_png_fn = join( 'atlas://data/images/defaulttheme/image-missing') self._error_image = ImageLoader.load(filename=error_png_fn) return self._error_image def", "the application while loading. Prior to 1.6.0, the default number", "PY2 from collections import deque from time import sleep from", "Loader.image('http://mysite.com/test.png') If you want to change the default loading image,", "_out_filename = '' suffix = '.%s' % (filename.split('.')[-1]) _out_osfd, _out_filename", "the parameters: - :data:`Loader.num_workers` - define the number of threads", "-------------------------------- .. versionadded:: 1.6.0 You can now tweak the loader", "tasks): Thread.__init__(self) self.tasks = tasks self.daemon = True self.pool =", "a lot), take a look at the DDS format. 
..", "= urllib_request.build_opener(SMBHandler).open(filename) else: # read from internet fd = urllib_request.urlopen(filename)", "for the loader.''' pass def stop(self): '''Stop the loader thread/process'''", "while self._running and self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release() def _load(self, kwargs):", "versionadded:: 1.6.0 ''' def _set_max_upload_per_frame(self, num): if num is not", "such loader:: from kivy import * image = Loader.image('mysprite.png') You", "import write, close, unlink, environ import threading # Register a", "self._paused = True def resume(self): '''Resume the loader, after a", "the queue ''' self.tasks.put((func, args, kargs)) def stop(self): self.running =", "in pause mode, don't unqueue anything. if self._paused: self._trigger_update() return", "'load_callback': load_callback, 'post_callback': post_callback, 'kwargs': kwargs}) if not kwargs.get('nocache', False):", "start for loading images - :data:`Loader.max_upload_per_frame` - define the maximum", "= self._load_local(filename, kwargs['kwargs']) if post_callback: data = post_callback(data) self._q_done.appendleft((filename, data))", "filename, kwargs): '''(internal) Loading a local file''' # With recent", "# close file when remote file not found or download", "from kivy.uix.image import Image from kivy.loader import Loader class TestApp(App):", "if isinstance(image, basestring): self._loading_image = ImageLoader.load(filename=image) else: self._loading_image = image", "loader.''' pass def stop(self): '''Stop the loader thread/process''' self._running =", "Fired when the image is loaded and changed ''' __events__", "the application during the upload. If you want a smooth", "import * image = Loader.image('mysprite.png') You can also load image", "support it.). This setting impact the loader only at the", "import deque from time import sleep from os.path import join", "loading. (used only if the loader implementation support it.). 
This", "? if self._start_wanted: if not self._running: self.start() self._start_wanted = False", "is called every 1 / 25.s or each frame if", "except ImportError: Logger.warning( 'Loader: can not load PySMB: make sure", "If you are loading multiples Full-HD images, the upload time", ":Properties: `loaded`: bool, default to False It can be True", "- define the maximum image uploads in GPU to do", "1: raise Exception('Must have at least 1 image processing per", "to the client''' # want to start it ? if", "keep_data=True, **kwargs) def _load_urllib(self, filename, kwargs): '''(internal) Loading a network", "image to upload per frame. By default, we'll upload only", "a Full-HD RGB image will take ~6MB in memory, so", "if post_callback: data = post_callback(data) self._q_done.appendleft((filename, data)) self._trigger_update() def _load_local(self,", "to start for loading images - :data:`Loader.max_upload_per_frame` - define the", "Try to use pygame as our first choice for loader", "be useful during interactions .. versionadded:: 1.6.0 ''' self._paused =", "change the default loading image, you can do:: Loader.loading_image =", "cancel all background loading, call *Loader.stop()*. ''' data = Cache.get('kv.loader',", "deque from time import sleep from os.path import join from", "''' self._paused = True def resume(self): '''Resume the loader, after", "every 1 / 25.s or each frame if we have", "self._error_image = image error_image = property(_get_error_image, _set_error_image) '''Image used for", "= self._load_urllib(filename, kwargs['kwargs']) else: data = self._load_local(filename, kwargs['kwargs']) if post_callback:", "upload per frame. 
By default, we'll upload only 2 images", "= threading.Condition() self._q_load = deque() self._q_done = deque() self._client =", "1.6.0 ''' self._paused = False self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release() def _wait_for_resume(self):", "imdata in data._data: imdata.source = filename except Exception: Logger.exception('Failed to", "too, then the GPU must calculate the mipmap of this", "and num < 1: raise Exception('Must have at least 1", "return # create the image image = data # ProxyImage(data)", "self._q_done.appendleft((filename, data)) self._trigger_update() def _load_local(self, filename, kwargs): '''(internal) Loading a", "the default. As matter of fact, a Full-HD RGB image", "'''Main loop for the loader.''' pass def stop(self): '''Stop the", "you are uploading many tiny images, you can easily increase", "kwargs.setdefault('loaded', False) super(ProxyImage, self).__init__(arg, **kwargs) self.loaded = kwargs.get('loaded') def on_load(self):", "= ImageLoader.load(filename=image) else: self._loading_image = image loading_image = property(_get_loading_image, _set_loading_image)", "return self._loading_image def _set_loading_image(self, image): if isinstance(image, basestring): self._loading_image =", "thread. Will call _load_local() if the file is local, or", "specify a default loading image for using a such loader::", "a better user experience or more performance, depending of the", "tasks from a queue ''' def __init__(self, num_threads): super(_ThreadPool, self).__init__()", "proto in ('http', 'https', 'ftp', 'smb'): data = self._load_urllib(filename, kwargs['kwargs'])", "Image('another_loading.png') Tweaking the asynchronous loader -------------------------------- .. versionadded:: 1.6.0 You", "readonly anymore. ''' def start(self): '''Start the loader thread/process''' self._running", "while loading. Prior to 1.6.0, the default number was 20,", "in real time. Then it can be smart to reduce", "a look at the DDS format. 
.. versionadded:: 1.6.0 '''", "ProxyImage(data, loading_image=self.loading_image, loaded=True, **kwargs) client = ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs) self._client.append((filename,", "try: Clock.unschedule(self._update) except Exception: pass def _set_num_workers(self, num): if num", "the GPU must calculate the mipmap of this big images", "def __init__(self, num_threads): super(_ThreadPool, self).__init__() self.running = True self.tasks =", "def _load_urllib(self, filename, kwargs): '''(internal) Loading a network file. First", "can do:: Loader.loading_image = Image('another_loading.png') Tweaking the asynchronous loader --------------------------------", "def start(self): '''Start the loader thread/process''' self._running = True def", "thread/process''' self._running = True def run(self, *largs): '''Main loop for", "== 'smb': # read from samba shares fd = urllib_request.build_opener(SMBHandler).open(filename)", "= _ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0) def stop(self): super(LoaderThreadPool, self).stop() Clock.unschedule(self.run) self.pool.stop()", "self._resume_cond.release() def _load(self, kwargs): '''(internal) Loading function, called by the", "shares fd = urllib_request.build_opener(SMBHandler).open(filename) else: # read from internet fd", "client)) if data is None: # if data is None,", "_out_filename != '': unlink(_out_filename) return data def _update(self, *largs): '''(internal)", "import sleep from os.path import join from os import write,", "RGB image will take ~6MB in memory, so it will", "add_task(self, func, *args, **kargs): '''Add a task to the queue", "workers, then all the images will be loaded faster, but", "the default loading image, you can do:: Loader.loading_image = Image('another_loading.png')", "suffix=suffix) if proto == 'smb': # read from samba shares", "return self._max_upload_per_frame max_upload_per_frame = property(_get_max_upload_per_frame, 
_set_max_upload_per_frame) '''Number of image to", "queue ''' def __init__(self, pool, tasks): Thread.__init__(self) self.tasks = tasks", "import join from os import write, close, unlink, environ import", "Full-HD RGB image will take ~6MB in memory, so it", "_get_error_image(self): if not self._error_image: error_png_fn = join( 'atlas://data/images/defaulttheme/image-missing') self._error_image =", "local file''' # With recent changes to CoreImage, we must", "client in self._client[:]: if filename != c_filename: continue # got", "filename write(_out_osfd, idata) close(_out_osfd) _out_osfd = None # load data", "data data = self._load_local(_out_filename, kwargs) # FIXME create a clean", "increase this parameter to 10, or more. If you are", "tasks self.daemon = True self.pool = pool self.start() def run(self):", "threads consuming tasks from a queue ''' def __init__(self, num_threads):", "_set_error_image) '''Image used for error. You can change it by", "filename except Exception: Logger.exception('Failed to load image <%s>' % filename)", "25.s or each frame if we have less than 25", "cached :Events: `on_load` Fired when the image is loaded and", "= None # write to local filename write(_out_osfd, idata) close(_out_osfd)", ":meth:`pause`. .. versionadded:: 1.6.0 ''' self._paused = False self._resume_cond.acquire() self._resume_cond.notify_all()", "self._error_image: error_png_fn = join( 'atlas://data/images/defaulttheme/image-missing') self._error_image = ImageLoader.load(filename=error_png_fn) return self._error_image", "to start it ? if self._start_wanted: if not self._running: self.start()", "def __del__(self): try: Clock.unschedule(self._update) except Exception: pass def _set_num_workers(self, num):", "consequent, and can stuck the application during the upload. If", "filename, False) self._start_wanted = True self._trigger_update() else: # already queued", "def resume(self): '''Resume the loader, after a :meth:`pause`. .. 
versionadded::", "With recent changes to CoreImage, we must keep data otherwise,", "called every 1 / 25.s or each frame if we", "~6MB in memory, so it will take times. If you", "import Logger from kivy.clock import Clock from kivy.cache import Cache", "Then it can be smart to reduce the :data:`max_upload_per_frame` to", "bool, default to False It can be True if the", "Clock from kivy.cache import Cache from kivy.core.image import ImageLoader, Image", ") def __init__(self, arg, **kwargs): kwargs.setdefault('loaded', False) super(ProxyImage, self).__init__(arg, **kwargs)", "= False self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release() def _wait_for_resume(self): while self._running and", "read from internet fd = urllib_request.urlopen(filename) idata = fd.read() fd.close()", "_set_num_workers(self, num): if num < 2: raise Exception('Must have at", "loader only at the beginning. Once the loader is started,", "(used only if the loader implementation support it.). This setting", "import kivy_data_dir from kivy.logger import Logger from kivy.clock import Clock", "self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release() def _wait_for_resume(self): while self._running and self._paused: self._resume_cond.acquire()", "proto = filename.split(':', 1)[0] if proto == 'smb': try: #", "(or reduce it a lot), take a look at the", "uploading many tiny images, you can easily increase this parameter", "else: # read from internet fd = urllib_request.urlopen(filename) idata =", "kivy.app import App from kivy.uix.image import Image from kivy.loader import", "parameter to 10, or more. If you are loading multiples", "import Cache from kivy.core.image import ImageLoader, Image from kivy.compat import", "network file. 
First download it, save it to a temporary", "as e: print(e) self.tasks.task_done() class _ThreadPool(object): '''Pool of threads consuming", "self.running = False self.tasks.join() class LoaderThreadPool(LoaderBase): def __init__(self): super(LoaderThreadPool, self).__init__()", "easily increase this parameter to 10, or more. If you", "try: func(*args, **kargs) except Exception as e: print(e) self.tasks.task_done() class", "# load data data = self._load_local(_out_filename, kwargs) # FIXME create", "the texture afterwise. return ImageLoader.load(filename, keep_data=True, **kwargs) def _load_urllib(self, filename,", "__init__(self, arg, **kwargs): kwargs.setdefault('loaded', False) super(ProxyImage, self).__init__(arg, **kwargs) self.loaded =", "you get ride of that (or reduce it a lot),", "post_callback: data = post_callback(data) self._q_done.appendleft((filename, data)) self._trigger_update() def _load_local(self, filename,", "loading_image=self.loading_image, loaded=True, **kwargs) client = ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs) self._client.append((filename, client))", "try: # note: it's important to load SMBHandler every time", "''' def __init__(self, pool, tasks): Thread.__init__(self) self.tasks = tasks self.daemon", "reduce the :data:`max_upload_per_frame` to 1 or 2. 
If you get", "a cache for loader Cache.register('kv.loader', limit=500, timeout=60) class ProxyImage(Image): '''Image", "from kivy import * image = Loader.image('mysprite.png') You can also", "return ProxyImage(data, loading_image=self.loading_image, loaded=True, **kwargs) client = ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs)", "None self._error_image = None self._num_workers = 2 self._max_upload_per_frame = 2", "self.tasks) def add_task(self, func, *args, **kargs): '''Add a task to", "CoreImage, we must keep data otherwise, # we might be", "By default, Loader will be the best available loader implementation.", "use the application while loading. Prior to 1.6.0, the default", "smart to reduce the :data:`max_upload_per_frame` to 1 or 2. If", "_load_urllib() if the file is on Internet ''' while len(self._q_done)", "None try: _out_filename = '' suffix = '.%s' % (filename.split('.')[-1])", "'''(internal) Loading a local file''' # With recent changes to", "readonly anymore. ''' def _get_error_image(self): if not self._error_image: error_png_fn =", "start it ? 
if self._start_wanted: if not self._running: self.start() self._start_wanted", "ImageLoader.load(filename=loading_png_fn) return self._loading_image def _set_loading_image(self, image): if isinstance(image, basestring): self._loading_image", "max_upload_per_frame = property(_get_max_upload_per_frame, _set_max_upload_per_frame) '''Number of image to upload per", "is installed') return import tempfile data = fd = _out_osfd", "and self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release() def _load(self, kwargs): '''(internal) Loading", "fd.close() fd = None # write to local filename write(_out_osfd,", "image image = data # ProxyImage(data) if not image.nocache: Cache.append('kv.loader',", "self._error_image def _set_error_image(self, image): if isinstance(image, basestring): self._error_image = ImageLoader.load(filename=image)", "def stop(self): '''Stop the loader thread/process''' self._running = False def", "that for imdata in data._data: imdata.source = filename except Exception:", "= False def pause(self): '''Pause the loader, can be useful", "self._running: try: parameters = self._q_load.pop() except: return self.pool.add_task(self._load, parameters) Loader", "True def run(self, *largs): '''Main loop for the loader.''' pass", "the asynchronous loader -------------------------------- .. versionadded:: 1.6.0 You can now", "fd: fd.close() if _out_osfd: close(_out_osfd) if _out_filename != '': unlink(_out_filename)", "all the images will be loaded faster, but the user", "run(self, *largs): while self._running: try: parameters = self._q_load.pop() except: return", "only 2 images in the GPU per frame. If you", "= 'loading.png' .. versionchanged:: 1.6.0 Not readonly anymore. ''' def", "versionadded:: 1.6.0 You can now tweak the loader to have", "10, or more. 
If you are loading multiples Full-HD images,", "filename = kwargs['filename'] load_callback = kwargs['load_callback'] post_callback = kwargs['post_callback'] try:", "if data is not here, need to reload. return ProxyImage(data,", "to False It can be True if the image is", "self.start() def run(self): while self.pool.running: func, args, kargs = self.tasks.get()", "can use it to load an image and use it,", "1.6.0 You can now tweak the loader to have a", "upload. If you want a smooth experience, let the default.", "TestApp(App): def _image_loaded(self, proxyImage): if proxyImage.image.texture: self.image.texture = proxyImage.image.texture def", "= fd = _out_osfd = None try: _out_filename = ''", "Take a look at the parameters: - :data:`Loader.num_workers` - define", "The _update() function is called every 1 / 25.s or", "yet available. You must specify a default loading image for", "and specific implementation. By default, Loader will be the best", "if proto == 'smb': try: # note: it's important to", "more. If you are loading multiples Full-HD images, the upload", "class _Worker(Thread): '''Thread executing tasks from a given tasks queue", "workers to use while loading. (used only if the loader", "able to use the application while loading. Prior to 1.6.0,", "# found image, if data is not here, need to", "default value is 2 for giving a smooth user experience.", "default number was 20, and loading many full-hd images was", "occasionaly not loaded from smb.SMBHandler import SMBHandler except ImportError: Logger.warning(", "None: data = load_callback(filename) elif proto in ('http', 'https', 'ftp',", "fact, a Full-HD RGB image will take ~6MB in memory,", "image loading_image = property(_get_loading_image, _set_loading_image) '''Image used for loading. 
You", "can be consequent, and can stuck the application during the", "ProxyImage(self.loading_image, loading_image=self.loading_image, **kwargs) self._client.append((filename, client)) if data is None: #", "urllib.request as urllib_request proto = filename.split(':', 1)[0] if proto ==", "from kivy import kivy_data_dir from kivy.logger import Logger from kivy.clock", "kwargs.get('nocache', False): Cache.append('kv.loader', filename, False) self._start_wanted = True self._trigger_update() else:", "call _load_local() if the file is local, or _load_urllib() if", "asynchronous loader -------------------------------- .. versionadded:: 1.6.0 You can now tweak", "you want a smooth experience, let the default. As matter", "from url:: image = Loader.image('http://mysite.com/test.png') If you want to change", "self._q_load.appendleft({ 'filename': filename, 'load_callback': load_callback, 'post_callback': post_callback, 'kwargs': kwargs}) if", "read from samba shares fd = urllib_request.build_opener(SMBHandler).open(filename) else: # read", "import urllib2 as urllib_request else: import urllib.request as urllib_request proto", "if not kwargs.get('nocache', False): Cache.append('kv.loader', filename, False) self._start_wanted = True", "tiny images, you can easily increase this parameter to 10,", "we have less than 25 FPS. ''' def __init__(self): self._loading_image", "many full-hd images was blocking completly the application. .. versionadded::", "blank filename then return return if load_callback is not None:", "''' __all__ = ('Loader', 'LoaderBase', 'ProxyImage') from kivy import kivy_data_dir", "to CoreImage, we must keep data otherwise, # we might", "self.pool.add_task(self._load, parameters) Loader = LoaderThreadPool() Logger.info('Loader: using a thread pool", "Prior to 1.6.0, the default number was 20, and loading", "image, if data is not here, need to reload. 
return", "kivy.logger import Logger from kivy.clock import Clock from kivy.cache import", "better user experience or more performance, depending of the images", "Loading a local file''' # With recent changes to CoreImage,", "to have a better user experience or more performance, depending", "to load an image and use it, even if data", "arg, **kwargs): kwargs.setdefault('loaded', False) super(ProxyImage, self).__init__(arg, **kwargs) self.loaded = kwargs.get('loaded')", "time. Then it can be smart to reduce the :data:`max_upload_per_frame`", "You can use it to load an image and use", "LoaderBase(object): '''Common base for Loader and specific implementation. By default,", "will be the best available loader implementation. The _update() function", "Exception('Must have at least 2 workers') self._num_workers = num def", "loader ======================== This is the Asynchronous Loader. You can use", "while len(self._q_done) >= ( self.max_upload_per_frame * self._num_workers): sleep(0.1) self._wait_for_resume() filename", "per frame. If you are uploading many tiny images, you", "_load_local()''' if PY2: import urllib2 as urllib_request else: import urllib.request", "take ~6MB in memory, so it will take times. If", "anymore. ''' def start(self): '''Start the loader thread/process''' self._running =", "loaded from smb.SMBHandler import SMBHandler except ImportError: Logger.warning( 'Loader: can", "loading. 
Prior to 1.6.0, the default number was 20, and", "_ in range(num_threads): _Worker(self, self.tasks) def add_task(self, func, *args, **kargs):", "filename) if data not in (None, False): # found image,", "self.tasks.put((func, args, kargs)) def stop(self): self.running = False self.tasks.join() class", "data = self._load_urllib(filename, kwargs['kwargs']) else: data = self._load_local(filename, kwargs['kwargs']) if", "def _set_loading_image(self, image): if isinstance(image, basestring): self._loading_image = ImageLoader.load(filename=image) else:", "loading image for using a such loader:: from kivy import", "GPU to do per frames. ''' __all__ = ('Loader', 'LoaderBase',", "def _set_max_upload_per_frame(self, num): if num is not None and num", "except Exception as e: print(e) self.tasks.task_done() class _ThreadPool(object): '''Pool of", "loading multiples Full-HD images, the upload time can be consequent,", "load an image and use it, even if data are", "sleep(0.1) self._wait_for_resume() filename = kwargs['filename'] load_callback = kwargs['load_callback'] post_callback =", "else: self._error_image = image error_image = property(_get_error_image, _set_error_image) '''Image used", "return self.pool.add_task(self._load, parameters) Loader = LoaderThreadPool() Logger.info('Loader: using a thread", "the loader implementation support it.). This setting impact the loader", "close(_out_osfd) except OSError: pass return self.error_image finally: if fd: fd.close()", "take times. If you have activated mipmap=True too, then the", "= 'error.png' .. versionchanged:: 1.6.0 Not readonly anymore. ''' def", "run(self): while self.pool.running: func, args, kargs = self.tasks.get() try: func(*args,", "have at least 1 image processing per image') self._max_upload_per_frame =", "smooth user experience. You could increase the number of workers,", "a smooth user experience. 
You could increase the number of", "SMBHandler except ImportError: Logger.warning( 'Loader: can not load PySMB: make", "'smb': # read from samba shares fd = urllib_request.build_opener(SMBHandler).open(filename) else:", "self.pool = _ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0) def stop(self): super(LoaderThreadPool, self).stop() Clock.unschedule(self.run)", "the default number was 20, and loading many full-hd images", "self.pool = pool self.start() def run(self): while self.pool.running: func, args,", "has no impact:: from kivy.loader import Loader Loader.num_workers = 4", "except Exception: Logger.exception('Failed to load image <%s>' % filename) #", "= filename except Exception: Logger.exception('Failed to load image <%s>' %", "filename, data = self._q_done.pop() except IndexError: return # create the", "# # Loader implementation # if 'KIVY_DOC' in environ: Loader", "not image.nocache: Cache.append('kv.loader', filename, image) # update client for c_filename,", "False self._start_wanted = False self._trigger_update = Clock.create_trigger(self._update) def __del__(self): try:", "Loader. A ProxyImage is returned with a loading image. You", "self.pool = None def start(self): super(LoaderThreadPool, self).start() self.pool = _ThreadPool(self._num_workers)", "the loader to have a better user experience or more", "frame if we have less than 25 FPS. 
''' def", "kivy.cache import Cache from kivy.core.image import ImageLoader, Image from kivy.compat", "*largs): while self._running: try: parameters = self._q_load.pop() except: return self.pool.add_task(self._load,", "__all__ = ('Loader', 'LoaderBase', 'ProxyImage') from kivy import kivy_data_dir from", "1.6.0 ''' def _set_max_upload_per_frame(self, num): if num is not None", "self._q_done.pop() except IndexError: return # create the image image =", "except OSError: pass return self.error_image finally: if fd: fd.close() if", "# from kivy.compat import queue from threading import Thread class", "queue from threading import Thread class _Worker(Thread): '''Thread executing tasks", "Cache.get('kv.loader', filename) if data not in (None, False): # found", "pass def stop(self): '''Stop the loader thread/process''' self._running = False", "image.nocache: Cache.append('kv.loader', filename, image) # update client for c_filename, client", "value is 2 for giving a smooth user experience. You", "# if 'KIVY_DOC' in environ: Loader = None else: #", "versionadded:: 1.6.0 ''' self._paused = True def resume(self): '''Resume the", "pause mode, don't unqueue anything. if self._paused: self._trigger_update() return for", "function is called every 1 / 25.s or each frame", "raise Exception('Must have at least 1 image processing per image')", "Loader Loader.num_workers = 4 The default value is 2 for", "the images you're gonna to load. 
Take a look at", "was 20, and loading many full-hd images was blocking completly", "versionadded:: 1.6.0 ''' def _get_loading_image(self): if not self._loading_image: loading_png_fn =", "from os import write, close, unlink, environ import threading #", "_out_osfd: close(_out_osfd) if _out_filename != '': unlink(_out_filename) return data def", "self._trigger_update() return for x in range(self.max_upload_per_frame): try: filename, data =", "kivy.compat import queue from threading import Thread class _Worker(Thread): '''Thread", "property(_get_loading_image, _set_loading_image) '''Image used for loading. You can change it", "= num def _get_num_workers(self): return self._num_workers num_workers = property(_get_num_workers, _set_num_workers)", "images, the upload time can be consequent, and can stuck", "kivy_data_dir from kivy.logger import Logger from kivy.clock import Clock from", "when the image is loaded and changed ''' __events__ =", "None def start(self): super(LoaderThreadPool, self).start() self.pool = _ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0)", "if a data is loaded, and pass to the client'''", "proto == 'smb': try: # note: it's important to load", "= False self._trigger_update = Clock.create_trigger(self._update) def __del__(self): try: Clock.unschedule(self._update) except", "close(_out_osfd) _out_osfd = None # load data data = self._load_local(_out_filename,", "None self._num_workers = 2 self._max_upload_per_frame = 2 self._paused = False", "the loader only at the beginning. Once the loader is", "Exception('Must have at least 1 image processing per image') self._max_upload_per_frame", "timeout=60) class ProxyImage(Image): '''Image returned by the Loader.image() function. :Properties:", "Check if a data is loaded, and pass to the", "stop(self): self.running = False self.tasks.join() class LoaderThreadPool(LoaderBase): def __init__(self): super(LoaderThreadPool,", "error. 
You can change it by doing:: Loader.error_image = 'error.png'", "multiples Full-HD images, the upload time can be consequent, and", "!= c_filename: continue # got one client to update client.image", "kivy.loader import Loader class TestApp(App): def _image_loaded(self, proxyImage): if proxyImage.image.texture:", "the Asynchronous Loader. You can use it to load an", "self._client[:]: if filename != c_filename: continue # got one client", "processing per image') self._max_upload_per_frame = num def _get_max_upload_per_frame(self): return self._max_upload_per_frame", "= num def _get_max_upload_per_frame(self): return self._max_upload_per_frame max_upload_per_frame = property(_get_max_upload_per_frame, _set_max_upload_per_frame)", "tempfile.mkstemp( prefix='kivyloader', suffix=suffix) if proto == 'smb': # read from", "True def resume(self): '''Resume the loader, after a :meth:`pause`. ..", "num_threads): super(_ThreadPool, self).__init__() self.running = True self.tasks = queue.Queue() for", ".. versionadded:: 1.6.0 ''' self._paused = True def resume(self): '''Resume", "# got one client to update client.image = image client.loaded", "changes to CoreImage, we must keep data otherwise, # we", "from kivy.loader import Loader class TestApp(App): def _image_loaded(self, proxyImage): if", "'''Stop the loader thread/process''' self._running = False def pause(self): '''Pause", "'kwargs': kwargs}) if not kwargs.get('nocache', False): Cache.append('kv.loader', filename, False) self._start_wanted", "gonna to load. Take a look at the parameters: -", "[] self._running = False self._start_wanted = False self._trigger_update = Clock.create_trigger(self._update)", "follows:: from kivy.app import App from kivy.uix.image import Image from", "even if data are not yet available. You must specify", "using a such loader:: from kivy import * image =", "load_callback=None, post_callback=None, **kwargs): '''Load a image using the Loader. 
A", "self._loading_image = ImageLoader.load(filename=loading_png_fn) return self._loading_image def _set_loading_image(self, image): if isinstance(image,", "if proto == 'smb': # read from samba shares fd", "the maximum image uploads in GPU to do per frames.", "= '' suffix = '.%s' % (filename.split('.')[-1]) _out_osfd, _out_filename =", "loaded and changed ''' __events__ = ('on_load', ) def __init__(self,", "the setting has no impact:: from kivy.loader import Loader Loader.num_workers", "def stop(self): super(LoaderThreadPool, self).stop() Clock.unschedule(self.run) self.pool.stop() def run(self, *largs): while", "_set_error_image(self, image): if isinstance(image, basestring): self._error_image = ImageLoader.load(filename=image) else: self._error_image", "you have activated mipmap=True too, then the GPU must calculate", "time self._q_load.appendleft({ 'filename': filename, 'load_callback': load_callback, 'post_callback': post_callback, 'kwargs': kwargs})", "first time self._q_load.appendleft({ 'filename': filename, 'load_callback': load_callback, 'post_callback': post_callback, 'kwargs':", "of threads consuming tasks from a queue ''' def __init__(self,", "= property(_get_max_upload_per_frame, _set_max_upload_per_frame) '''Number of image to upload per frame.", "data = self._load_local(_out_filename, kwargs) # FIXME create a clean API", "except: return self.pool.add_task(self._load, parameters) Loader = LoaderThreadPool() Logger.info('Loader: using a", "# create the image image = data # ProxyImage(data) if", "it by doing:: Loader.error_image = 'error.png' .. 
versionchanged:: 1.6.0 Not", "internet fd = urllib_request.urlopen(filename) idata = fd.read() fd.close() fd =", "self._q_load.pop() except: return self.pool.add_task(self._load, parameters) Loader = LoaderThreadPool() Logger.info('Loader: using", "not self._loading_image: loading_png_fn = join(kivy_data_dir, 'images', 'image-loading.gif') self._loading_image = ImageLoader.load(filename=loading_png_fn)", "must calculate the mipmap of this big images too, in", "not here, need to reload. return ProxyImage(data, loading_image=self.loading_image, loaded=True, **kwargs)", "num is not None and num < 1: raise Exception('Must", "must keep data otherwise, # we might be unable to", "fd.read() fd.close() fd = None # write to local filename", "self.tasks = tasks self.daemon = True self.pool = pool self.start()", "frame. By default, we'll upload only 2 images in the", "'''(internal) Loading a network file. First download it, save it", "Not readonly anymore. ''' def _get_error_image(self): if not self._error_image: error_png_fn", "= join( 'atlas://data/images/defaulttheme/image-missing') self._error_image = ImageLoader.load(filename=error_png_fn) return self._error_image def _set_error_image(self,", "filename, load_callback=None, post_callback=None, **kwargs): '''Load a image using the Loader.", "it as follows:: from kivy.app import App from kivy.uix.image import", "no impact:: from kivy.loader import Loader Loader.num_workers = 4 The", "loading image. You can use it as follows:: from kivy.app", "from internet fd = urllib_request.urlopen(filename) idata = fd.read() fd.close() fd", "loading, call *Loader.stop()*. ''' data = Cache.get('kv.loader', filename) if data", "of this big images too, in real time. 
Then it", "self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release() def _load(self, kwargs): '''(internal) Loading function, called", "to load SMBHandler every time # otherwise the data is", "the data is occasionaly not loaded from smb.SMBHandler import SMBHandler", "self._resume_cond.release() def _wait_for_resume(self): while self._running and self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release()", "the best available loader implementation. The _update() function is called", "image): if isinstance(image, basestring): self._error_image = ImageLoader.load(filename=image) else: self._error_image =", "it a lot), take a look at the DDS format.", "use it as follows:: from kivy.app import App from kivy.uix.image", ".. versionadded:: 1.6.0 ''' def _get_loading_image(self): if not self._loading_image: loading_png_fn", "True client.dispatch('on_load') self._client.remove((c_filename, client)) self._trigger_update() def image(self, filename, load_callback=None, post_callback=None,", "ProxyImage(Image): '''Image returned by the Loader.image() function. :Properties: `loaded`: bool,", "None and num < 1: raise Exception('Must have at least", "def _load(self, kwargs): '''(internal) Loading function, called by the thread.", "_get_max_upload_per_frame(self): return self._max_upload_per_frame max_upload_per_frame = property(_get_max_upload_per_frame, _set_max_upload_per_frame) '''Number of image", "loader thread/process''' self._running = False def pause(self): '''Pause the loader,", "the GPU per frame. If you are uploading many tiny", "given tasks queue ''' def __init__(self, pool, tasks): Thread.__init__(self) self.tasks", "define the maximum image uploads in GPU to do per", "You can change it by doing:: Loader.error_image = 'error.png' ..", "at the DDS format. .. versionadded:: 1.6.0 ''' def _get_loading_image(self):", "stuck the application during the upload. 
If you want a", "None, this is really the first time self._q_load.appendleft({ 'filename': filename,", "of fact, a Full-HD RGB image will take ~6MB in", "images was blocking completly the application. .. versionadded:: 1.6.0 '''", "class TestApp(App): def _image_loaded(self, proxyImage): if proxyImage.image.texture: self.image.texture = proxyImage.image.texture", "= Loader.image('http://mysite.com/test.png') If you want to change the default loading", "by the Loader.image() function. :Properties: `loaded`: bool, default to False", "parameters: - :data:`Loader.num_workers` - define the number of threads to", "not loaded from smb.SMBHandler import SMBHandler except ImportError: Logger.warning( 'Loader:", "a smooth experience, let the default. As matter of fact,", "it is installed') return import tempfile data = fd =", "error try: close(_out_osfd) except OSError: pass return self.error_image finally: if", "want a smooth experience, let the default. As matter of", "self.tasks.get() try: func(*args, **kargs) except Exception as e: print(e) self.tasks.task_done()", "started, the setting has no impact:: from kivy.loader import Loader", "load_callback, 'post_callback': post_callback, 'kwargs': kwargs}) if not kwargs.get('nocache', False): Cache.append('kv.loader',", "if PY2: import urllib2 as urllib_request else: import urllib.request as", "''' self._paused = False self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release() def _wait_for_resume(self): while", "'''Number of workers to use while loading. (used only if", "from kivy.clock import Clock from kivy.cache import Cache from kivy.core.image", "a given tasks queue ''' def __init__(self, pool, tasks): Thread.__init__(self)", "the user will not been able to use the application", "loading image, you can do:: Loader.loading_image = Image('another_loading.png') Tweaking the", "now tweak the loader to have a better user experience", "image. 
You can use it as follows:: from kivy.app import", "it will take times. If you have activated mipmap=True too,", "for that for imdata in data._data: imdata.source = filename except", "create the image image = data # ProxyImage(data) if not", "Logger.exception('Failed to load image <%s>' % filename) # close file", "def _get_error_image(self): if not self._error_image: error_png_fn = join( 'atlas://data/images/defaulttheme/image-missing') self._error_image", "thread/process''' self._running = False def pause(self): '''Pause the loader, can", "len(self._q_done) >= ( self.max_upload_per_frame * self._num_workers): sleep(0.1) self._wait_for_resume() filename =", "performance, depending of the images you're gonna to load. Take", "def __init__(self, pool, tasks): Thread.__init__(self) self.tasks = tasks self.daemon =", "# already queued for loading pass return client # #", "also load image from url:: image = Loader.image('http://mysite.com/test.png') If you", "available. You must specify a default loading image for using", "_load_local() if the file is local, or _load_urllib() if the", "background loading, call *Loader.stop()*. ''' data = Cache.get('kv.loader', filename) if", "of workers, then all the images will be loaded faster,", "**kargs): '''Add a task to the queue ''' self.tasks.put((func, args,", "import Image from kivy.loader import Loader class TestApp(App): def _image_loaded(self,", "''' def start(self): '''Start the loader thread/process''' self._running = True", "This is the Asynchronous Loader. You can use it to", "filename, kwargs): '''(internal) Loading a network file. First download it,", "with a loading image. You can use it as follows::", "The default value is 2 for giving a smooth user", "giving a smooth user experience. You could increase the number", "c_filename, client in self._client[:]: if filename != c_filename: continue #", "during the upload. 
If you want a smooth experience, let", "'filename': filename, 'load_callback': load_callback, 'post_callback': post_callback, 'kwargs': kwargs}) if not", "self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25) self._resume_cond.release() def _load(self, kwargs): '''(internal) Loading function,", "the number of workers, then all the images will be", "found or download error try: close(_out_osfd) except OSError: pass return", "= ('on_load', ) def __init__(self, arg, **kwargs): kwargs.setdefault('loaded', False) super(ProxyImage,", "'KIVY_DOC' in environ: Loader = None else: # # Try", "was blocking completly the application. .. versionadded:: 1.6.0 ''' def", "return self.error_image finally: if fd: fd.close() if _out_osfd: close(_out_osfd) if", "_load(self, kwargs): '''(internal) Loading function, called by the thread. Will", "only if the loader implementation support it.). This setting impact", "unable to recreate the texture afterwise. return ImageLoader.load(filename, keep_data=True, **kwargs)", "False) self._start_wanted = True self._trigger_update() else: # already queued for", "in range(num_threads): _Worker(self, self.tasks) def add_task(self, func, *args, **kargs): '''Add", "is not None: data = load_callback(filename) elif proto in ('http',", "interactions .. versionadded:: 1.6.0 ''' self._paused = True def resume(self):", "frames. ''' __all__ = ('Loader', 'LoaderBase', 'ProxyImage') from kivy import", "as our first choice for loader # from kivy.compat import", "False It can be True if the image is already", "''' self.tasks.put((func, args, kargs)) def stop(self): self.running = False self.tasks.join()", "is loaded, and pass to the client''' # want to", "def start(self): super(LoaderThreadPool, self).start() self.pool = _ThreadPool(self._num_workers) Clock.schedule_interval(self.run, 0) def", "here, need to reload. 
return ProxyImage(data, loading_image=self.loading_image, loaded=True, **kwargs) client", "images - :data:`Loader.max_upload_per_frame` - define the maximum image uploads in", "from kivy.cache import Cache from kivy.core.image import ImageLoader, Image from", "self.max_upload_per_frame * self._num_workers): sleep(0.1) self._wait_for_resume() filename = kwargs['filename'] load_callback =", "kwargs}) if not kwargs.get('nocache', False): Cache.append('kv.loader', filename, False) self._start_wanted =", "= post_callback(data) self._q_done.appendleft((filename, data)) self._trigger_update() def _load_local(self, filename, kwargs): '''(internal)", "Cache.register('kv.loader', limit=500, timeout=60) class ProxyImage(Image): '''Image returned by the Loader.image()", "You can also load image from url:: image = Loader.image('http://mysite.com/test.png')", "if _out_filename != '': unlink(_out_filename) return data def _update(self, *largs):", "import PY2 from collections import deque from time import sleep", "Loader will be the best available loader implementation. The _update()", "self._num_workers num_workers = property(_get_num_workers, _set_num_workers) '''Number of workers to use", "from kivy.core.image import ImageLoader, Image from kivy.compat import PY2 from", "in self._client[:]: if filename != c_filename: continue # got one", "self.running = True self.tasks = queue.Queue() for _ in range(num_threads):", "class ProxyImage(Image): '''Image returned by the Loader.image() function. :Properties: `loaded`:", "got one client to update client.image = image client.loaded =", "= True client.dispatch('on_load') self._client.remove((c_filename, client)) self._trigger_update() def image(self, filename, load_callback=None,", "2. If you get ride of that (or reduce it", "look at the DDS format. .. 
versionadded:: 1.6.0 ''' def", "except: #if blank filename then return return if load_callback is", "= 2 self._paused = False self._resume_cond = threading.Condition() self._q_load =", "loading pass return client # # Loader implementation # if", "than 25 FPS. ''' def __init__(self): self._loading_image = None self._error_image", "format. .. versionadded:: 1.6.0 ''' def _get_loading_image(self): if not self._loading_image:", "file''' # With recent changes to CoreImage, we must keep", "import Loader Loader.num_workers = 4 The default value is 2", "mode, don't unqueue anything. if self._paused: self._trigger_update() return for x", "else: self._loading_image = image loading_image = property(_get_loading_image, _set_loading_image) '''Image used", "'smb': try: # note: it's important to load SMBHandler every", "to use the application while loading. Prior to 1.6.0, the", "Exception: Logger.exception('Failed to load image <%s>' % filename) # close", "func(*args, **kargs) except Exception as e: print(e) self.tasks.task_done() class _ThreadPool(object):", "self._resume_cond.notify_all() self._resume_cond.release() def _wait_for_resume(self): while self._running and self._paused: self._resume_cond.acquire() self._resume_cond.wait(0.25)", "image and use it, even if data are not yet", "default, we'll upload only 2 images in the GPU per", "None # load data data = self._load_local(_out_filename, kwargs) # FIXME", "('on_load', ) def __init__(self, arg, **kwargs): kwargs.setdefault('loaded', False) super(ProxyImage, self).__init__(arg,", "reduce it a lot), take a look at the DDS", "prefix='kivyloader', suffix=suffix) if proto == 'smb': # read from samba", "is 2 for giving a smooth user experience. You could", ".. versionadded:: 1.6.0 ''' self._paused = False self._resume_cond.acquire() self._resume_cond.notify_all() self._resume_cond.release()", "As matter of fact, a Full-HD RGB image will take", "you're gonna to load. 
Take a look at the parameters:", "the loader thread/process''' self._running = False def pause(self): '''Pause the", "None # write to local filename write(_out_osfd, idata) close(_out_osfd) _out_osfd", "data = self._load_local(filename, kwargs['kwargs']) if post_callback: data = post_callback(data) self._q_done.appendleft((filename,", "True self.tasks = queue.Queue() for _ in range(num_threads): _Worker(self, self.tasks)", "= join(kivy_data_dir, 'images', 'image-loading.gif') self._loading_image = ImageLoader.load(filename=loading_png_fn) return self._loading_image def", "_Worker(self, self.tasks) def add_task(self, func, *args, **kargs): '''Add a task", "'smb'): data = self._load_urllib(filename, kwargs['kwargs']) else: data = self._load_local(filename, kwargs['kwargs'])", "do per frames. ''' __all__ = ('Loader', 'LoaderBase', 'ProxyImage') from", "cache for loader Cache.register('kv.loader', limit=500, timeout=60) class ProxyImage(Image): '''Image returned", "run(self, *largs): '''Main loop for the loader.''' pass def stop(self):", "= None self._error_image = None self._num_workers = 2 self._max_upload_per_frame =", "reload. return ProxyImage(data, loading_image=self.loading_image, loaded=True, **kwargs) client = ProxyImage(self.loading_image, loading_image=self.loading_image,", "Loader. You can use it to load an image and", "''' def __init__(self): self._loading_image = None self._error_image = None self._num_workers", "import queue from threading import Thread class _Worker(Thread): '''Thread executing", "self._paused = False self._resume_cond = threading.Condition() self._q_load = deque() self._q_done", "= None # load data data = self._load_local(_out_filename, kwargs) #", "_set_max_upload_per_frame) '''Number of image to upload per frame. By default,", "1.6.0 Not readonly anymore. 
''' def _get_error_image(self): if not self._error_image:", "we must keep data otherwise, # we might be unable", "If you have activated mipmap=True too, then the GPU must", "self).__init__() self.pool = None def start(self): super(LoaderThreadPool, self).start() self.pool =", "self._loading_image = image loading_image = property(_get_loading_image, _set_loading_image) '''Image used for", "< 1: raise Exception('Must have at least 1 image processing", "it, save it to a temporary file, and pass it" ]
[ "Pythoman # Here (, ) comma n space is used", "My friends are Pythobit, boy, Pythoman # Here (, )", "space is used as separator, but you can use anything.", "friends are {friend}') # Output - My friends are Pythobit,", "are {friends}.') # Output - My friends are ['Pythobit', 'boy',", "{friend}') # Output - My friends are Pythobit, boy, Pythoman", "boy, Pythoman # Here (, ) comma n space is", "list a bit better friends = ['Pythobit','boy','Pythoman'] print(f'My friends are", "# So, the Output needs to be a bit clearer.", "# 13. Join # it allows to print list a", "- My friends are ['Pythobit', 'boy', 'Pythoman']. # So, the", "Output - My friends are ['Pythobit', 'boy', 'Pythoman']. # So,", "# it allows to print list a bit better friends", "friends are Pythobit, boy, Pythoman # Here (, ) comma", "comma n space is used as separator, but you can", "are {friend}') # Output - My friends are Pythobit, boy,", "Here (, ) comma n space is used as separator,", "friends are {friends}.') # Output - My friends are ['Pythobit',", "- My friends are Pythobit, boy, Pythoman # Here (,", "clearer. friends = ['Pythobit','boy','Pythoman'] friend = ', '.join(friends) print(f'My friends", "', '.join(friends) print(f'My friends are {friend}') # Output - My", "= ['Pythobit','boy','Pythoman'] print(f'My friends are {friends}.') # Output - My", "= ', '.join(friends) print(f'My friends are {friend}') # Output -", "['Pythobit','boy','Pythoman'] friend = ', '.join(friends) print(f'My friends are {friend}') #", "# Output - My friends are ['Pythobit', 'boy', 'Pythoman']. #", "print(f'My friends are {friend}') # Output - My friends are", "are ['Pythobit', 'boy', 'Pythoman']. # So, the Output needs to", "allows to print list a bit better friends = ['Pythobit','boy','Pythoman']", "be a bit clearer. friends = ['Pythobit','boy','Pythoman'] friend = ',", "'.join(friends) print(f'My friends are {friend}') # Output - My friends", "13. Join # it allows to print list a bit", "'boy', 'Pythoman']. 
# So, the Output needs to be a", "a bit clearer. friends = ['Pythobit','boy','Pythoman'] friend = ', '.join(friends)", "bit clearer. friends = ['Pythobit','boy','Pythoman'] friend = ', '.join(friends) print(f'My", "n space is used as separator, but you can use", "{friends}.') # Output - My friends are ['Pythobit', 'boy', 'Pythoman'].", "'Pythoman']. # So, the Output needs to be a bit", "Output needs to be a bit clearer. friends = ['Pythobit','boy','Pythoman']", "a bit better friends = ['Pythobit','boy','Pythoman'] print(f'My friends are {friends}.')", "Pythobit, boy, Pythoman # Here (, ) comma n space", "['Pythobit', 'boy', 'Pythoman']. # So, the Output needs to be", "bit better friends = ['Pythobit','boy','Pythoman'] print(f'My friends are {friends}.') #", "print list a bit better friends = ['Pythobit','boy','Pythoman'] print(f'My friends", "friends = ['Pythobit','boy','Pythoman'] friend = ', '.join(friends) print(f'My friends are", "friend = ', '.join(friends) print(f'My friends are {friend}') # Output", "are Pythobit, boy, Pythoman # Here (, ) comma n", "My friends are ['Pythobit', 'boy', 'Pythoman']. # So, the Output", "print(f'My friends are {friends}.') # Output - My friends are", "friends = ['Pythobit','boy','Pythoman'] print(f'My friends are {friends}.') # Output -", "(, ) comma n space is used as separator, but", ") comma n space is used as separator, but you", "to print list a bit better friends = ['Pythobit','boy','Pythoman'] print(f'My", "# Output - My friends are Pythobit, boy, Pythoman #", "friends are ['Pythobit', 'boy', 'Pythoman']. # So, the Output needs", "Join # it allows to print list a bit better", "# Here (, ) comma n space is used as", "it allows to print list a bit better friends =", "Output - My friends are Pythobit, boy, Pythoman # Here", "better friends = ['Pythobit','boy','Pythoman'] print(f'My friends are {friends}.') # Output", "needs to be a bit clearer. 
friends = ['Pythobit','boy','Pythoman'] friend", "So, the Output needs to be a bit clearer. friends", "to be a bit clearer. friends = ['Pythobit','boy','Pythoman'] friend =", "['Pythobit','boy','Pythoman'] print(f'My friends are {friends}.') # Output - My friends", "= ['Pythobit','boy','Pythoman'] friend = ', '.join(friends) print(f'My friends are {friend}')", "the Output needs to be a bit clearer. friends =" ]
[ "you want to have custom builds, copy this file to", "# needs external resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None,", "# 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } # defaultBuild", "built with 'ant'. requires the Android SDK and appropriate mobile/local.properties", "# the web server at http://0.0.0.0:8000/dist # (This shouldn't be", "possible fields: # resourceBaseUrl - optional - the URL base", "on the build.py command line # (in here as an", "optional - an array of strings to run as commands,", "feature # needs external resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase':", "command line # (in here as an example - it", "the android-sdk installed, and the file mobile/local.properties created as required", "checks # buildMobile - optional - if set, mobile builds", "require the distUrlBase to be \"https\" - they won't check", "updates on regular \"http\" URLs #'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist',", "also builds the mobile .apk # you will need to", "via os.system, after all builds are complete buildSettings = {", "- if set, mobile builds are built with 'ant'. requires", "specified on the build.py command line # (in here as", "- optional - an array of string to run as", "required any more - all resources are embedded. but, it", "for builds. 
# if you want to have custom builds,", "default build to use if none is specified on the", "# if you want to publish your own fork of", "build to use if none is specified on the build.py", "}, # if you want to publish your own fork", "(all resources embedded in standard IITC) # distUrlBase - optional", "- optional - if set, mobile builds are built with", "external resources allowed - they're not needed any more 'randomizax':", "fork of the project, and host it on your own", "web server at http://0.0.0.0:8000/dist # (This shouldn't be required any", "new feature # needs external resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist',", "# (in here as an example - it only works", "installed, and the file mobile/local.properties created as required 'mobile': {", "have custom builds, copy this file to \"localbuildsettings.py\" and make", "for updates on regular \"http\" URLs #'example': { # 'resourceBaseUrl':", "needed any more 'randomizax': { 'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', },", "need to have the android-sdk installed, and the file mobile/local.properties", "# (This shouldn't be required any more - all resources", "commands, via os.system, after all builds are complete buildSettings =", "as required 'mobile': { 'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile': 'debug',", "just in case some new feature # needs external resources)", "more 'randomizax': { 'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', }, # local8000:", "check for updates on regular \"http\" URLs #'example': { #", "after all builds are complete buildSettings = { # local:", "# settings file for builds. 
# if you want to", "a localbuildsettings.py file containing something similar to this # note:", "of the project, and host it on your own web", "and host it on your own web site # create", "http://0.0.0.0:8000/dist # (This shouldn't be required any more - all", "to \"localbuildsettings.py\" and make changes there. # possible fields: #", "'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile': 'debug', }, # if you", "- they won't check for updates on regular \"http\" URLs", "line # (in here as an example - it only", "for external resources (all resources embedded in standard IITC) #", "{ # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } #", "the base URL to use for update checks # buildMobile", "# possible fields: # resourceBaseUrl - optional - the URL", "not modifying external resources # no external resources allowed -", "at http://0.0.0.0:8000/dist # (This shouldn't be required any more -", "to run as commands, via os.system, after all builds are", "default entry that also builds the mobile .apk # you", "Firefox+Greasemonkey require the distUrlBase to be \"https\" - they won't", "# no external resources allowed - they're not needed any", "base URL to use for update checks # buildMobile -", "an example - it only works in localbuildsettings.py) #defaultBuild =", "resources embedded in standard IITC) # distUrlBase - optional -", "the mobile .apk # you will need to have the", "in standard IITC) # distUrlBase - optional - the base", "set, mobile builds are built with 'ant'. 
requires the Android", "- the URL base for external resources (all resources embedded", "# 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } # defaultBuild - the name", "as an example - it only works in localbuildsettings.py) #defaultBuild", "array of strings to run as commands, via os.system, before", "'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, # mobile: default entry that", "them from # the web server at http://0.0.0.0:8000/dist # (This", "are complete buildSettings = { # local: use this build", "an array of strings to run as commands, via os.system,", "project, and host it on your own web site #", "to publish your own fork of the project, and host", "builds the mobile .apk # you will need to have", "regular \"http\" URLs #'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase':", "'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, # mobile: default entry that also", "optional - the URL base for external resources (all resources", "optional - the base URL to use for update checks", "- the base URL to use for update checks #", "mobile .apk # you will need to have the android-sdk", "distUrlBase to be \"https\" - they won't check for updates", "the URL base for external resources (all resources embedded in", "builds are built with 'ant'. requires the Android SDK and", "mobile: default entry that also builds the mobile .apk #", "are built with 'ant'. requires the Android SDK and appropriate", "'mobile': { 'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile': 'debug', }, #", "- an array of strings to run as commands, via", "resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, # mobile:", "settings file for builds. 
# if you want to have", "site # create a localbuildsettings.py file containing something similar to", "if you're not modifying external resources # no external resources", "}, # mobile: default entry that also builds the mobile", "optional - if set, mobile builds are built with 'ant'.", "is specified on the build.py command line # (in here", "changes there. # possible fields: # resourceBaseUrl - optional -", "'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', }, # local8000: if you need", "host it on your own web site # create a", "'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } # defaultBuild -", "\"https\" - they won't check for updates on regular \"http\"", "# if you want to have custom builds, copy this", "None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', }, # local8000: if you need to", "SDK and appropriate mobile/local.properties file configured # preBuild - optional", "# preBuild - optional - an array of strings to", "any more - all resources are embedded. but, it remains", "you will need to have the android-sdk installed, and the", "that also builds the mobile .apk # you will need", "file containing something similar to this # note: Firefox+Greasemonkey require", "to use if none is specified on the build.py command", "with 'ant'. requires the Android SDK and appropriate mobile/local.properties file", "use for update checks # buildMobile - optional - if", "resources allowed - they're not needed any more 'randomizax': {", ".apk # you will need to have the android-sdk installed,", "array of string to run as commands, via os.system, after", "are embedded. but, it remains just in case some new", "- all resources are embedded. 
but, it remains just in", "some new feature # needs external resources) 'local8000': { 'resourceUrlBase':", "won't check for updates on regular \"http\" URLs #'example': {", "- optional - an array of strings to run as", "'https://secure.example.com/iitc/dist', #}, } # defaultBuild - the name of the", "android-sdk installed, and the file mobile/local.properties created as required 'mobile':", "\"localbuildsettings.py\" and make changes there. # possible fields: # resourceBaseUrl", "create a localbuildsettings.py file containing something similar to this #", "before building the scripts # postBuild - optional - an", "load them from # the web server at http://0.0.0.0:8000/dist #", "an array of string to run as commands, via os.system,", "'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } # defaultBuild - the", "{ 'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', }, # local8000: if you", "but, it remains just in case some new feature #", "all builds are complete buildSettings = { # local: use", "if none is specified on the build.py command line #", "# you will need to have the android-sdk installed, and", "modifying external resources # no external resources allowed - they're", "embedded in standard IITC) # distUrlBase - optional - the", "# local8000: if you need to modify external resources, this", "build if you're not modifying external resources # no external", "of the default build to use if none is specified", "mobile/local.properties file configured # preBuild - optional - an array", "there. 
# possible fields: # resourceBaseUrl - optional - the", "commands, via os.system, before building the scripts # postBuild -", "distUrlBase - optional - the base URL to use for", "have the android-sdk installed, and the file mobile/local.properties created as", "#}, } # defaultBuild - the name of the default", "to have the android-sdk installed, and the file mobile/local.properties created", "resources are embedded. but, it remains just in case some", "of string to run as commands, via os.system, after all", "{ # local: use this build if you're not modifying", "if you want to publish your own fork of the", "optional - an array of string to run as commands,", "complete buildSettings = { # local: use this build if", "mobile/local.properties created as required 'mobile': { 'resourceUrlBase': None, 'distUrlBase': None,", "file mobile/local.properties created as required 'mobile': { 'resourceUrlBase': None, 'distUrlBase':", "they won't check for updates on regular \"http\" URLs #'example':", "{ 'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile': 'debug', }, # if", "for update checks # buildMobile - optional - if set,", "of strings to run as commands, via os.system, before building", "to this # note: Firefox+Greasemonkey require the distUrlBase to be", "the default build to use if none is specified on", "if you want to have custom builds, copy this file", "it on your own web site # create a localbuildsettings.py", "- optional - the base URL to use for update", "update checks # buildMobile - optional - if set, mobile", "embedded. 
but, it remains just in case some new feature", "the file mobile/local.properties created as required 'mobile': { 'resourceUrlBase': None,", "and the file mobile/local.properties created as required 'mobile': { 'resourceUrlBase':", "if you need to modify external resources, this build will", "- an array of string to run as commands, via", "appropriate mobile/local.properties file configured # preBuild - optional - an", "they're not needed any more 'randomizax': { 'resourceUrlBase': None, 'distUrlBase':", "and appropriate mobile/local.properties file configured # preBuild - optional -", "run as commands, via os.system, after all builds are complete", "resources # no external resources allowed - they're not needed", "not needed any more 'randomizax': { 'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label',", "URLs #'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #},", "copy this file to \"localbuildsettings.py\" and make changes there. #", "your own fork of the project, and host it on", "own web site # create a localbuildsettings.py file containing something", "required 'mobile': { 'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile': 'debug', },", "\"http\" URLs #'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist',", "external resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, #", "} # defaultBuild - the name of the default build", "# distUrlBase - optional - the base URL to use", "all resources are embedded. 
but, it remains just in case", "here as an example - it only works in localbuildsettings.py)", "os.system, after all builds are complete buildSettings = { #", "the scripts # postBuild - optional - an array of", "None, 'distUrlBase': None, 'buildMobile': 'debug', }, # if you want", "this build will load them from # the web server", "- optional - the URL base for external resources (all", "be required any more - all resources are embedded. but,", "external resources, this build will load them from # the", "standard IITC) # distUrlBase - optional - the base URL", "'distUrlBase': None, }, # mobile: default entry that also builds", "server at http://0.0.0.0:8000/dist # (This shouldn't be required any more", "local8000: if you need to modify external resources, this build", "'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, # mobile: default", "no external resources allowed - they're not needed any more", "external resources (all resources embedded in standard IITC) # distUrlBase", "custom builds, copy this file to \"localbuildsettings.py\" and make changes", "file to \"localbuildsettings.py\" and make changes there. 
# possible fields:", "similar to this # note: Firefox+Greasemonkey require the distUrlBase to", "you want to publish your own fork of the project,", "requires the Android SDK and appropriate mobile/local.properties file configured #", "None, }, # mobile: default entry that also builds the", "name of the default build to use if none is", "created as required 'mobile': { 'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile':", "it remains just in case some new feature # needs", "use this build if you're not modifying external resources #", "localbuildsettings.py file containing something similar to this # note: Firefox+Greasemonkey", "in case some new feature # needs external resources) 'local8000':", "(in here as an example - it only works in", "Android SDK and appropriate mobile/local.properties file configured # preBuild -", "builds. # if you want to have custom builds, copy", "will load them from # the web server at http://0.0.0.0:8000/dist", "needs external resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, },", "fields: # resourceBaseUrl - optional - the URL base for", "'buildMobile': 'debug', }, # if you want to publish your", "more - all resources are embedded. but, it remains just", "this build if you're not modifying external resources # no", "builds, copy this file to \"localbuildsettings.py\" and make changes there.", "file for builds. 
# if you want to have custom", "resources (all resources embedded in standard IITC) # distUrlBase -", "os.system, before building the scripts # postBuild - optional -", "resources, this build will load them from # the web", "# defaultBuild - the name of the default build to", "defaultBuild - the name of the default build to use", "{ 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, # mobile: default entry", "the Android SDK and appropriate mobile/local.properties file configured # preBuild", "remains just in case some new feature # needs external", "example - it only works in localbuildsettings.py) #defaultBuild = 'local'", "via os.system, before building the scripts # postBuild - optional", "None, 'buildMobile': 'debug', }, # if you want to publish", "as commands, via os.system, before building the scripts # postBuild", "none is specified on the build.py command line # (in", "# local: use this build if you're not modifying external", "if set, mobile builds are built with 'ant'. 
requires the", "on regular \"http\" URLs #'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', #", "scripts # postBuild - optional - an array of string", "(This shouldn't be required any more - all resources are", "preBuild - optional - an array of strings to run", "need to modify external resources, this build will load them", "#'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, }", "'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } # defaultBuild - the name of", "resourceBaseUrl - optional - the URL base for external resources", "the distUrlBase to be \"https\" - they won't check for", "'distUrlBase': 'https://randomizax.github.io/polygon-label', }, # local8000: if you need to modify", "'distUrlBase': None, 'buildMobile': 'debug', }, # if you want to", "'https://randomizax.github.io/polygon-label', }, # local8000: if you need to modify external", "external resources # no external resources allowed - they're not", "postBuild - optional - an array of string to run", "'randomizax': { 'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', }, # local8000: if", "want to publish your own fork of the project, and", "to modify external resources, this build will load them from", "string to run as commands, via os.system, after all builds", "you need to modify external resources, this build will load", "base for external resources (all resources embedded in standard IITC)", "your own web site # create a localbuildsettings.py file containing", "containing something similar to this # note: Firefox+Greasemonkey require the", "to run as commands, via os.system, before building the scripts", "be \"https\" - they won't check for updates on regular", "- they're not needed any more 'randomizax': { 'resourceUrlBase': None,", "the name of the default build to use if none", "# postBuild - optional - an array of string to", "make changes there. 
# possible fields: # resourceBaseUrl - optional", "web site # create a localbuildsettings.py file containing something similar", "- the name of the default build to use if", "the web server at http://0.0.0.0:8000/dist # (This shouldn't be required", "something similar to this # note: Firefox+Greasemonkey require the distUrlBase", "to have custom builds, copy this file to \"localbuildsettings.py\" and", "URL to use for update checks # buildMobile - optional", "# resourceBaseUrl - optional - the URL base for external", "URL base for external resources (all resources embedded in standard", "as commands, via os.system, after all builds are complete buildSettings", "'ant'. requires the Android SDK and appropriate mobile/local.properties file configured", "# mobile: default entry that also builds the mobile .apk", "buildSettings = { # local: use this build if you're", "any more 'randomizax': { 'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', }, #", "this file to \"localbuildsettings.py\" and make changes there. # possible", "run as commands, via os.system, before building the scripts #", "strings to run as commands, via os.system, before building the", "note: Firefox+Greasemonkey require the distUrlBase to be \"https\" - they", "own fork of the project, and host it on your", "build.py command line # (in here as an example -", "on your own web site # create a localbuildsettings.py file", "}, # local8000: if you need to modify external resources,", "file configured # preBuild - optional - an array of", "want to have custom builds, copy this file to \"localbuildsettings.py\"", "local: use this build if you're not modifying external resources", "will need to have the android-sdk installed, and the file", "mobile builds are built with 'ant'. 
requires the Android SDK", "configured # preBuild - optional - an array of strings", "# buildMobile - optional - if set, mobile builds are", "modify external resources, this build will load them from #", "# create a localbuildsettings.py file containing something similar to this", "building the scripts # postBuild - optional - an array", "to use for update checks # buildMobile - optional -", "the project, and host it on your own web site", "publish your own fork of the project, and host it", "# note: Firefox+Greasemonkey require the distUrlBase to be \"https\" -", "use if none is specified on the build.py command line", "IITC) # distUrlBase - optional - the base URL to", "build will load them from # the web server at", "'debug', }, # if you want to publish your own", "case some new feature # needs external resources) 'local8000': {", "this # note: Firefox+Greasemonkey require the distUrlBase to be \"https\"", "shouldn't be required any more - all resources are embedded.", "from # the web server at http://0.0.0.0:8000/dist # (This shouldn't", "allowed - they're not needed any more 'randomizax': { 'resourceUrlBase':", "and make changes there. # possible fields: # resourceBaseUrl -", "to be \"https\" - they won't check for updates on", "= { # local: use this build if you're not", "builds are complete buildSettings = { # local: use this", "you're not modifying external resources # no external resources allowed", "entry that also builds the mobile .apk # you will", "the build.py command line # (in here as an example", "buildMobile - optional - if set, mobile builds are built" ]
[ "rm(self, url): \"\"\" Remove the item @param url - url", "medium. All data is parsed out of the url and", "another handler produces unknown results @returns list of handled schemes", "filenames: self.upload( os.path.join(dirpath, filename), os.path.join(rpath, extra, filename), ) def upload(self,", "from __future__ import print_function from __future__ import unicode_literals from __future__", "= urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path) if not os.path.isdir(path): print(\"As file\") try:", "filename), os.path.join(rpath, extra, filename), ) def upload(self, path, rpath): \"\"\"", "None else int(port), username=user, password=password, key_filename=self.keyfile, timeout=15, ) self.sftp =", "port @param user - may be None, user to connect", "file to remote path @param path - path to upload", "Osaka \"\"\" def __init__(self, params={}): \"\"\" Constructor \"\"\" self.keyfile =", "Connect to this storage medium. All data is parsed out", "os.path.relpath(dirpath, os.path.dirname(path)) try: self.sftp.mkdir(os.path.join(rpath, extra)) except IOError: pass for filename", "<filename>osaka/storage/sftp.py from __future__ import print_function from __future__ import unicode_literals from", "path @param path - path to upload @param rpath -", "\"\"\" Remove the item @param url - url to remove", "in filenames: self.upload( os.path.join(dirpath, filename), os.path.join(rpath, extra, filename), ) def", "import traceback import osaka.utils \"\"\" A backend used to handle", "to handle stfp using parimiko @author starchmd \"\"\" class SFTP(object):", "rpath = urllib.parse.urlparse(url).path try: self.sftp.get(rpath, path) except Exception as e:", "except: pass return self.upload(path, dest) print(\"As Dir\") try: self.sftp.mkdir(rpath) except", "port to connect to implementor must handle a None port", "data is parsed out of the url and may be", "path - path to upload @param rpath - remote path", "self.sftp.get(rpath, path) except 
Exception as e: osaka.utils.LOGGER.warning( \"Encountered exception: {}\\n{}\".format(e,", "and may be None scheme: @param host - may be", "division from __future__ import absolute_import from builtins import int from", "fetched files \"\"\" rpath = urllib.parse.urlparse(url).path try: self.sftp.get(rpath, path) except", "handle stfp using parimiko @author starchmd \"\"\" class SFTP(object): \"\"\"", "port - may be None, port to connect to implementor", "local path @param url - url to get file/folder from", "filename in filenames: self.upload( os.path.join(dirpath, filename), os.path.join(rpath, extra, filename), )", "os.path.join(rpath, extra, filename), ) def upload(self, path, rpath): \"\"\" Uploads", "\"\"\" Constructor \"\"\" self.keyfile = params[\"keyfile\"] if \"keyfile\" in params", "may be None, port to connect to implementor must handle", "standard_library.install_aliases() import os import os.path import stat import urllib.parse import", "= params[\"keyfile\"] if \"keyfile\" in params else None def connect(self,", "raise osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url)) def rm(self, url): \"\"\" Remove", "SFTP(object): \"\"\" SFTP handling for Osaka \"\"\" def __init__(self, params={}):", "to connect as implementor must handle a None user @param", "url to put file/folder to \"\"\" rpath = urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\",", "import absolute_import from builtins import int from future import standard_library", "local path of file/folder to put @param url - url", "rpath try: if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0: dest = os.path.join(rpath, os.path.basename(path))", "be None scheme: @param host - may be None, host", "A backend used to handle stfp using parimiko @author starchmd", "a None password \"\"\" self.client = paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host,", "handler produces unknown results 
@returns list of handled schemes \"\"\"", "of the url and may be None scheme: @param host", "url and may be None scheme: @param host - may", "@param url - url to remove \"\"\" rpath = urllib.parse.urlparse(url).path", "place fetched files \"\"\" rpath = urllib.parse.urlparse(url).path try: self.sftp.get(rpath, path)", "Dir\") try: self.sftp.mkdir(rpath) except IOError: pass for dirpath, dirname, filenames", "self.sftp.mkdir(os.path.dirname(rpath)) except IOError: pass dest = rpath try: if stat.S_ISDIR(self.sftp.stat(rpath).st_mode)", "future import standard_library standard_library.install_aliases() import os import os.path import stat", "to connect with implementor must handle a None password \"\"\"", "url): \"\"\" Remove the item @param url - url to", "connect as implementor must handle a None user @param password", "__future__ import division from __future__ import absolute_import from builtins import", "implementor must handle a None user @param password - may", "handle a None password \"\"\" self.client = paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect(", "@returns list of handled schemes \"\"\" return [\"sftp\"] def put(self,", "exception: {}\\n{}\".format(e, traceback.format_exc()) ) raise osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url)) def", "[\"sftp\"] def put(self, path, url): \"\"\" Put the given path", "put file/folder to \"\"\" rpath = urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path) if", "dirpath, dirname, filenames in os.walk(path): extra = os.path.relpath(dirpath, os.path.dirname(path)) try:", "to connect to implementor must handle defaulting @param port -", "url - url to remove \"\"\" rpath = urllib.parse.urlparse(url).path self.sftp.remove(rpath)", "scheme of another handler produces unknown results @returns list of", "in params else None def connect(self, host=None, port=None, user=None, password=<PASSWORD>,", "@param port 
- may be None, port to connect to", "- local path of file/folder to put @param url -", "\"keyfile\" in params else None def connect(self, host=None, port=None, user=None,", "def upload(self, path, rpath): \"\"\" Uploads a file to remote", "path, url): \"\"\" Put the given path to the given", "urllib.parse import paramiko import traceback import osaka.utils \"\"\" A backend", "url - url to put file/folder to \"\"\" rpath =", "the given url @param path - local path of file/folder", "getSchemes(clazz): \"\"\" Returns a list of schemes this handler handles", "Exception as e: osaka.utils.LOGGER.warning( \"Encountered exception: {}\\n{}\".format(e, traceback.format_exc()) ) raise", "port=None, user=None, password=<PASSWORD>, secure=False): \"\"\" Connect to this storage medium.", "self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host, port=22 if port is None else int(port),", "if not os.path.isdir(path): print(\"As file\") try: self.sftp.mkdir(os.path.dirname(rpath)) except IOError: pass", "self.keyfile = params[\"keyfile\"] if \"keyfile\" in params else None def", "extra)) except IOError: pass for filename in filenames: self.upload( os.path.join(dirpath,", "files \"\"\" rpath = urllib.parse.urlparse(url).path try: self.sftp.get(rpath, path) except Exception", "backend used to handle stfp using parimiko @author starchmd \"\"\"", "params[\"keyfile\"] if \"keyfile\" in params else None def connect(self, host=None,", "@param password - may be None, password to connect with", "unicode_literals from __future__ import division from __future__ import absolute_import from", "file\") try: self.sftp.mkdir(os.path.dirname(rpath)) except IOError: pass dest = rpath try:", "host - may be None, host to connect to implementor", "host, port=22 if port is None else int(port), username=user, password=password,", "Get the url (file/folder) to local path @param url -", "url @param path - local path of file/folder to put", "IOError: pass dest = rpath try: 
if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0:", "def put(self, path, url): \"\"\" Put the given path to", "results @returns list of handled schemes \"\"\" return [\"sftp\"] def", "__future__ import unicode_literals from __future__ import division from __future__ import", "print(\"As Dir\") try: self.sftp.mkdir(rpath) except IOError: pass for dirpath, dirname,", "secure=False): \"\"\" Connect to this storage medium. All data is", "dirname, filenames in os.walk(path): extra = os.path.relpath(dirpath, os.path.dirname(path)) try: self.sftp.mkdir(os.path.join(rpath,", "may be None, password to connect with implementor must handle", "def getSchemes(clazz): \"\"\" Returns a list of schemes this handler", "username=user, password=password, key_filename=self.keyfile, timeout=15, ) self.sftp = self.client.open_sftp() @classmethod def", "@param user - may be None, user to connect as", "import urllib.parse import paramiko import traceback import osaka.utils \"\"\" A", "import os import os.path import stat import urllib.parse import paramiko", "password=password, key_filename=self.keyfile, timeout=15, ) self.sftp = self.client.open_sftp() @classmethod def getSchemes(clazz):", "- url to get file/folder from @param path - path", "Constructor \"\"\" self.keyfile = params[\"keyfile\"] if \"keyfile\" in params else", "must handle a None user @param password - may be", "urllib.parse.urlparse(url).path try: self.sftp.get(rpath, path) except Exception as e: osaka.utils.LOGGER.warning( \"Encountered", "the scheme of another handler produces unknown results @returns list", "- may be None, user to connect as implementor must", "return [\"sftp\"] def put(self, path, url): \"\"\" Put the given", "the given path to the given url @param path -", "exist.\".format(url)) def rm(self, url): \"\"\" Remove the item @param url", "for filename in filenames: self.upload( os.path.join(dirpath, filename), os.path.join(rpath, extra, filename),", "upload to \"\"\" self.sftp.put(path, rpath) return 
True def get(self, url,", "password - may be None, password to connect with implementor", "implementor must handle defaulting @param port - may be None,", "{} doesn't exist.\".format(url)) def rm(self, url): \"\"\" Remove the item", "of file/folder to put @param url - url to put", "= self.client.open_sftp() @classmethod def getSchemes(clazz): \"\"\" Returns a list of", "parsed out of the url and may be None scheme:", "IOError: pass for dirpath, dirname, filenames in os.walk(path): extra =", "url, path): \"\"\" Get the url (file/folder) to local path", "must handle defaulting @param port - may be None, port", "rpath = urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path) if not os.path.isdir(path): print(\"As file\")", "path): \"\"\" Get the url (file/folder) to local path @param", "\"\"\" class SFTP(object): \"\"\" SFTP handling for Osaka \"\"\" def", "doesn't exist.\".format(url)) def rm(self, url): \"\"\" Remove the item @param", "port=22 if port is None else int(port), username=user, password=password, key_filename=self.keyfile,", "(file/folder) to local path @param url - url to get", "to get file/folder from @param path - path to place", "self.sftp.put(path, rpath) return True def get(self, url, path): \"\"\" Get", "from __future__ import unicode_literals from __future__ import division from __future__", "os import os.path import stat import urllib.parse import paramiko import", "osaka.utils \"\"\" A backend used to handle stfp using parimiko", "given url @param path - local path of file/folder to", "\"\"\" self.client = paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host, port=22 if port", "def __init__(self, params={}): \"\"\" Constructor \"\"\" self.keyfile = params[\"keyfile\"] if", "a None port @param user - may be None, user", "import unicode_literals from __future__ import division from __future__ import absolute_import", "import standard_library 
standard_library.install_aliases() import os import os.path import stat import", "rpath - remote path to upload to \"\"\" self.sftp.put(path, rpath)", "defaulting @param port - may be None, port to connect", "path to upload to \"\"\" self.sftp.put(path, rpath) return True def", "be None, password to connect with implementor must handle a", "url to remove \"\"\" rpath = urllib.parse.urlparse(url).path self.sftp.remove(rpath) def close(self):", "password=<PASSWORD>, secure=False): \"\"\" Connect to this storage medium. All data", "filenames in os.walk(path): extra = os.path.relpath(dirpath, os.path.dirname(path)) try: self.sftp.mkdir(os.path.join(rpath, extra))", "upload @param rpath - remote path to upload to \"\"\"", "to remote path @param path - path to upload @param", "schemes this handler handles Note: handling the scheme of another", "try: self.sftp.mkdir(rpath) except IOError: pass for dirpath, dirname, filenames in", "@author starchmd \"\"\" class SFTP(object): \"\"\" SFTP handling for Osaka", "handles Note: handling the scheme of another handler produces unknown", "if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0: dest = os.path.join(rpath, os.path.basename(path)) except: pass", "IOError: pass for filename in filenames: self.upload( os.path.join(dirpath, filename), os.path.join(rpath,", "to put @param url - url to put file/folder to", "- url to put file/folder to \"\"\" rpath = urllib.parse.urlparse(url).path.lstrip(\"/\")", "from builtins import int from future import standard_library standard_library.install_aliases() import", "import int from future import standard_library standard_library.install_aliases() import os import", "using parimiko @author starchmd \"\"\" class SFTP(object): \"\"\" SFTP handling", "int from future import standard_library standard_library.install_aliases() import os import os.path", "None, password to connect with implementor must handle a None", "= os.path.relpath(dirpath, os.path.dirname(path)) try: 
self.sftp.mkdir(os.path.join(rpath, extra)) except IOError: pass for", "__future__ import absolute_import from builtins import int from future import", "@classmethod def getSchemes(clazz): \"\"\" Returns a list of schemes this", "path to the given url @param path - local path", "self.sftp.mkdir(rpath) except IOError: pass for dirpath, dirname, filenames in os.walk(path):", "Uploads a file to remote path @param path - path", "@param url - url to get file/folder from @param path", "!= 0: dest = os.path.join(rpath, os.path.basename(path)) except: pass return self.upload(path,", "timeout=15, ) self.sftp = self.client.open_sftp() @classmethod def getSchemes(clazz): \"\"\" Returns", "user - may be None, user to connect as implementor", "from __future__ import absolute_import from builtins import int from future", "@param path - path to upload @param rpath - remote", "from future import standard_library standard_library.install_aliases() import os import os.path import", "starchmd \"\"\" class SFTP(object): \"\"\" SFTP handling for Osaka \"\"\"", "the item @param url - url to remove \"\"\" rpath", "\"Encountered exception: {}\\n{}\".format(e, traceback.format_exc()) ) raise osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url))", "def connect(self, host=None, port=None, user=None, password=<PASSWORD>, secure=False): \"\"\" Connect to", "get file/folder from @param path - path to place fetched", "{}\\n{}\".format(e, traceback.format_exc()) ) raise osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url)) def rm(self,", "handle defaulting @param port - may be None, port to", "Note: handling the scheme of another handler produces unknown results", "to this storage medium. 
All data is parsed out of", "in os.walk(path): extra = os.path.relpath(dirpath, os.path.dirname(path)) try: self.sftp.mkdir(os.path.join(rpath, extra)) except", "rpath = urllib.parse.urlparse(url).path self.sftp.remove(rpath) def close(self): \"\"\" Close this connection", "self.client = paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host, port=22 if port is", "to upload to \"\"\" self.sftp.put(path, rpath) return True def get(self,", "else int(port), username=user, password=password, key_filename=self.keyfile, timeout=15, ) self.sftp = self.client.open_sftp()", "put(self, path, url): \"\"\" Put the given path to the", "return self.upload(path, dest) print(\"As Dir\") try: self.sftp.mkdir(rpath) except IOError: pass", "connect to implementor must handle defaulting @param port - may", "path) if not os.path.isdir(path): print(\"As file\") try: self.sftp.mkdir(os.path.dirname(rpath)) except IOError:", "builtins import int from future import standard_library standard_library.install_aliases() import os", "dest) print(\"As Dir\") try: self.sftp.mkdir(rpath) except IOError: pass for dirpath,", "import osaka.utils \"\"\" A backend used to handle stfp using", "class SFTP(object): \"\"\" SFTP handling for Osaka \"\"\" def __init__(self,", "\"\"\" self.keyfile = params[\"keyfile\"] if \"keyfile\" in params else None", "file/folder to \"\"\" rpath = urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path) if not", "the url and may be None scheme: @param host -", "@param host - may be None, host to connect to", "to the given url @param path - local path of", "is None else int(port), username=user, password=password, key_filename=self.keyfile, timeout=15, ) self.sftp", "host to connect to implementor must handle defaulting @param port", "stfp using parimiko @author starchmd \"\"\" class SFTP(object): \"\"\" SFTP", "item @param url - url to remove \"\"\" rpath =", "may be None, host 
to connect to implementor must handle", "None user @param password - may be None, password to", "from __future__ import division from __future__ import absolute_import from builtins", "to \"\"\" self.sftp.put(path, rpath) return True def get(self, url, path):", "to upload @param rpath - remote path to upload to", "except IOError: pass dest = rpath try: if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) !=", "None password \"\"\" self.client = paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host, port=22", "rpath) return True def get(self, url, path): \"\"\" Get the", "paramiko import traceback import osaka.utils \"\"\" A backend used to", "SFTP handling for Osaka \"\"\" def __init__(self, params={}): \"\"\" Constructor", "connect to implementor must handle a None port @param user", "Returns a list of schemes this handler handles Note: handling", "remote path to upload to \"\"\" self.sftp.put(path, rpath) return True", "import stat import urllib.parse import paramiko import traceback import osaka.utils", "list of handled schemes \"\"\" return [\"sftp\"] def put(self, path,", "except Exception as e: osaka.utils.LOGGER.warning( \"Encountered exception: {}\\n{}\".format(e, traceback.format_exc()) )", "traceback.format_exc()) ) raise osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url)) def rm(self, url):", "os.path.basename(path)) except: pass return self.upload(path, dest) print(\"As Dir\") try: self.sftp.mkdir(rpath)", "\"\"\" Put the given path to the given url @param", "remove \"\"\" rpath = urllib.parse.urlparse(url).path self.sftp.remove(rpath) def close(self): \"\"\" Close", "except IOError: pass for filename in filenames: self.upload( os.path.join(dirpath, filename),", "All data is parsed out of the url and may", "- may be None, password to connect with implementor must", "self.client.connect( host, port=22 if port is None else int(port), username=user,", "@param url - url to put 
file/folder to \"\"\" rpath", "path - local path of file/folder to put @param url", "scheme: @param host - may be None, host to connect", "from @param path - path to place fetched files \"\"\"", "None port @param user - may be None, user to", "a file to remote path @param path - path to", "import paramiko import traceback import osaka.utils \"\"\" A backend used", "file/folder from @param path - path to place fetched files", "path, rpath): \"\"\" Uploads a file to remote path @param", "path - path to place fetched files \"\"\" rpath =", "put @param url - url to put file/folder to \"\"\"", "- path to upload @param rpath - remote path to", "be None, host to connect to implementor must handle defaulting", "except IOError: pass for dirpath, dirname, filenames in os.walk(path): extra", "\"\"\" rpath = urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path) if not os.path.isdir(path): print(\"As", "schemes \"\"\" return [\"sftp\"] def put(self, path, url): \"\"\" Put", "list of schemes this handler handles Note: handling the scheme", "print(\"As file\") try: self.sftp.mkdir(os.path.dirname(rpath)) except IOError: pass dest = rpath", "connect(self, host=None, port=None, user=None, password=<PASSWORD>, secure=False): \"\"\" Connect to this", "must handle a None password \"\"\" self.client = paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())", "implementor must handle a None port @param user - may", "path to upload @param rpath - remote path to upload", "None def connect(self, host=None, port=None, user=None, password=<PASSWORD>, secure=False): \"\"\" Connect", "\"\"\" rpath = urllib.parse.urlparse(url).path self.sftp.remove(rpath) def close(self): \"\"\" Close this", "- url to remove \"\"\" rpath = urllib.parse.urlparse(url).path self.sftp.remove(rpath) def", "True def get(self, url, path): \"\"\" Get the url (file/folder)", "to remove \"\"\" rpath = urllib.parse.urlparse(url).path 
self.sftp.remove(rpath) def close(self): \"\"\"", "if port is None else int(port), username=user, password=password, key_filename=self.keyfile, timeout=15,", "os.path.join(dirpath, filename), os.path.join(rpath, extra, filename), ) def upload(self, path, rpath):", "to implementor must handle defaulting @param port - may be", "os.walk(path): extra = os.path.relpath(dirpath, os.path.dirname(path)) try: self.sftp.mkdir(os.path.join(rpath, extra)) except IOError:", "may be None, user to connect as implementor must handle", "int(port), username=user, password=password, key_filename=self.keyfile, timeout=15, ) self.sftp = self.client.open_sftp() @classmethod", "extra = os.path.relpath(dirpath, os.path.dirname(path)) try: self.sftp.mkdir(os.path.join(rpath, extra)) except IOError: pass", "urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path) if not os.path.isdir(path): print(\"As file\") try: self.sftp.mkdir(os.path.dirname(rpath))", "pass for filename in filenames: self.upload( os.path.join(dirpath, filename), os.path.join(rpath, extra,", "- path to place fetched files \"\"\" rpath = urllib.parse.urlparse(url).path", "import print_function from __future__ import unicode_literals from __future__ import division", "__future__ import print_function from __future__ import unicode_literals from __future__ import", "os.path.isdir(path): print(\"As file\") try: self.sftp.mkdir(os.path.dirname(rpath)) except IOError: pass dest =", "\"\"\" Get the url (file/folder) to local path @param url", "to connect to implementor must handle a None port @param", "rpath): \"\"\" Uploads a file to remote path @param path", "os.path.join(rpath, os.path.basename(path)) except: pass return self.upload(path, dest) print(\"As Dir\") try:", "__init__(self, params={}): \"\"\" Constructor \"\"\" self.keyfile = params[\"keyfile\"] if \"keyfile\"", "to place fetched files \"\"\" rpath = urllib.parse.urlparse(url).path try: self.sftp.get(rpath,", "path) except Exception as 
e: osaka.utils.LOGGER.warning( \"Encountered exception: {}\\n{}\".format(e, traceback.format_exc())", "url): \"\"\" Put the given path to the given url", "upload(self, path, rpath): \"\"\" Uploads a file to remote path", "try: if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0: dest = os.path.join(rpath, os.path.basename(path)) except:", ") self.sftp = self.client.open_sftp() @classmethod def getSchemes(clazz): \"\"\" Returns a", "\"\"\" Uploads a file to remote path @param path -", "try: self.sftp.mkdir(os.path.join(rpath, extra)) except IOError: pass for filename in filenames:", "for dirpath, dirname, filenames in os.walk(path): extra = os.path.relpath(dirpath, os.path.dirname(path))", "path to place fetched files \"\"\" rpath = urllib.parse.urlparse(url).path try:", "port is None else int(port), username=user, password=password, key_filename=self.keyfile, timeout=15, )", "to put file/folder to \"\"\" rpath = urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path)", "pass for dirpath, dirname, filenames in os.walk(path): extra = os.path.relpath(dirpath,", "path of file/folder to put @param url - url to", "Put the given path to the given url @param path", "of handled schemes \"\"\" return [\"sftp\"] def put(self, path, url):", "stat import urllib.parse import paramiko import traceback import osaka.utils \"\"\"", "host=None, port=None, user=None, password=<PASSWORD>, secure=False): \"\"\" Connect to this storage", "this storage medium. 
All data is parsed out of the", "osaka.utils.LOGGER.warning( \"Encountered exception: {}\\n{}\".format(e, traceback.format_exc()) ) raise osaka.utils.OsakaFileNotFound(\"File {} doesn't", "= paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host, port=22 if port is None", "import division from __future__ import absolute_import from builtins import int", "self.upload( os.path.join(dirpath, filename), os.path.join(rpath, extra, filename), ) def upload(self, path,", "of schemes this handler handles Note: handling the scheme of", "is parsed out of the url and may be None", ") def upload(self, path, rpath): \"\"\" Uploads a file to", "given path to the given url @param path - local", "extra, filename), ) def upload(self, path, rpath): \"\"\" Uploads a", "get(self, url, path): \"\"\" Get the url (file/folder) to local", "url - url to get file/folder from @param path -", "- remote path to upload to \"\"\" self.sftp.put(path, rpath) return", "\"\"\" self.sftp.put(path, rpath) return True def get(self, url, path): \"\"\"", "else None def connect(self, host=None, port=None, user=None, password=<PASSWORD>, secure=False): \"\"\"", "be None, port to connect to implementor must handle a", "stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0: dest = os.path.join(rpath, os.path.basename(path)) except: pass return", "the url (file/folder) to local path @param url - url", "urllib.parse.urlparse(url).path self.sftp.remove(rpath) def close(self): \"\"\" Close this connection \"\"\" self.client.close()", "return True def get(self, url, path): \"\"\" Get the url", "handling for Osaka \"\"\" def __init__(self, params={}): \"\"\" Constructor \"\"\"", "@param path - local path of file/folder to put @param", "self.client.open_sftp() @classmethod def getSchemes(clazz): \"\"\" Returns a list of schemes", "if \"keyfile\" in params else None def connect(self, host=None, port=None,", "self.sftp = self.client.open_sftp() @classmethod 
def getSchemes(clazz): \"\"\" Returns a list", "handle a None port @param user - may be None,", "= urllib.parse.urlparse(url).path try: self.sftp.get(rpath, path) except Exception as e: osaka.utils.LOGGER.warning(", "paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host, port=22 if port is None else", "pass return self.upload(path, dest) print(\"As Dir\") try: self.sftp.mkdir(rpath) except IOError:", "implementor must handle a None password \"\"\" self.client = paramiko.client.SSHClient()", "params={}): \"\"\" Constructor \"\"\" self.keyfile = params[\"keyfile\"] if \"keyfile\" in", "pass dest = rpath try: if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0: dest", "e: osaka.utils.LOGGER.warning( \"Encountered exception: {}\\n{}\".format(e, traceback.format_exc()) ) raise osaka.utils.OsakaFileNotFound(\"File {}", "\"\"\" SFTP handling for Osaka \"\"\" def __init__(self, params={}): \"\"\"", "may be None scheme: @param host - may be None,", "None scheme: @param host - may be None, host to", "osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url)) def rm(self, url): \"\"\" Remove the", "parimiko @author starchmd \"\"\" class SFTP(object): \"\"\" SFTP handling for", "try: self.sftp.get(rpath, path) except Exception as e: osaka.utils.LOGGER.warning( \"Encountered exception:", "- may be None, port to connect to implementor must", "\"\"\" return [\"sftp\"] def put(self, path, url): \"\"\" Put the", "handling the scheme of another handler produces unknown results @returns", "with implementor must handle a None password \"\"\" self.client =", "out of the url and may be None scheme: @param", "filename), ) def upload(self, path, rpath): \"\"\" Uploads a file", "be None, user to connect as implementor must handle a", "unknown results @returns list of handled schemes \"\"\" return [\"sftp\"]", "to implementor must handle a None port @param user -", "to \"\"\" rpath = 
urllib.parse.urlparse(url).path.lstrip(\"/\") print(\"\\n\\n\\n\\nUploading:\", path) if not os.path.isdir(path):", "must handle a None port @param user - may be", "handle a None user @param password - may be None,", "def get(self, url, path): \"\"\" Get the url (file/folder) to", "self.sftp.mkdir(os.path.join(rpath, extra)) except IOError: pass for filename in filenames: self.upload(", "self.upload(path, dest) print(\"As Dir\") try: self.sftp.mkdir(rpath) except IOError: pass for", "@param path - path to place fetched files \"\"\" rpath", "None, user to connect as implementor must handle a None", "produces unknown results @returns list of handled schemes \"\"\" return", "of another handler produces unknown results @returns list of handled", "0: dest = os.path.join(rpath, os.path.basename(path)) except: pass return self.upload(path, dest)", "\"\"\" A backend used to handle stfp using parimiko @author", "user to connect as implementor must handle a None user", "None, port to connect to implementor must handle a None", "key_filename=self.keyfile, timeout=15, ) self.sftp = self.client.open_sftp() @classmethod def getSchemes(clazz): \"\"\"", "url to get file/folder from @param path - path to", "= os.path.join(rpath, os.path.basename(path)) except: pass return self.upload(path, dest) print(\"As Dir\")", ") raise osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url)) def rm(self, url): \"\"\"", "not os.path.isdir(path): print(\"As file\") try: self.sftp.mkdir(os.path.dirname(rpath)) except IOError: pass dest", "Remove the item @param url - url to remove \"\"\"", "os.path.dirname(path)) try: self.sftp.mkdir(os.path.join(rpath, extra)) except IOError: pass for filename in", "remote path @param path - path to upload @param rpath", "\"\"\" rpath = urllib.parse.urlparse(url).path try: self.sftp.get(rpath, path) except Exception as", "this handler handles Note: handling the scheme of another handler", "as implementor must handle a None user @param password -", 
"@param rpath - remote path to upload to \"\"\" self.sftp.put(path,", "user=None, password=<PASSWORD>, secure=False): \"\"\" Connect to this storage medium. All", "params else None def connect(self, host=None, port=None, user=None, password=<PASSWORD>, secure=False):", "print_function from __future__ import unicode_literals from __future__ import division from", "password to connect with implementor must handle a None password", "standard_library standard_library.install_aliases() import os import os.path import stat import urllib.parse", "= urllib.parse.urlparse(url).path self.sftp.remove(rpath) def close(self): \"\"\" Close this connection \"\"\"", "a None user @param password - may be None, password", "dest = rpath try: if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0: dest =", "path @param url - url to get file/folder from @param", "url (file/folder) to local path @param url - url to", "storage medium. All data is parsed out of the url", "- may be None, host to connect to implementor must", "used to handle stfp using parimiko @author starchmd \"\"\" class", "as e: osaka.utils.LOGGER.warning( \"Encountered exception: {}\\n{}\".format(e, traceback.format_exc()) ) raise osaka.utils.OsakaFileNotFound(\"File", "dest = os.path.join(rpath, os.path.basename(path)) except: pass return self.upload(path, dest) print(\"As", "traceback import osaka.utils \"\"\" A backend used to handle stfp", "\"\"\" Returns a list of schemes this handler handles Note:", "user @param password - may be None, password to connect", "\"\"\" def __init__(self, params={}): \"\"\" Constructor \"\"\" self.keyfile = params[\"keyfile\"]", "handled schemes \"\"\" return [\"sftp\"] def put(self, path, url): \"\"\"", "to local path @param url - url to get file/folder", "file/folder to put @param url - url to put file/folder", "None, host to connect to implementor must handle defaulting @param", "a list of schemes this handler handles Note: handling the", "connect with implementor must handle a None 
password \"\"\" self.client", "password \"\"\" self.client = paramiko.client.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect( host, port=22 if", "print(\"\\n\\n\\n\\nUploading:\", path) if not os.path.isdir(path): print(\"As file\") try: self.sftp.mkdir(os.path.dirname(rpath)) except", "import os.path import stat import urllib.parse import paramiko import traceback", "def rm(self, url): \"\"\" Remove the item @param url -", "for Osaka \"\"\" def __init__(self, params={}): \"\"\" Constructor \"\"\" self.keyfile", "absolute_import from builtins import int from future import standard_library standard_library.install_aliases()", "os.path import stat import urllib.parse import paramiko import traceback import", "\"\"\" Connect to this storage medium. All data is parsed", "try: self.sftp.mkdir(os.path.dirname(rpath)) except IOError: pass dest = rpath try: if", "= rpath try: if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0: dest = os.path.join(rpath,", "handler handles Note: handling the scheme of another handler produces" ]
[ "# surf is the right-most (largest) tier of the cake", ": Ball speed along single axis b_speed : Ball speed", "movepos = [0, 0] if action == 1: movepos[1] =", "self.surf2.get_rect() self.surf3 = pygame.Surface((30 // RENDER_RATIO, 40 // RENDER_RATIO)) self.rect3", "def __init__(self, speed=12): # surf is the right-most (largest) tier", "= self.surf3.get_rect() self.surf4 = pygame.Surface((30 // RENDER_RATIO, 10 // RENDER_RATIO))", "ignore paddle type Returns ------- is_collision: 1 if ball collides", "elif dy > 0: b_rect.bottom = self.rect.top b_speed[1] = -b_speed[1]", "ball rect b_speed: new ball speed \"\"\" if self.rect4.colliderect(b_rect): is_collision", "b_rect.top = self.rect4.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed", "// RENDER_RATIO)) self.rect4 = self.surf4.get_rect() self.speed = speed def reset(self):", "return is_collision, b_rect, b_speed elif self.rect3.colliderect(b_rect): is_collision = True if", "draw(self, screen): pygame.draw.rect(screen, (255, 255, 255), self.rect) pygame.draw.rect(screen, (255, 255,", "elif self.rect3.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right", "class CakePaddle(pygame.sprite.Sprite): def __init__(self, speed=12): # surf is the right-most", "surf is the right-most (largest) tier of the cake self.surf", "0: b_rect.right = self.rect2.left b_speed[0] = -b_speed[0] # top or", "-b_speed[1] return is_collision, b_rect, b_speed elif self.rect3.colliderect(b_rect): is_collision = True", "os os.environ[\"PYGAME_HIDE_SUPPORT_PROMPT\"] = \"hide\" import pygame RENDER_RATIO = 2 class", "is_collision = True if dx > 0: b_rect.right = self.rect3.left", "= self.rect3.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif", "up, 2 - down movepos = [0, 0] if action", "// RENDER_RATIO)) self.rect2 = self.surf2.get_rect() self.surf3 = pygame.Surface((30 // RENDER_RATIO,", "elif dy > 0: b_rect.bottom = self.rect3.top b_speed[1] = -b_speed[1]", "b_speed[1] = -b_speed[1] elif 
dy < 0: b_rect.top = self.rect.bottom", "255, 255), self.rect3) pygame.draw.rect(screen, (255, 255, 255), self.rect4) def update(self,", "RENDER_RATIO, 10 // RENDER_RATIO)) self.rect4 = self.surf4.get_rect() self.speed = speed", "process_collision(self, b_rect, dx, dy, b_speed, paddle_type): \"\"\" Parameters ---------- b_rect", "> 0: b_rect.right = self.rect3.left b_speed[0] = -b_speed[0] # top", "b_rect.bottom = self.rect.top b_speed[1] = -b_speed[1] elif dy < 0:", "self.rect.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right =", "self.rect2.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif self.rect.colliderect(b_rect):", "2: movepos[1] = movepos[1] + self.speed newpos = self.rect.move(movepos) if", "self.rect.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed return False,", "or bottom edge elif dy > 0: b_rect.bottom = self.rect4.top", "action: 1 - up, 2 - down movepos = [0,", "self.rect2.move(movepos) self.rect3 = self.rect3.move(movepos) self.rect4 = self.rect4.move(movepos) def process_collision(self, b_rect,", "tier of the cake self.surf = pygame.Surface((30 // RENDER_RATIO, 120", "reset(self): # self.rect is set from envs class self.rect2.midright =", "1 - up, 2 - down movepos = [0, 0]", "rect dx, dy : Ball speed along single axis b_speed", "[0, 0] if action == 1: movepos[1] = movepos[1] -", "self.rect2 = self.surf2.get_rect() self.surf3 = pygame.Surface((30 // RENDER_RATIO, 40 //", "self.rect4.midright = self.rect3.midleft def draw(self, screen): pygame.draw.rect(screen, (255, 255, 255),", "__init__(self, speed=12): # surf is the right-most (largest) tier of", "self.rect3.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top =", "= self.rect3.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top", "is_collision = True if dx > 0: b_rect.right = self.rect.left", "right-most (largest) tier of the cake self.surf = pygame.Surface((30 //", "return is_collision, b_rect, b_speed elif 
self.rect.colliderect(b_rect): is_collision = True if", "= self.rect4.move(movepos) def process_collision(self, b_rect, dx, dy, b_speed, paddle_type): \"\"\"", "edge elif dy > 0: b_rect.bottom = self.rect.top b_speed[1] =", "b_rect, b_speed elif self.rect.colliderect(b_rect): is_collision = True if dx >", "or bottom edge elif dy > 0: b_rect.bottom = self.rect.top", "Ball speed ignore paddle type Returns ------- is_collision: 1 if", "// RENDER_RATIO, 120 // RENDER_RATIO)) self.rect = self.surf.get_rect() self.surf2 =", "(255, 255, 255), self.rect4) def update(self, area, action): # action:", "dy : Ball speed along single axis b_speed : Ball", "= self.rect2.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif", "0: b_rect.bottom = self.rect3.top b_speed[1] = -b_speed[1] elif dy <", "RENDER_RATIO)) self.rect = self.surf.get_rect() self.surf2 = pygame.Surface((30 // RENDER_RATIO, 80", "self.rect2.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right =", "// RENDER_RATIO)) self.rect3 = self.surf3.get_rect() self.surf4 = pygame.Surface((30 // RENDER_RATIO,", "bottom edge elif dy > 0: b_rect.bottom = self.rect.top b_speed[1]", "= self.rect4.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif", "RENDER_RATIO = 2 class CakePaddle(pygame.sprite.Sprite): def __init__(self, speed=12): # surf", "= -b_speed[0] # top or bottom edge elif dy >", "if dx > 0: b_rect.right = self.rect.left b_speed[0] = -b_speed[0]", "== 2: movepos[1] = movepos[1] + self.speed newpos = self.rect.move(movepos)", "0: b_rect.top = self.rect4.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect,", "40 // RENDER_RATIO)) self.rect3 = self.surf3.get_rect() self.surf4 = pygame.Surface((30 //", "speed=12): # surf is the right-most (largest) tier of the", "other rects too self.rect2 = self.rect2.move(movepos) self.rect3 = self.rect3.move(movepos) self.rect4", "pygame.draw.rect(screen, (255, 255, 255), self.rect2) pygame.draw.rect(screen, (255, 255, 
255), self.rect3)", "(255, 255, 255), self.rect2) pygame.draw.rect(screen, (255, 255, 255), self.rect3) pygame.draw.rect(screen,", "= -b_speed[1] elif dy < 0: b_rect.top = self.rect4.bottom b_speed[1]", "import os os.environ[\"PYGAME_HIDE_SUPPORT_PROMPT\"] = \"hide\" import pygame RENDER_RATIO = 2", "elif self.rect2.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right", "-b_speed[0] # top or bottom edge elif dy > 0:", "b_speed elif self.rect3.colliderect(b_rect): is_collision = True if dx > 0:", "RENDER_RATIO)) self.rect4 = self.surf4.get_rect() self.speed = speed def reset(self): #", "0: b_rect.bottom = self.rect2.top b_speed[1] = -b_speed[1] elif dy <", "> 0: b_rect.right = self.rect2.left b_speed[0] = -b_speed[0] # top", "b_rect, dx, dy, b_speed, paddle_type): \"\"\" Parameters ---------- b_rect :", "0: b_rect.top = self.rect3.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect,", "bottom edge elif dy > 0: b_rect.bottom = self.rect4.top b_speed[1]", "self.rect3.midleft def draw(self, screen): pygame.draw.rect(screen, (255, 255, 255), self.rect) pygame.draw.rect(screen,", "- self.speed elif action == 2: movepos[1] = movepos[1] +", "elif action == 2: movepos[1] = movepos[1] + self.speed newpos", ": Ball rect dx, dy : Ball speed along single", "b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top = self.rect3.bottom", "= True if dx > 0: b_rect.right = self.rect2.left b_speed[0]", "dx > 0: b_rect.right = self.rect4.left b_speed[0] = -b_speed[0] #", "dx, dy, b_speed, paddle_type): \"\"\" Parameters ---------- b_rect : Ball", "self.rect4) def update(self, area, action): # action: 1 - up,", "with paddle b_rect: new ball rect b_speed: new ball speed", "self.rect4.left b_speed[0] = -b_speed[0] # top or bottom edge elif", "# action: 1 - up, 2 - down movepos =", "Returns ------- is_collision: 1 if ball collides with paddle b_rect:", "if dx > 0: b_rect.right = self.rect4.left b_speed[0] = -b_speed[0]", "top or bottom edge elif dy > 0: b_rect.bottom =", "move 
other rects too self.rect2 = self.rect2.move(movepos) self.rect3 = self.rect3.move(movepos)", "= self.rect3.left b_speed[0] = -b_speed[0] # top or bottom edge", "bottom edge elif dy > 0: b_rect.bottom = self.rect3.top b_speed[1]", "> 0: b_rect.bottom = self.rect3.top b_speed[1] = -b_speed[1] elif dy", "// RENDER_RATIO, 10 // RENDER_RATIO)) self.rect4 = self.surf4.get_rect() self.speed =", "b_rect.right = self.rect2.left b_speed[0] = -b_speed[0] # top or bottom", "= -b_speed[1] elif dy < 0: b_rect.top = self.rect3.bottom b_speed[1]", "the right-most (largest) tier of the cake self.surf = pygame.Surface((30", "elif self.rect.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right", "self.surf4.get_rect() self.speed = speed def reset(self): # self.rect is set", "b_rect.top = self.rect2.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed", "1 if ball collides with paddle b_rect: new ball rect", "update(self, area, action): # action: 1 - up, 2 -", "dx > 0: b_rect.right = self.rect3.left b_speed[0] = -b_speed[0] #", "(largest) tier of the cake self.surf = pygame.Surface((30 // RENDER_RATIO,", "self.surf2 = pygame.Surface((30 // RENDER_RATIO, 80 // RENDER_RATIO)) self.rect2 =", "is_collision, b_rect, b_speed elif self.rect3.colliderect(b_rect): is_collision = True if dx", "b_rect.bottom = self.rect3.top b_speed[1] = -b_speed[1] elif dy < 0:", "speed ignore paddle type Returns ------- is_collision: 1 if ball", "0: b_rect.bottom = self.rect.top b_speed[1] = -b_speed[1] elif dy <", "= True if dx > 0: b_rect.right = self.rect.left b_speed[0]", "= -b_speed[1] return is_collision, b_rect, b_speed return False, b_rect, b_speed", "movepos[1] = movepos[1] + self.speed newpos = self.rect.move(movepos) if area.contains(newpos):", "= pygame.Surface((30 // RENDER_RATIO, 120 // RENDER_RATIO)) self.rect = self.surf.get_rect()", "b_rect.bottom = self.rect4.top b_speed[1] = -b_speed[1] elif dy < 0:", "CakePaddle(pygame.sprite.Sprite): def __init__(self, 
speed=12): # surf is the right-most (largest)", "is_collision = True if dx > 0: b_rect.right = self.rect2.left", "== 1: movepos[1] = movepos[1] - self.speed elif action ==", "screen): pygame.draw.rect(screen, (255, 255, 255), self.rect) pygame.draw.rect(screen, (255, 255, 255),", "0: b_rect.right = self.rect3.left b_speed[0] = -b_speed[0] # top or", "if ball collides with paddle b_rect: new ball rect b_speed:", "0] if action == 1: movepos[1] = movepos[1] - self.speed", "axis b_speed : Ball speed ignore paddle type Returns -------", "b_speed : Ball speed ignore paddle type Returns ------- is_collision:", "b_rect.bottom = self.rect2.top b_speed[1] = -b_speed[1] elif dy < 0:", "self.rect = self.surf.get_rect() self.surf2 = pygame.Surface((30 // RENDER_RATIO, 80 //", "> 0: b_rect.bottom = self.rect4.top b_speed[1] = -b_speed[1] elif dy", "action): # action: 1 - up, 2 - down movepos", "= self.rect3.move(movepos) self.rect4 = self.rect4.move(movepos) def process_collision(self, b_rect, dx, dy,", "set from envs class self.rect2.midright = self.rect.midleft self.rect3.midright = self.rect2.midleft", "RENDER_RATIO, 80 // RENDER_RATIO)) self.rect2 = self.surf2.get_rect() self.surf3 = pygame.Surface((30", "elif dy < 0: b_rect.top = self.rect.bottom b_speed[1] = -b_speed[1]", "self.rect4.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif self.rect3.colliderect(b_rect):", "is the right-most (largest) tier of the cake self.surf =", "= movepos[1] + self.speed newpos = self.rect.move(movepos) if area.contains(newpos): self.rect", "class self.rect2.midright = self.rect.midleft self.rect3.midright = self.rect2.midleft self.rect4.midright = self.rect3.midleft", "rects too self.rect2 = self.rect2.move(movepos) self.rect3 = self.rect3.move(movepos) self.rect4 =", "= self.rect2.move(movepos) self.rect3 = self.rect3.move(movepos) self.rect4 = self.rect4.move(movepos) def process_collision(self,", "+ self.speed newpos = self.rect.move(movepos) if 
area.contains(newpos): self.rect = newpos", "b_speed elif self.rect.colliderect(b_rect): is_collision = True if dx > 0:", "bottom edge elif dy > 0: b_rect.bottom = self.rect2.top b_speed[1]", "down movepos = [0, 0] if action == 1: movepos[1]", "Parameters ---------- b_rect : Ball rect dx, dy : Ball", "= -b_speed[1] return is_collision, b_rect, b_speed elif self.rect.colliderect(b_rect): is_collision =", "or bottom edge elif dy > 0: b_rect.bottom = self.rect2.top", "self.surf3 = pygame.Surface((30 // RENDER_RATIO, 40 // RENDER_RATIO)) self.rect3 =", "paddle_type): \"\"\" Parameters ---------- b_rect : Ball rect dx, dy", "// RENDER_RATIO, 80 // RENDER_RATIO)) self.rect2 = self.surf2.get_rect() self.surf3 =", "= pygame.Surface((30 // RENDER_RATIO, 40 // RENDER_RATIO)) self.rect3 = self.surf3.get_rect()", "True if dx > 0: b_rect.right = self.rect2.left b_speed[0] =", "self.rect3 = self.surf3.get_rect() self.surf4 = pygame.Surface((30 // RENDER_RATIO, 10 //", "dy < 0: b_rect.top = self.rect.bottom b_speed[1] = -b_speed[1] return", "rect b_speed: new ball speed \"\"\" if self.rect4.colliderect(b_rect): is_collision =", "= self.surf4.get_rect() self.speed = speed def reset(self): # self.rect is", "= self.rect3.midleft def draw(self, screen): pygame.draw.rect(screen, (255, 255, 255), self.rect)", "dy > 0: b_rect.bottom = self.rect.top b_speed[1] = -b_speed[1] elif", "2 class CakePaddle(pygame.sprite.Sprite): def __init__(self, speed=12): # surf is the", "type Returns ------- is_collision: 1 if ball collides with paddle", "self.rect2.midright = self.rect.midleft self.rect3.midright = self.rect2.midleft self.rect4.midright = self.rect3.midleft def", "is_collision: 1 if ball collides with paddle b_rect: new ball", "self.rect3.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif self.rect2.colliderect(b_rect):", "paddle type Returns ------- is_collision: 1 if ball collides with", "dy < 0: b_rect.top = self.rect3.bottom b_speed[1] = -b_speed[1] return", 
"speed along single axis b_speed : Ball speed ignore paddle", "= self.rect2.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top", "b_rect : Ball rect dx, dy : Ball speed along", "= self.rect.move(movepos) if area.contains(newpos): self.rect = newpos # move other", "paddle b_rect: new ball rect b_speed: new ball speed \"\"\"", "def reset(self): # self.rect is set from envs class self.rect2.midright", "b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif self.rect3.colliderect(b_rect): is_collision", "0: b_rect.top = self.rect.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect,", "is_collision, b_rect, b_speed elif self.rect2.colliderect(b_rect): is_collision = True if dx", "dy, b_speed, paddle_type): \"\"\" Parameters ---------- b_rect : Ball rect", "is_collision, b_rect, b_speed elif self.rect.colliderect(b_rect): is_collision = True if dx", "self.rect3.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right =", "self.rect2.midleft self.rect4.midright = self.rect3.midleft def draw(self, screen): pygame.draw.rect(screen, (255, 255,", "collides with paddle b_rect: new ball rect b_speed: new ball", "along single axis b_speed : Ball speed ignore paddle type", "self.rect.midleft self.rect3.midright = self.rect2.midleft self.rect4.midright = self.rect3.midleft def draw(self, screen):", "120 // RENDER_RATIO)) self.rect = self.surf.get_rect() self.surf2 = pygame.Surface((30 //", "self.rect3) pygame.draw.rect(screen, (255, 255, 255), self.rect4) def update(self, area, action):", "= True if dx > 0: b_rect.right = self.rect4.left b_speed[0]", "self.rect) pygame.draw.rect(screen, (255, 255, 255), self.rect2) pygame.draw.rect(screen, (255, 255, 255),", "- down movepos = [0, 0] if action == 1:", "speed \"\"\" if self.rect4.colliderect(b_rect): is_collision = True if dx >", ": Ball speed ignore paddle type Returns ------- is_collision: 1", "if self.rect4.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right", "self.rect3.left b_speed[0] 
= -b_speed[0] # top or bottom edge elif", "< 0: b_rect.top = self.rect2.bottom b_speed[1] = -b_speed[1] return is_collision,", "2 - down movepos = [0, 0] if action ==", "self.rect2.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top =", "area, action): # action: 1 - up, 2 - down", "b_rect.top = self.rect.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed", "dy > 0: b_rect.bottom = self.rect2.top b_speed[1] = -b_speed[1] elif", "RENDER_RATIO, 120 // RENDER_RATIO)) self.rect = self.surf.get_rect() self.surf2 = pygame.Surface((30", "action == 2: movepos[1] = movepos[1] + self.speed newpos =", "edge elif dy > 0: b_rect.bottom = self.rect3.top b_speed[1] =", "return is_collision, b_rect, b_speed elif self.rect2.colliderect(b_rect): is_collision = True if", "# move other rects too self.rect2 = self.rect2.move(movepos) self.rect3 =", "255, 255), self.rect4) def update(self, area, action): # action: 1", "= pygame.Surface((30 // RENDER_RATIO, 10 // RENDER_RATIO)) self.rect4 = self.surf4.get_rect()", "= self.surf.get_rect() self.surf2 = pygame.Surface((30 // RENDER_RATIO, 80 // RENDER_RATIO))", "self.rect4 = self.surf4.get_rect() self.speed = speed def reset(self): # self.rect", "new ball speed \"\"\" if self.rect4.colliderect(b_rect): is_collision = True if", "b_rect: new ball rect b_speed: new ball speed \"\"\" if", "\"\"\" if self.rect4.colliderect(b_rect): is_collision = True if dx > 0:", "\"\"\" Parameters ---------- b_rect : Ball rect dx, dy :", "if area.contains(newpos): self.rect = newpos # move other rects too", "True if dx > 0: b_rect.right = self.rect3.left b_speed[0] =", "= pygame.Surface((30 // RENDER_RATIO, 80 // RENDER_RATIO)) self.rect2 = self.surf2.get_rect()", "single axis b_speed : Ball speed ignore paddle type Returns", "= \"hide\" import pygame RENDER_RATIO = 2 class CakePaddle(pygame.sprite.Sprite): def", "< 0: b_rect.top = self.rect4.bottom b_speed[1] = -b_speed[1] return is_collision,", "envs class self.rect2.midright = 
self.rect.midleft self.rect3.midright = self.rect2.midleft self.rect4.midright =", "= self.rect.midleft self.rect3.midright = self.rect2.midleft self.rect4.midright = self.rect3.midleft def draw(self,", "b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif self.rect2.colliderect(b_rect): is_collision", "too self.rect2 = self.rect2.move(movepos) self.rect3 = self.rect3.move(movepos) self.rect4 = self.rect4.move(movepos)", "pygame RENDER_RATIO = 2 class CakePaddle(pygame.sprite.Sprite): def __init__(self, speed=12): #", "RENDER_RATIO)) self.rect2 = self.surf2.get_rect() self.surf3 = pygame.Surface((30 // RENDER_RATIO, 40", "pygame.Surface((30 // RENDER_RATIO, 120 // RENDER_RATIO)) self.rect = self.surf.get_rect() self.surf2", "= self.rect4.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top", "< 0: b_rect.top = self.rect.bottom b_speed[1] = -b_speed[1] return is_collision,", "self.rect.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top =", "255, 255), self.rect2) pygame.draw.rect(screen, (255, 255, 255), self.rect3) pygame.draw.rect(screen, (255,", "-b_speed[1] return is_collision, b_rect, b_speed elif self.rect.colliderect(b_rect): is_collision = True", "self.surf = pygame.Surface((30 // RENDER_RATIO, 120 // RENDER_RATIO)) self.rect =", "pygame.draw.rect(screen, (255, 255, 255), self.rect) pygame.draw.rect(screen, (255, 255, 255), self.rect2)", "movepos[1] + self.speed newpos = self.rect.move(movepos) if area.contains(newpos): self.rect =", "def draw(self, screen): pygame.draw.rect(screen, (255, 255, 255), self.rect) pygame.draw.rect(screen, (255,", "0: b_rect.right = self.rect.left b_speed[0] = -b_speed[0] # top or", "elif dy > 0: b_rect.bottom = self.rect2.top b_speed[1] = -b_speed[1]", "cake self.surf = pygame.Surface((30 // RENDER_RATIO, 120 // RENDER_RATIO)) self.rect", "= 2 class CakePaddle(pygame.sprite.Sprite): def __init__(self, speed=12): # surf is", "b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top = self.rect4.bottom", "movepos[1] - 
self.speed elif action == 2: movepos[1] = movepos[1]", "= newpos # move other rects too self.rect2 = self.rect2.move(movepos)", "(255, 255, 255), self.rect) pygame.draw.rect(screen, (255, 255, 255), self.rect2) pygame.draw.rect(screen,", "= self.rect.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top", "or bottom edge elif dy > 0: b_rect.bottom = self.rect3.top", "1: movepos[1] = movepos[1] - self.speed elif action == 2:", "= -b_speed[1] return is_collision, b_rect, b_speed elif self.rect2.colliderect(b_rect): is_collision =", "= self.rect4.left b_speed[0] = -b_speed[0] # top or bottom edge", "self.rect2.left b_speed[0] = -b_speed[0] # top or bottom edge elif", "pygame.Surface((30 // RENDER_RATIO, 80 // RENDER_RATIO)) self.rect2 = self.surf2.get_rect() self.surf3", "self.rect3 = self.rect3.move(movepos) self.rect4 = self.rect4.move(movepos) def process_collision(self, b_rect, dx,", "edge elif dy > 0: b_rect.bottom = self.rect2.top b_speed[1] =", "Ball speed along single axis b_speed : Ball speed ignore", "self.speed newpos = self.rect.move(movepos) if area.contains(newpos): self.rect = newpos #", "= -b_speed[1] elif dy < 0: b_rect.top = self.rect2.bottom b_speed[1]", "True if dx > 0: b_rect.right = self.rect.left b_speed[0] =", "-b_speed[1] elif dy < 0: b_rect.top = self.rect3.bottom b_speed[1] =", "self.rect3.move(movepos) self.rect4 = self.rect4.move(movepos) def process_collision(self, b_rect, dx, dy, b_speed,", "# top or bottom edge elif dy > 0: b_rect.bottom", "= speed def reset(self): # self.rect is set from envs", "dy < 0: b_rect.top = self.rect4.bottom b_speed[1] = -b_speed[1] return", "dx, dy : Ball speed along single axis b_speed :", "elif dy > 0: b_rect.bottom = self.rect4.top b_speed[1] = -b_speed[1]", "self.rect.move(movepos) if area.contains(newpos): self.rect = newpos # move other rects", "b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed elif self.rect.colliderect(b_rect): is_collision", "pygame.draw.rect(screen, (255, 255, 255), 
self.rect3) pygame.draw.rect(screen, (255, 255, 255), self.rect4)", "newpos = self.rect.move(movepos) if area.contains(newpos): self.rect = newpos # move", "= self.surf2.get_rect() self.surf3 = pygame.Surface((30 // RENDER_RATIO, 40 // RENDER_RATIO))", "self.rect3.midright = self.rect2.midleft self.rect4.midright = self.rect3.midleft def draw(self, screen): pygame.draw.rect(screen,", "> 0: b_rect.right = self.rect4.left b_speed[0] = -b_speed[0] # top", "< 0: b_rect.top = self.rect3.bottom b_speed[1] = -b_speed[1] return is_collision,", "(255, 255, 255), self.rect3) pygame.draw.rect(screen, (255, 255, 255), self.rect4) def", "def process_collision(self, b_rect, dx, dy, b_speed, paddle_type): \"\"\" Parameters ----------", "b_rect.right = self.rect4.left b_speed[0] = -b_speed[0] # top or bottom", "pygame.Surface((30 // RENDER_RATIO, 40 // RENDER_RATIO)) self.rect3 = self.surf3.get_rect() self.surf4", "= self.rect2.left b_speed[0] = -b_speed[0] # top or bottom edge", "elif dy < 0: b_rect.top = self.rect4.bottom b_speed[1] = -b_speed[1]", "b_rect.right = self.rect3.left b_speed[0] = -b_speed[0] # top or bottom", "RENDER_RATIO)) self.rect3 = self.surf3.get_rect() self.surf4 = pygame.Surface((30 // RENDER_RATIO, 10", "self.rect4.move(movepos) def process_collision(self, b_rect, dx, dy, b_speed, paddle_type): \"\"\" Parameters", "def update(self, area, action): # action: 1 - up, 2", "-b_speed[1] elif dy < 0: b_rect.top = self.rect4.bottom b_speed[1] =", "ball speed \"\"\" if self.rect4.colliderect(b_rect): is_collision = True if dx", "self.rect.left b_speed[0] = -b_speed[0] # top or bottom edge elif", "dy > 0: b_rect.bottom = self.rect4.top b_speed[1] = -b_speed[1] elif", "self.rect = newpos # move other rects too self.rect2 =", "self.rect4.top b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top =", "import pygame RENDER_RATIO = 2 class CakePaddle(pygame.sprite.Sprite): def __init__(self, speed=12):", "= True if dx > 0: b_rect.right = self.rect3.left b_speed[0]", "255), 
self.rect3) pygame.draw.rect(screen, (255, 255, 255), self.rect4) def update(self, area,", "self.rect2) pygame.draw.rect(screen, (255, 255, 255), self.rect3) pygame.draw.rect(screen, (255, 255, 255),", "if dx > 0: b_rect.right = self.rect2.left b_speed[0] = -b_speed[0]", "b_speed, paddle_type): \"\"\" Parameters ---------- b_rect : Ball rect dx,", "> 0: b_rect.bottom = self.rect2.top b_speed[1] = -b_speed[1] elif dy", "is set from envs class self.rect2.midright = self.rect.midleft self.rect3.midright =", "area.contains(newpos): self.rect = newpos # move other rects too self.rect2", "255), self.rect) pygame.draw.rect(screen, (255, 255, 255), self.rect2) pygame.draw.rect(screen, (255, 255,", "dy > 0: b_rect.bottom = self.rect3.top b_speed[1] = -b_speed[1] elif", "self.rect4.colliderect(b_rect): is_collision = True if dx > 0: b_rect.right =", "RENDER_RATIO, 40 // RENDER_RATIO)) self.rect3 = self.surf3.get_rect() self.surf4 = pygame.Surface((30", "movepos[1] = movepos[1] - self.speed elif action == 2: movepos[1]", "self.surf.get_rect() self.surf2 = pygame.Surface((30 // RENDER_RATIO, 80 // RENDER_RATIO)) self.rect2", "= self.rect2.midleft self.rect4.midright = self.rect3.midleft def draw(self, screen): pygame.draw.rect(screen, (255,", "is_collision = True if dx > 0: b_rect.right = self.rect4.left", "os.environ[\"PYGAME_HIDE_SUPPORT_PROMPT\"] = \"hide\" import pygame RENDER_RATIO = 2 class CakePaddle(pygame.sprite.Sprite):", "elif dy < 0: b_rect.top = self.rect2.bottom b_speed[1] = -b_speed[1]", "= [0, 0] if action == 1: movepos[1] = movepos[1]", "b_speed[1] = -b_speed[1] elif dy < 0: b_rect.top = self.rect2.bottom", "0: b_rect.bottom = self.rect4.top b_speed[1] = -b_speed[1] elif dy <", "if dx > 0: b_rect.right = self.rect3.left b_speed[0] = -b_speed[0]", "---------- b_rect : Ball rect dx, dy : Ball speed", "edge elif dy > 0: b_rect.bottom = self.rect4.top b_speed[1] =", "dy < 0: b_rect.top = self.rect2.bottom b_speed[1] = -b_speed[1] return", "b_rect.top = 
self.rect3.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed", "= movepos[1] - self.speed elif action == 2: movepos[1] =", "newpos # move other rects too self.rect2 = self.rect2.move(movepos) self.rect3", "self.rect is set from envs class self.rect2.midright = self.rect.midleft self.rect3.midright", "-b_speed[1] elif dy < 0: b_rect.top = self.rect.bottom b_speed[1] =", "b_rect.right = self.rect.left b_speed[0] = -b_speed[0] # top or bottom", "= -b_speed[1] return is_collision, b_rect, b_speed elif self.rect3.colliderect(b_rect): is_collision =", "self.speed = speed def reset(self): # self.rect is set from", "if action == 1: movepos[1] = movepos[1] - self.speed elif", "b_rect, b_speed elif self.rect3.colliderect(b_rect): is_collision = True if dx >", "b_speed[0] = -b_speed[0] # top or bottom edge elif dy", "self.rect2 = self.rect2.move(movepos) self.rect3 = self.rect3.move(movepos) self.rect4 = self.rect4.move(movepos) def", "dx > 0: b_rect.right = self.rect2.left b_speed[0] = -b_speed[0] #", "self.rect4 = self.rect4.move(movepos) def process_collision(self, b_rect, dx, dy, b_speed, paddle_type):", "of the cake self.surf = pygame.Surface((30 // RENDER_RATIO, 120 //", "-b_speed[1] elif dy < 0: b_rect.top = self.rect2.bottom b_speed[1] =", "b_speed elif self.rect2.colliderect(b_rect): is_collision = True if dx > 0:", "pygame.Surface((30 // RENDER_RATIO, 10 // RENDER_RATIO)) self.rect4 = self.surf4.get_rect() self.speed", "0: b_rect.top = self.rect2.bottom b_speed[1] = -b_speed[1] return is_collision, b_rect,", "------- is_collision: 1 if ball collides with paddle b_rect: new", "// RENDER_RATIO, 40 // RENDER_RATIO)) self.rect3 = self.surf3.get_rect() self.surf4 =", "10 // RENDER_RATIO)) self.rect4 = self.surf4.get_rect() self.speed = speed def", "self.surf4 = pygame.Surface((30 // RENDER_RATIO, 10 // RENDER_RATIO)) self.rect4 =", "from envs class self.rect2.midright = self.rect.midleft self.rect3.midright = self.rect2.midleft self.rect4.midright", 
"ball collides with paddle b_rect: new ball rect b_speed: new", "# self.rect is set from envs class self.rect2.midright = self.rect.midleft", "dx > 0: b_rect.right = self.rect.left b_speed[0] = -b_speed[0] #", "255, 255), self.rect) pygame.draw.rect(screen, (255, 255, 255), self.rect2) pygame.draw.rect(screen, (255,", "elif dy < 0: b_rect.top = self.rect3.bottom b_speed[1] = -b_speed[1]", "> 0: b_rect.right = self.rect.left b_speed[0] = -b_speed[0] # top", "// RENDER_RATIO)) self.rect = self.surf.get_rect() self.surf2 = pygame.Surface((30 // RENDER_RATIO,", "- up, 2 - down movepos = [0, 0] if", "True if dx > 0: b_rect.right = self.rect4.left b_speed[0] =", "\"hide\" import pygame RENDER_RATIO = 2 class CakePaddle(pygame.sprite.Sprite): def __init__(self,", "= -b_speed[1] elif dy < 0: b_rect.top = self.rect.bottom b_speed[1]", "speed def reset(self): # self.rect is set from envs class", "new ball rect b_speed: new ball speed \"\"\" if self.rect4.colliderect(b_rect):", "255), self.rect2) pygame.draw.rect(screen, (255, 255, 255), self.rect3) pygame.draw.rect(screen, (255, 255,", "255), self.rect4) def update(self, area, action): # action: 1 -", "-b_speed[1] return is_collision, b_rect, b_speed elif self.rect2.colliderect(b_rect): is_collision = True", "self.surf3.get_rect() self.surf4 = pygame.Surface((30 // RENDER_RATIO, 10 // RENDER_RATIO)) self.rect4", "action == 1: movepos[1] = movepos[1] - self.speed elif action", "> 0: b_rect.bottom = self.rect.top b_speed[1] = -b_speed[1] elif dy", "0: b_rect.right = self.rect4.left b_speed[0] = -b_speed[0] # top or", "b_speed[1] = -b_speed[1] return is_collision, b_rect, b_speed return False, b_rect,", "b_rect, b_speed elif self.rect2.colliderect(b_rect): is_collision = True if dx >", "Ball rect dx, dy : Ball speed along single axis", "pygame.draw.rect(screen, (255, 255, 255), self.rect4) def update(self, area, action): #", "self.speed elif action == 2: movepos[1] = movepos[1] + self.speed", "= self.rect.bottom b_speed[1] = 
-b_speed[1] return is_collision, b_rect, b_speed return", "b_speed: new ball speed \"\"\" if self.rect4.colliderect(b_rect): is_collision = True", "= self.rect.left b_speed[0] = -b_speed[0] # top or bottom edge", "80 // RENDER_RATIO)) self.rect2 = self.surf2.get_rect() self.surf3 = pygame.Surface((30 //", "the cake self.surf = pygame.Surface((30 // RENDER_RATIO, 120 // RENDER_RATIO))" ]
[ "as Environment from internal_representation_analysis.constants import MINI_BATCH_SIZE class StateDataset(object): def __init__(self,", "None def __eq__(self, other): return self.all_states == other.all_states def split_datasets(self,", "in all_states if s.state_id in val_ids] self.test_set = [s for", "all_states: s.embedding = s.target_eq_obs if not all_targets: self.train_set = all_states[0:int(0.6", "= all_states[int(0.6 * len(all_states)):int( 0.8 * len(all_states))] self.test_set = all_states[int(0.8", "if s.state_id in test_ids] def shuffle_train_set(self): random.shuffle(self.train_set) def get_train_mini_batch(self, start_index):", "= None self.test_set = None def __eq__(self, other): return self.all_states", "set(unique_state_ids[0:int(0.6 * len(unique_state_ids))]) val_ids = set(unique_state_ids[int(0.6 * len(unique_state_ids)):int( 0.8 *", "= states self.train_set = None self.validation_set = None self.test_set =", "s.state_id in train_ids] self.validation_set = [s for s in all_states", "all_targets: self.train_set = all_states[0:int(0.6 * len(all_states))] self.validation_set = all_states[int(0.6 *", "def __init__(self, states): self.all_states = states self.train_set = None self.validation_set", "def get_train_mini_batch(self, start_index): return self.train_set[start_index:start_index + MINI_BATCH_SIZE] def filter_by_indexes(self, indexList):", "s in all_states if s.state_id in test_ids] def shuffle_train_set(self): random.shuffle(self.train_set)", "== other.all_states def split_datasets(self, seed, all_targets=False, test_target_eq_obs=False): all_states = self.all_states[:]", "train_ids] self.validation_set = [s for s in all_states if s.state_id", "val_ids = set(unique_state_ids[int(0.6 * len(unique_state_ids)):int( 0.8 * len(unique_state_ids))]) test_ids =", "__eq__(self, other): return self.all_states == other.all_states def split_datasets(self, seed, all_targets=False,", "set(unique_state_ids[int(0.6 * len(unique_state_ids)):int( 0.8 * 
class StateDataset(object):
    """Holds agent states and partitions them into train/validation/test sets."""

    def __init__(self, states):
        # Full list of state objects; the three subsets below are populated
        # by split_datasets() and remain None until then.
        self.all_states = states
        self.train_set = None
        self.validation_set = None
        self.test_set = None

    def __eq__(self, other):
        # Two datasets are equal iff they wrap equal state lists.
        # NOTE: defining __eq__ makes instances unhashable by default; fine
        # here since datasets are never used as dict keys or set members.
        return self.all_states == other.all_states

    def split_datasets(self, seed, all_targets=False, test_target_eq_obs=False):
        """Shuffle all states with `seed` and split 60/20/20 into train/val/test.

        Args:
            seed: RNG seed so the split is reproducible.
            all_targets: if True, split on unique state_id so every state
                id lands in exactly one subset (no id leaks across subsets).
            test_target_eq_obs: if True, replace each state's embedding with
                its target_eq_obs representation before splitting.
        """
        all_states = self.all_states[:]
        random.seed(seed)
        random.shuffle(all_states)

        if test_target_eq_obs:
            for s in all_states:
                s.embedding = s.target_eq_obs

        if not all_targets:
            self.train_set = all_states[0:int(0.6 * len(all_states))]
            self.validation_set = all_states[int(0.6 * len(all_states)):int(
                0.8 * len(all_states))]
            self.test_set = all_states[int(0.8 * len(all_states)):]
        else:
            # Fix: sort ids before the seeded shuffle. The previous
            # `list(set(...))` order depends on hash randomization, so the
            # same seed could yield different splits across interpreter runs.
            unique_state_ids = sorted(set(s.state_id for s in all_states))
            random.shuffle(unique_state_ids)
            train_ids = set(unique_state_ids[0:int(0.6 * len(unique_state_ids))])
            val_ids = set(unique_state_ids[int(0.6 * len(unique_state_ids)):int(
                0.8 * len(unique_state_ids))])
            test_ids = set(unique_state_ids[int(0.8 * len(unique_state_ids)):])
            self.train_set = [s for s in all_states if s.state_id in train_ids]
            self.validation_set = [s for s in all_states
                                   if s.state_id in val_ids]
            self.test_set = [s for s in all_states if s.state_id in test_ids]

    def shuffle_train_set(self):
        """Shuffle the training set in place (call between epochs)."""
        random.shuffle(self.train_set)

    def get_train_mini_batch(self, start_index):
        """Return the slice of MINI_BATCH_SIZE training states at start_index."""
        return self.train_set[start_index:start_index + MINI_BATCH_SIZE]

    def filter_by_indexes(self, indexList):
        """Keep only the states at the given positions, in the given order."""
        self.all_states = [self.all_states[i] for i in indexList]
import numpy as np
from models import dist_model as dm
from data import data_loader as dl
import argparse
from IPython import embed

# Evaluate a perceptual-distance model on 2AFC / JND benchmark datasets.
parser = argparse.ArgumentParser()
parser.add_argument("--dataset_mode", type=str, default="2afc", help="[2afc,jnd]")
parser.add_argument(
    "--datasets",
    type=str,
    nargs="+",
    default=[
        "val/traditional",
        "val/cnn",
        "val/superres",
        "val/deblur",
        "val/color",
        "val/frameinterp",
    ],
    help="datasets to test - for jnd mode: [val/traditional],[val/cnn]; for 2afc "
    "mode: [train/traditional],[train/cnn],[train/mix],[val/traditional],[val/cnn],[val/color],[val/deblur],[val/frameinterp],[val/superres]",
)
parser.add_argument(
    "--model",
    type=str,
    default="net-lin",
    help="distance model type [net-lin] for linearly calibrated net, [net] for "
    "off-the-shelf network, [l2] for euclidean distance, [ssim] for Structured "
    "Similarity Image Metric",
)
parser.add_argument(
    "--net",
    type=str,
    default="alex",
    help="[squeeze], [alex], or [vgg] for network architectures",
)
parser.add_argument(
    "--colorspace",
    type=str,
    default="Lab",
    help="[Lab] or [RGB] for colorspace to use for l2, ssim model types",
)
parser.add_argument(
    "--batch_size", type=int, default=50, help="batch size to test image patches in"
)
parser.add_argument("--use_gpu", action="store_true", help="turn on flag to use GPU")
parser.add_argument(
    "--model_path",
    type=str,
    default=None,
    help="location of model, will default to ./weights/v[version]/[net_name].pth",
)
parser.add_argument(
    "--from_scratch", action="store_true", help="model was initialized from scratch"
)
parser.add_argument(
    "--train_trunk", action="store_true", help="model trunk was trained/tuned"
)
parser.add_argument(
    "--version",
    type=str,
    default="0.1",
    help="v0.1 is latest, v0.0 was original release",
)
opt = parser.parse_args()

# l2/ssim distances are computed pair-by-pair, so force a batch of one.
if opt.model in ["l2", "ssim"]:
    opt.batch_size = 1

# initialize model
model = dm.DistModel()
# model.initialize(model=opt.model,net=opt.net,colorspace=opt.colorspace,model_path=opt.model_path,use_gpu=opt.use_gpu)
model.initialize(
    model=opt.model,
    net=opt.net,
    colorspace=opt.colorspace,
    model_path=opt.model_path,
    use_gpu=opt.use_gpu,
    pnet_rand=opt.from_scratch,
    pnet_tune=opt.train_trunk,
    version=opt.version,
)
if opt.model in ["net-lin", "net"]:
    print("Testing model [%s]-[%s]" % (opt.model, opt.net))
elif opt.model in ["l2", "ssim"]:
    print("Testing model [%s]-[%s]" % (opt.model, opt.colorspace))

# embed()
# initialize data loader and score the model on each requested dataset
for dataset in opt.datasets:
    data_loader = dl.CreateDataLoader(
        dataset, dataset_mode=opt.dataset_mode, batch_size=opt.batch_size
    )

    # evaluate model on data
    if opt.dataset_mode == "2afc":
        (score, results_verbose) = dm.score_2afc_dataset(data_loader, model.forward)
    elif opt.dataset_mode == "jnd":
        (score, results_verbose) = dm.score_jnd_dataset(data_loader, model.forward)
    else:
        # Fix: an unrecognized mode previously fell through both branches and
        # crashed below with a NameError on `score`; fail fast instead.
        raise ValueError(
            "unknown --dataset_mode %r (expected '2afc' or 'jnd')" % opt.dataset_mode
        )

    # print results
    print(" Dataset [%s]: %.2f" % (dataset, 100.0 * score))
dm.score_jnd_dataset(data_loader, model.forward)", "ssim model types\", ) parser.add_argument( \"--batch_size\", type=int, default=50, help=\"batch size", "to use GPU\") parser.add_argument( \"--model_path\", type=str, default=None, help=\"location of model,", "model = dm.DistModel() # model.initialize(model=opt.model,net=opt.net,colorspace=opt.colorspace,model_path=opt.model_path,use_gpu=opt.use_gpu) model.initialize( model=opt.model, net=opt.net, colorspace=opt.colorspace, model_path=opt.model_path,", "default=[ \"val/traditional\", \"val/cnn\", \"val/superres\", \"val/deblur\", \"val/color\", \"val/frameinterp\", ], help=\"datasets to", "opt.dataset_mode == \"jnd\": (score, results_verbose) = dm.score_jnd_dataset(data_loader, model.forward) # print", "# evaluate model on data if opt.dataset_mode == \"2afc\": (score,", "flag to use GPU\") parser.add_argument( \"--model_path\", type=str, default=None, help=\"location of", "elif opt.model in [\"l2\", \"ssim\"]: print(\"Testing model [%s]-[%s]\" % (opt.model,", "Metric\", ) parser.add_argument( \"--net\", type=str, default=\"alex\", help=\"[squeeze], [alex], or [vgg]", "default=\"net-lin\", help=\"distance model type [net-lin] for linearly calibrated net, [net]", "parser.add_argument( \"--model_path\", type=str, default=None, help=\"location of model, will default to", "embed() # initialize data loader for dataset in opt.datasets: data_loader", "linearly calibrated net, [net] for off-the-shelf network, [l2] for euclidean", "loader for dataset in opt.datasets: data_loader = dl.CreateDataLoader( dataset, dataset_mode=opt.dataset_mode,", "import numpy as np from models import dist_model as dm", "data if opt.dataset_mode == \"2afc\": (score, results_verbose) = dm.score_2afc_dataset(data_loader, model.forward)", "release\", ) opt = parser.parse_args() if opt.model in [\"l2\", \"ssim\"]:", "to use for l2, ssim model types\", ) parser.add_argument( \"--batch_size\",", "v0.0 was original release\", ) opt = 
parser.parse_args() if opt.model", "2afc mode: [train/traditional],[train/cnn],[train/mix],[val/traditional],[val/cnn],[val/color],[val/deblur],[val/frameinterp],[val/superres]\", ) parser.add_argument( \"--model\", type=str, default=\"net-lin\", help=\"distance model", ") parser.add_argument( \"--version\", type=str, default=\"0.1\", help=\"v0.1 is latest, v0.0 was", "dist_model as dm from data import data_loader as dl import", "dl import argparse from IPython import embed parser = argparse.ArgumentParser()", "help=\"[Lab] or [RGB] for colorspace to use for l2, ssim", "argparse from IPython import embed parser = argparse.ArgumentParser() parser.add_argument(\"--dataset_mode\", type=str,", "help=\"distance model type [net-lin] for linearly calibrated net, [net] for", "help=\"model trunk was trained/tuned\" ) parser.add_argument( \"--version\", type=str, default=\"0.1\", help=\"v0.1", "model.initialize( model=opt.model, net=opt.net, colorspace=opt.colorspace, model_path=opt.model_path, use_gpu=opt.use_gpu, pnet_rand=opt.from_scratch, pnet_tune=opt.train_trunk, version=opt.version, )", "model_path=opt.model_path, use_gpu=opt.use_gpu, pnet_rand=opt.from_scratch, pnet_tune=opt.train_trunk, version=opt.version, ) if opt.model in [\"net-lin\",", "[val/traditional],[val/cnn]; for 2afc mode: [train/traditional],[train/cnn],[train/mix],[val/traditional],[val/cnn],[val/color],[val/deblur],[val/frameinterp],[val/superres]\", ) parser.add_argument( \"--model\", type=str, default=\"net-lin\",", "dm.score_jnd_dataset(data_loader, model.forward) # print results print(\" Dataset [%s]: %.2f\" %", "opt.model in [\"l2\", \"ssim\"]: opt.batch_size = 1 # initialize model", "# initialize model model = dm.DistModel() # model.initialize(model=opt.model,net=opt.net,colorspace=opt.colorspace,model_path=opt.model_path,use_gpu=opt.use_gpu) model.initialize( model=opt.model,", "model [%s]-[%s]\" % (opt.model, opt.colorspace)) # embed() # initialize data", "for Structured Similarity Image 
Metric\", ) parser.add_argument( \"--net\", type=str, default=\"alex\",", "use for l2, ssim model types\", ) parser.add_argument( \"--batch_size\", type=int,", ") if opt.model in [\"net-lin\", \"net\"]: print(\"Testing model [%s]-[%s]\" %", "import dist_model as dm from data import data_loader as dl", "default=\"alex\", help=\"[squeeze], [alex], or [vgg] for network architectures\", ) parser.add_argument(", "opt = parser.parse_args() if opt.model in [\"l2\", \"ssim\"]: opt.batch_size =", "architectures\", ) parser.add_argument( \"--colorspace\", type=str, default=\"Lab\", help=\"[Lab] or [RGB] for", "import embed parser = argparse.ArgumentParser() parser.add_argument(\"--dataset_mode\", type=str, default=\"2afc\", help=\"[2afc,jnd]\") parser.add_argument(", "== \"2afc\": (score, results_verbose) = dm.score_2afc_dataset(data_loader, model.forward) elif opt.dataset_mode ==", "is latest, v0.0 was original release\", ) opt = parser.parse_args()", "to test - for jnd mode: [val/traditional],[val/cnn]; for 2afc mode:", "will default to ./weights/v[version]/[net_name].pth\", ) parser.add_argument( \"--from_scratch\", action=\"store_true\", help=\"model was", "in [\"l2\", \"ssim\"]: opt.batch_size = 1 # initialize model model", "calibrated net, [net] for off-the-shelf network, [l2] for euclidean distance,", "initialize model model = dm.DistModel() # model.initialize(model=opt.model,net=opt.net,colorspace=opt.colorspace,model_path=opt.model_path,use_gpu=opt.use_gpu) model.initialize( model=opt.model, net=opt.net,", "if opt.dataset_mode == \"2afc\": (score, results_verbose) = dm.score_2afc_dataset(data_loader, model.forward) elif", "\"--batch_size\", type=int, default=50, help=\"batch size to test image patches in\"", "opt.net)) elif opt.model in [\"l2\", \"ssim\"]: print(\"Testing model [%s]-[%s]\" %", "# print results print(\" Dataset [%s]: %.2f\" % (dataset, 100.0", "for l2, ssim model types\", ) parser.add_argument( \"--batch_size\", type=int, default=50,", "from data 
import data_loader as dl import argparse from IPython", "- for jnd mode: [val/traditional],[val/cnn]; for 2afc mode: [train/traditional],[train/cnn],[train/mix],[val/traditional],[val/cnn],[val/color],[val/deblur],[val/frameinterp],[val/superres]\", )" ]
[ "else: x = np.arange(0,500) y_first = np.mean(data_first, axis=0) y_second =", "x = np.arange(0,100) else: x = np.arange(0,500) y_first = np.mean(data_first,", "plt.grid(True) try: if args.mnist: f = open(os.path.join('./result/result_mnist.pickle')) result = pickle.load(f)", "result = pickle.load(f) f.close() pathnet_first = [] pathnet_second = []", "= pickle.load(f) f.close() cifar_first = [] cifar_second = [] svhn_first", "y_first + y_first_err, color='m', alpha=0.3) plt.fill_between(x, y_second - y_second_err, y_second", "color='g', label='Task B (transfer learning)') plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, fontsize=15)", "color='r', label='Task A') plt.plot(x, y_second, color='g', label='Task B (transfer learning)')", "args = parser.parse_args() def subplot(subplot, data_first, data_second, title): plt.subplot(subplot) if", "y_first_err = np.std(data_first, axis=0) / 2. y_second_err = np.std(data_second, axis=0)", "- y_first_err, y_first + y_first_err, color='m', alpha=0.3) plt.fill_between(x, y_second -", "axes.set_xlim([0, 100]) axes.set_ylim([0, 1.2]) else: axes.set_xlim([0, 500]) axes.set_ylim([0, 0.6]) plt.title(title,", "y = 0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True) try: if args.mnist: f", "plt.show() else: f = open(os.path.join('./result/result_cifar_svhn.pickle')) result = pickle.load(f) f.close() cifar_first", "args.mnist: axes.set_xlim([0, 100]) axes.set_ylim([0, 1.2]) else: axes.set_xlim([0, 500]) axes.set_ylim([0, 0.6])", "y_second_err, y_second + y_second_err, color='c', alpha=0.3) plt.plot(x, y_first, color='r', label='Task", "try: if args.mnist: f = open(os.path.join('./result/result_mnist.pickle')) result = pickle.load(f) f.close()", "parser.add_argument('--mnist', action='store_true', default=False, help='open mnist result') args = parser.parse_args() def", "fontsize=15) axes = plt.gca() if args.mnist: axes.set_xlim([0, 100]) axes.set_ylim([0, 1.2])", "default=False, help='open 
mnist result') args = parser.parse_args() def subplot(subplot, data_first,", "cifar_second = [] svhn_first = [] svhn_second = [] for", "plt.xlabel('Generations',fontsize=15) plt.grid(True) try: if args.mnist: f = open(os.path.join('./result/result_mnist.pickle')) result =", "= open(os.path.join('./result/result_cifar_svhn.pickle')) result = pickle.load(f) f.close() cifar_first = [] cifar_second", "== 'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3]) else: svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211', cifar_first, cifar_second,'CIFAR-10')", "cifar_first = [] cifar_second = [] svhn_first = [] svhn_second", "y_second_err, color='c', alpha=0.3) plt.plot(x, y_first, color='r', label='Task A') plt.plot(x, y_second,", "plt.title(title, fontsize=20, y = 0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True) try: if", "axis=0) / 2. plt.fill_between(x, y_first - y_first_err, y_first + y_first_err,", "import argparse import os import pickle import numpy as np", "2. 
plt.fill_between(x, y_first - y_first_err, y_first + y_first_err, color='m', alpha=0.3)", "= 0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True) try: if args.mnist: f =", "res[0] == 'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3]) else: svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211', cifar_first,", "A') plt.plot(x, y_second, color='g', label='Task B (transfer learning)') plt.legend(bbox_to_anchor=(0.8, 0.3),", "Example') parser.add_argument('--mnist', action='store_true', default=False, help='open mnist result') args = parser.parse_args()", "np.mean(data_first, axis=0) y_second = np.mean(data_second, axis=0) y_first_err = np.std(data_first, axis=0)", "[] for res in result: pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111', pathnet_first, pathnet_second,'MNIST')", "f.close() cifar_first = [] cifar_second = [] svhn_first = []", "plt.style.use('ggplot') parser = argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--mnist', action='store_true', default=False, help='open", "pickle import numpy as np import matplotlib.pyplot as plt plt.style.use('ggplot')", "np import matplotlib.pyplot as plt plt.style.use('ggplot') parser = argparse.ArgumentParser(description='PyTorch MNIST", "y_second - y_second_err, y_second + y_second_err, color='c', alpha=0.3) plt.plot(x, y_first,", "= np.mean(data_second, axis=0) y_first_err = np.std(data_first, axis=0) / 2. 
y_second_err", "help='open mnist result') args = parser.parse_args() def subplot(subplot, data_first, data_second,", "result = pickle.load(f) f.close() cifar_first = [] cifar_second = []", "axes.set_xlim([0, 500]) axes.set_ylim([0, 0.6]) plt.title(title, fontsize=20, y = 0.9) plt.ylabel('Accuracy',fontsize=15)", "np.arange(0,500) y_first = np.mean(data_first, axis=0) y_second = np.mean(data_second, axis=0) y_first_err", "y_first - y_first_err, y_first + y_first_err, color='m', alpha=0.3) plt.fill_between(x, y_second", "[] svhn_second = [] for res in result: if res[0]", "as plt plt.style.use('ggplot') parser = argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--mnist', action='store_true',", "pathnet_second.append(res[3]) subplot('111', pathnet_first, pathnet_second,'MNIST') plt.show() else: f = open(os.path.join('./result/result_cifar_svhn.pickle')) result", "plt.subplot(subplot) if args.mnist: x = np.arange(0,100) else: x = np.arange(0,500)", "axes = plt.gca() if args.mnist: axes.set_xlim([0, 100]) axes.set_ylim([0, 1.2]) else:", "[] cifar_second = [] svhn_first = [] svhn_second = []", "y_second_err = np.std(data_second, axis=0) / 2. plt.fill_between(x, y_first - y_first_err,", "import matplotlib.pyplot as plt plt.style.use('ggplot') parser = argparse.ArgumentParser(description='PyTorch MNIST Example')", "= np.arange(0,100) else: x = np.arange(0,500) y_first = np.mean(data_first, axis=0)", "pathnet_first, pathnet_second,'MNIST') plt.show() else: f = open(os.path.join('./result/result_cifar_svhn.pickle')) result = pickle.load(f)", "= [] for res in result: pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111', pathnet_first,", "if args.mnist: x = np.arange(0,100) else: x = np.arange(0,500) y_first", "f.close() pathnet_first = [] pathnet_second = [] for res in", "ncol=1, fontsize=15) axes = plt.gca() if args.mnist: axes.set_xlim([0, 100]) axes.set_ylim([0,", "axis=0) y_first_err = np.std(data_first, axis=0) / 2. 
y_second_err = np.std(data_second,", "= [] cifar_second = [] svhn_first = [] svhn_second =", "pickle.load(f) f.close() pathnet_first = [] pathnet_second = [] for res", "= np.std(data_second, axis=0) / 2. plt.fill_between(x, y_first - y_first_err, y_first", "result: pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111', pathnet_first, pathnet_second,'MNIST') plt.show() else: f =", "= open(os.path.join('./result/result_mnist.pickle')) result = pickle.load(f) f.close() pathnet_first = [] pathnet_second", "subplot('111', pathnet_first, pathnet_second,'MNIST') plt.show() else: f = open(os.path.join('./result/result_cifar_svhn.pickle')) result =", "plt.fill_between(x, y_first - y_first_err, y_first + y_first_err, color='m', alpha=0.3) plt.fill_between(x,", "if args.mnist: f = open(os.path.join('./result/result_mnist.pickle')) result = pickle.load(f) f.close() pathnet_first", "subplot('211', cifar_first, cifar_second,'CIFAR-10') subplot('212', svhn_first, svhn_second,'cSVHN') plt.show() except IOError: print(\"Result", "y_second, color='g', label='Task B (transfer learning)') plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1,", "axes.set_ylim([0, 1.2]) else: axes.set_xlim([0, 500]) axes.set_ylim([0, 0.6]) plt.title(title, fontsize=20, y", "fontsize=20, y = 0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True) try: if args.mnist:", "result: if res[0] == 'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3]) else: svhn_first.append(res[2]) cifar_second.append(res[3])", "= np.std(data_first, axis=0) / 2. 
y_second_err = np.std(data_second, axis=0) /", "1.2]) else: axes.set_xlim([0, 500]) axes.set_ylim([0, 0.6]) plt.title(title, fontsize=20, y =", "np.arange(0,100) else: x = np.arange(0,500) y_first = np.mean(data_first, axis=0) y_second", "= pickle.load(f) f.close() pathnet_first = [] pathnet_second = [] for", "x = np.arange(0,500) y_first = np.mean(data_first, axis=0) y_second = np.mean(data_second,", "y_first_err, y_first + y_first_err, color='m', alpha=0.3) plt.fill_between(x, y_second - y_second_err,", "= parser.parse_args() def subplot(subplot, data_first, data_second, title): plt.subplot(subplot) if args.mnist:", "learning)') plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, fontsize=15) axes = plt.gca() if", "[] svhn_first = [] svhn_second = [] for res in", "matplotlib.pyplot as plt plt.style.use('ggplot') parser = argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--mnist',", "def subplot(subplot, data_first, data_second, title): plt.subplot(subplot) if args.mnist: x =", "action='store_true', default=False, help='open mnist result') args = parser.parse_args() def subplot(subplot,", "pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111', pathnet_first, pathnet_second,'MNIST') plt.show() else: f = open(os.path.join('./result/result_cifar_svhn.pickle'))", "color='m', alpha=0.3) plt.fill_between(x, y_second - y_second_err, y_second + y_second_err, color='c',", "mnist result') args = parser.parse_args() def subplot(subplot, data_first, data_second, title):", "[] pathnet_second = [] for res in result: pathnet_first.append(res[2]) pathnet_second.append(res[3])", "in result: pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111', pathnet_first, pathnet_second,'MNIST') plt.show() else: f", "label='Task A') plt.plot(x, y_second, color='g', label='Task B (transfer learning)') plt.legend(bbox_to_anchor=(0.8,", "pathnet_second,'MNIST') plt.show() else: f = 
open(os.path.join('./result/result_cifar_svhn.pickle')) result = pickle.load(f) f.close()", "numpy as np import matplotlib.pyplot as plt plt.style.use('ggplot') parser =", "= [] svhn_first = [] svhn_second = [] for res", "= [] pathnet_second = [] for res in result: pathnet_first.append(res[2])", "subplot(subplot, data_first, data_second, title): plt.subplot(subplot) if args.mnist: x = np.arange(0,100)", "pickle.load(f) f.close() cifar_first = [] cifar_second = [] svhn_first =", "import numpy as np import matplotlib.pyplot as plt plt.style.use('ggplot') parser", "2. y_second_err = np.std(data_second, axis=0) / 2. plt.fill_between(x, y_first -", "alpha=0.3) plt.fill_between(x, y_second - y_second_err, y_second + y_second_err, color='c', alpha=0.3)", "= plt.gca() if args.mnist: axes.set_xlim([0, 100]) axes.set_ylim([0, 1.2]) else: axes.set_xlim([0,", "500]) axes.set_ylim([0, 0.6]) plt.title(title, fontsize=20, y = 0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15)", "plt.fill_between(x, y_second - y_second_err, y_second + y_second_err, color='c', alpha=0.3) plt.plot(x,", "plt.gca() if args.mnist: axes.set_xlim([0, 100]) axes.set_ylim([0, 1.2]) else: axes.set_xlim([0, 500])", "f = open(os.path.join('./result/result_cifar_svhn.pickle')) result = pickle.load(f) f.close() cifar_first = []", "= [] for res in result: if res[0] == 'pathnet_cifar_first':", "= [] svhn_second = [] for res in result: if", "subplot('212', svhn_first, svhn_second,'cSVHN') plt.show() except IOError: print(\"Result file does not", "+ y_second_err, color='c', alpha=0.3) plt.plot(x, y_first, color='r', label='Task A') plt.plot(x,", "alpha=0.3) plt.plot(x, y_first, color='r', label='Task A') plt.plot(x, y_second, color='g', label='Task", "MNIST Example') parser.add_argument('--mnist', action='store_true', default=False, help='open mnist result') args =", "svhn_first, svhn_second,'cSVHN') plt.show() except IOError: print(\"Result file does not exist\")", "parser.parse_args() 
def subplot(subplot, data_first, data_second, title): plt.subplot(subplot) if args.mnist: x", "np.mean(data_second, axis=0) y_first_err = np.std(data_first, axis=0) / 2. y_second_err =", "import pickle import numpy as np import matplotlib.pyplot as plt", "argparse import os import pickle import numpy as np import", "loc=2, ncol=1, fontsize=15) axes = plt.gca() if args.mnist: axes.set_xlim([0, 100])", "y_first = np.mean(data_first, axis=0) y_second = np.mean(data_second, axis=0) y_first_err =", "as np import matplotlib.pyplot as plt plt.style.use('ggplot') parser = argparse.ArgumentParser(description='PyTorch", "axis=0) / 2. y_second_err = np.std(data_second, axis=0) / 2. plt.fill_between(x,", "cifar_first, cifar_second,'CIFAR-10') subplot('212', svhn_first, svhn_second,'cSVHN') plt.show() except IOError: print(\"Result file", "svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211', cifar_first, cifar_second,'CIFAR-10') subplot('212', svhn_first, svhn_second,'cSVHN') plt.show() except", "res in result: pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111', pathnet_first, pathnet_second,'MNIST') plt.show() else:", "np.std(data_first, axis=0) / 2. y_second_err = np.std(data_second, axis=0) / 2.", "y_second + y_second_err, color='c', alpha=0.3) plt.plot(x, y_first, color='r', label='Task A')", "= np.mean(data_first, axis=0) y_second = np.mean(data_second, axis=0) y_first_err = np.std(data_first,", "result') args = parser.parse_args() def subplot(subplot, data_first, data_second, title): plt.subplot(subplot)", "/ 2. y_second_err = np.std(data_second, axis=0) / 2. plt.fill_between(x, y_first", "plt plt.style.use('ggplot') parser = argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--mnist', action='store_true', default=False,", "/ 2. 
plt.fill_between(x, y_first - y_first_err, y_first + y_first_err, color='m',", "plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True) try: if args.mnist: f = open(os.path.join('./result/result_mnist.pickle')) result", "argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--mnist', action='store_true', default=False, help='open mnist result') args", "'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3]) else: svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211', cifar_first, cifar_second,'CIFAR-10') subplot('212',", "100]) axes.set_ylim([0, 1.2]) else: axes.set_xlim([0, 500]) axes.set_ylim([0, 0.6]) plt.title(title, fontsize=20,", "os import pickle import numpy as np import matplotlib.pyplot as", "- y_second_err, y_second + y_second_err, color='c', alpha=0.3) plt.plot(x, y_first, color='r',", "for res in result: pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111', pathnet_first, pathnet_second,'MNIST') plt.show()", "[] for res in result: if res[0] == 'pathnet_cifar_first': cifar_first.append(res[2])", "for res in result: if res[0] == 'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3])", "0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True) try: if args.mnist: f = open(os.path.join('./result/result_mnist.pickle'))", "= argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--mnist', action='store_true', default=False, help='open mnist result')", "np.std(data_second, axis=0) / 2. 
plt.fill_between(x, y_first - y_first_err, y_first +", "pathnet_second = [] for res in result: pathnet_first.append(res[2]) pathnet_second.append(res[3]) subplot('111',", "args.mnist: x = np.arange(0,100) else: x = np.arange(0,500) y_first =", "svhn_first = [] svhn_second = [] for res in result:", "import os import pickle import numpy as np import matplotlib.pyplot", "axis=0) y_second = np.mean(data_second, axis=0) y_first_err = np.std(data_first, axis=0) /", "plt.plot(x, y_first, color='r', label='Task A') plt.plot(x, y_second, color='g', label='Task B", "cifar_second,'CIFAR-10') subplot('212', svhn_first, svhn_second,'cSVHN') plt.show() except IOError: print(\"Result file does", "label='Task B (transfer learning)') plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, fontsize=15) axes", "y_first, color='r', label='Task A') plt.plot(x, y_second, color='g', label='Task B (transfer", "in result: if res[0] == 'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3]) else: svhn_first.append(res[2])", "y_second = np.mean(data_second, axis=0) y_first_err = np.std(data_first, axis=0) / 2.", "if res[0] == 'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3]) else: svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211',", "f = open(os.path.join('./result/result_mnist.pickle')) result = pickle.load(f) f.close() pathnet_first = []", "axes.set_ylim([0, 0.6]) plt.title(title, fontsize=20, y = 0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True)", "svhn_second = [] for res in result: if res[0] ==", "cifar_second.append(res[3]) subplot('211', cifar_first, cifar_second,'CIFAR-10') subplot('212', svhn_first, svhn_second,'cSVHN') plt.show() except IOError:", "else: axes.set_xlim([0, 500]) axes.set_ylim([0, 0.6]) plt.title(title, fontsize=20, y = 0.9)", "0.6]) plt.title(title, fontsize=20, y = 0.9) plt.ylabel('Accuracy',fontsize=15) plt.xlabel('Generations',fontsize=15) plt.grid(True) 
try:", "svhn_second.append(res[3]) else: svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211', cifar_first, cifar_second,'CIFAR-10') subplot('212', svhn_first, svhn_second,'cSVHN')", "cifar_first.append(res[2]) svhn_second.append(res[3]) else: svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211', cifar_first, cifar_second,'CIFAR-10') subplot('212', svhn_first,", "0.3), loc=2, ncol=1, fontsize=15) axes = plt.gca() if args.mnist: axes.set_xlim([0,", "(transfer learning)') plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, fontsize=15) axes = plt.gca()", "res in result: if res[0] == 'pathnet_cifar_first': cifar_first.append(res[2]) svhn_second.append(res[3]) else:", "plt.plot(x, y_second, color='g', label='Task B (transfer learning)') plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2,", "open(os.path.join('./result/result_mnist.pickle')) result = pickle.load(f) f.close() pathnet_first = [] pathnet_second =", "else: svhn_first.append(res[2]) cifar_second.append(res[3]) subplot('211', cifar_first, cifar_second,'CIFAR-10') subplot('212', svhn_first, svhn_second,'cSVHN') plt.show()", "color='c', alpha=0.3) plt.plot(x, y_first, color='r', label='Task A') plt.plot(x, y_second, color='g',", "title): plt.subplot(subplot) if args.mnist: x = np.arange(0,100) else: x =", "y_first_err, color='m', alpha=0.3) plt.fill_between(x, y_second - y_second_err, y_second + y_second_err,", "data_first, data_second, title): plt.subplot(subplot) if args.mnist: x = np.arange(0,100) else:", "data_second, title): plt.subplot(subplot) if args.mnist: x = np.arange(0,100) else: x", "pathnet_first = [] pathnet_second = [] for res in result:", "else: f = open(os.path.join('./result/result_cifar_svhn.pickle')) result = pickle.load(f) f.close() cifar_first =", "parser = argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--mnist', action='store_true', default=False, help='open mnist", "plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, 
fontsize=15) axes = plt.gca() if args.mnist:", "+ y_first_err, color='m', alpha=0.3) plt.fill_between(x, y_second - y_second_err, y_second +", "if args.mnist: axes.set_xlim([0, 100]) axes.set_ylim([0, 1.2]) else: axes.set_xlim([0, 500]) axes.set_ylim([0,", "open(os.path.join('./result/result_cifar_svhn.pickle')) result = pickle.load(f) f.close() cifar_first = [] cifar_second =", "args.mnist: f = open(os.path.join('./result/result_mnist.pickle')) result = pickle.load(f) f.close() pathnet_first =", "B (transfer learning)') plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, fontsize=15) axes =", "= np.arange(0,500) y_first = np.mean(data_first, axis=0) y_second = np.mean(data_second, axis=0)" ]
[ "param_lr = base_lr param_weight_decay = weight_decay if \"bias\" in key:", "'lr': param_lr, 'weight_decay': param_weight_decay }) optimizer = torch.optim.SGD(params, base_lr, momentum=momentum)", "): params = [] for key, value in model.named_parameters(): if", "import torch def make_sgd_optimizer( model, base_lr=0.001, bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0,", "if \"bias\" in key: param_lr = base_lr * bias_lr_factor param_weight_decay", "key, value in model.named_parameters(): if not value.requires_grad: continue param_lr =", "base_lr param_weight_decay = weight_decay if \"bias\" in key: param_lr =", "in model.named_parameters(): if not value.requires_grad: continue param_lr = base_lr param_weight_decay", "value.requires_grad: continue param_lr = base_lr param_weight_decay = weight_decay if \"bias\"", "param_weight_decay = weight_decay if \"bias\" in key: param_lr = base_lr", "model, base_lr=0.001, bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0, ): params = []", "model.named_parameters(): if not value.requires_grad: continue param_lr = base_lr param_weight_decay =", "def make_sgd_optimizer( model, base_lr=0.001, bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0, ): params", "= weight_decay if \"bias\" in key: param_lr = base_lr *", "base_lr=0.001, bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0, ): params = [] for", "make_sgd_optimizer( model, base_lr=0.001, bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0, ): params =", "= base_lr * bias_lr_factor param_weight_decay = weight_decay_bias params.append({ 'params': [value],", "\"bias\" in key: param_lr = base_lr * bias_lr_factor param_weight_decay =", "= weight_decay_bias params.append({ 'params': [value], 'lr': param_lr, 'weight_decay': param_weight_decay })", "= base_lr param_weight_decay = weight_decay if \"bias\" in key: param_lr", "continue param_lr = base_lr 
param_weight_decay = weight_decay if \"bias\" in", "param_lr = base_lr * bias_lr_factor param_weight_decay = weight_decay_bias params.append({ 'params':", "'weight_decay': param_weight_decay }) optimizer = torch.optim.SGD(params, base_lr, momentum=momentum) return optimizer", "torch def make_sgd_optimizer( model, base_lr=0.001, bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0, ):", "momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0, ): params = [] for key, value", "= [] for key, value in model.named_parameters(): if not value.requires_grad:", "params.append({ 'params': [value], 'lr': param_lr, 'weight_decay': param_weight_decay }) optimizer =", "bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005, weight_decay_bias=0.0, ): params = [] for key,", "base_lr * bias_lr_factor param_weight_decay = weight_decay_bias params.append({ 'params': [value], 'lr':", "param_lr, 'weight_decay': param_weight_decay }) optimizer = torch.optim.SGD(params, base_lr, momentum=momentum) return", "key: param_lr = base_lr * bias_lr_factor param_weight_decay = weight_decay_bias params.append({", "weight_decay=0.0005, weight_decay_bias=0.0, ): params = [] for key, value in", "if not value.requires_grad: continue param_lr = base_lr param_weight_decay = weight_decay", "value in model.named_parameters(): if not value.requires_grad: continue param_lr = base_lr", "'params': [value], 'lr': param_lr, 'weight_decay': param_weight_decay }) optimizer = torch.optim.SGD(params,", "[] for key, value in model.named_parameters(): if not value.requires_grad: continue", "weight_decay if \"bias\" in key: param_lr = base_lr * bias_lr_factor", "[value], 'lr': param_lr, 'weight_decay': param_weight_decay }) optimizer = torch.optim.SGD(params, base_lr,", "param_weight_decay = weight_decay_bias params.append({ 'params': [value], 'lr': param_lr, 'weight_decay': param_weight_decay", "bias_lr_factor param_weight_decay = weight_decay_bias params.append({ 'params': [value], 'lr': 
param_lr, 'weight_decay':", "<reponame>mingruimingrui/kindler import torch def make_sgd_optimizer( model, base_lr=0.001, bias_lr_factor=2.0, momentum=0.9, weight_decay=0.0005,", "weight_decay_bias params.append({ 'params': [value], 'lr': param_lr, 'weight_decay': param_weight_decay }) optimizer", "params = [] for key, value in model.named_parameters(): if not", "for key, value in model.named_parameters(): if not value.requires_grad: continue param_lr", "* bias_lr_factor param_weight_decay = weight_decay_bias params.append({ 'params': [value], 'lr': param_lr,", "weight_decay_bias=0.0, ): params = [] for key, value in model.named_parameters():", "not value.requires_grad: continue param_lr = base_lr param_weight_decay = weight_decay if", "in key: param_lr = base_lr * bias_lr_factor param_weight_decay = weight_decay_bias" ]
[ "a Python module for designing and using # evolutionary algorithms", "Solution from ..types import Permutation from ..operators import Swap class", "Public License # along with Platypus. If not, see <http://www.gnu.org/licenses/>.", "is free software: you can redistribute it and/or modify #", "<http://www.gnu.org/licenses/>. import unittest from mock import patch from ..core import", "= Permutation(range(10)) solution = Solution(problem) solution.variables[0] = list(range(10)) with patch('random.randrange',", "side_effect=[1, 1, 0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0)", "designing and using # evolutionary algorithms (EAs) and multiobjective evolutionary", "multiobjective evolutionary algorithms # (MOEAs). # # Platypus is free", "version. # # Platypus is distributed in the hope that", "patch('random.randrange', side_effect=[1, 1, 0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1],", "= list(range(10)) with patch('random.randrange', side_effect=[2, 4]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2],", "patch('random.randrange', side_effect=[2, 4]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4], 2)", "test_swap2b(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(2)) solution =", "Python module for designing and using # evolutionary algorithms (EAs)", "Solution(problem) solution.variables[0] = list(range(1)) with patch('random.randrange', side_effect=[0, 0]): result =", "solution = Solution(problem) solution.variables[0] = list(range(10)) with patch('random.randrange', side_effect=[2, 4]):", "..types import Permutation from ..operators import Swap class TestSwap(unittest.TestCase): def", "def test_swap2b(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(2)) solution", 
"with Platypus. If not, see <http://www.gnu.org/licenses/>. import unittest from mock", "GNU General Public License as published by # the Free", "General Public License # along with Platypus. If not, see", "WITHOUT ANY WARRANTY; without even the implied warranty of #", "0) problem.types[0] = Permutation(range(10)) solution = Solution(problem) solution.variables[0] = list(range(10))", "PURPOSE. See the # GNU General Public License for more", "Permutation(range(2)) solution = Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[1,", "Permutation(range(1)) solution = Solution(problem) solution.variables[0] = list(range(1)) with patch('random.randrange', side_effect=[0,", "import Swap class TestSwap(unittest.TestCase): def test_swap10(self): problem = Problem(1, 0)", "= Permutation(range(2)) solution = Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange',", "FOR A PARTICULAR PURPOSE. See the # GNU General Public", "problem.types[0] = Permutation(range(2)) solution = Solution(problem) solution.variables[0] = list(range(2)) with", "problem = Problem(1, 0) problem.types[0] = Permutation(range(1)) solution = Solution(problem)", "the # GNU General Public License for more details. 
#", "patch('random.randrange', side_effect=[0, 1]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0)", "received a copy of the GNU General Public License #", "solution = Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[1, 1,", "under the terms of the GNU General Public License as", "from mock import patch from ..core import Problem, Solution from", "Permutation(range(2)) solution = Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[0,", "Problem(1, 0) problem.types[0] = Permutation(range(2)) solution = Solution(problem) solution.variables[0] =", "# (at your option) any later version. # # Platypus", "# the Free Software Foundation, either version 3 of the", "1]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap2b(self):", "License as published by # the Free Software Foundation, either", "# along with Platypus. If not, see <http://www.gnu.org/licenses/>. import unittest", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "from ..types import Permutation from ..operators import Swap class TestSwap(unittest.TestCase):", "for more details. 
# # You should have received a", "# This file is part of Platypus, a Python module", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "See the # GNU General Public License for more details.", "redistribute it and/or modify # it under the terms of", "with patch('random.randrange', side_effect=[1, 1, 0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1)", "solution.variables[0] = list(range(10)) with patch('random.randrange', side_effect=[2, 4]): result = Swap(1.0).mutate(solution)", "= Problem(1, 0) problem.types[0] = Permutation(range(10)) solution = Solution(problem) solution.variables[0]", "Permutation from ..operators import Swap class TestSwap(unittest.TestCase): def test_swap10(self): problem", "not, see <http://www.gnu.org/licenses/>. import unittest from mock import patch from", "Problem(1, 0) problem.types[0] = Permutation(range(1)) solution = Solution(problem) solution.variables[0] =", "# # Platypus is distributed in the hope that it", "Problem, Solution from ..types import Permutation from ..operators import Swap", "= Problem(1, 0) problem.types[0] = Permutation(range(2)) solution = Solution(problem) solution.variables[0]", "is part of Platypus, a Python module for designing and", "or # (at your option) any later version. # #", "(EAs) and multiobjective evolutionary algorithms # (MOEAs). # # Platypus", "License # along with Platypus. If not, see <http://www.gnu.org/licenses/>. 
import", "self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4], 4) def test_swap2a(self):", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "that it will be useful, # but WITHOUT ANY WARRANTY;", "1) self.assertEqual(result.variables[0][1], 0) def test_swap1(self): problem = Problem(1, 0) problem.types[0]", "= Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap1(self): problem =", "algorithms (EAs) and multiobjective evolutionary algorithms # (MOEAs). # #", "# GNU General Public License for more details. # #", "= Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4], 4)", "either version 3 of the License, or # (at your", "Public License for more details. # # You should have", "Platypus is free software: you can redistribute it and/or modify", "0) def test_swap1(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(1))", "Solution(problem) solution.variables[0] = list(range(10)) with patch('random.randrange', side_effect=[2, 4]): result =", "self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap2b(self): problem = Problem(1, 0)", "0) problem.types[0] = Permutation(range(1)) solution = Solution(problem) solution.variables[0] = list(range(1))", "later version. 
# # Platypus is distributed in the hope", "= Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap2b(self): problem =", "= Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[1, 1, 0]):", "the terms of the GNU General Public License as published", "TestSwap(unittest.TestCase): def test_swap10(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(10))", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "more details. # # You should have received a copy", "Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap2b(self): problem = Problem(1,", "unittest from mock import patch from ..core import Problem, Solution", "Foundation, either version 3 of the License, or # (at", "GNU General Public License # along with Platypus. If not,", "If not, see <http://www.gnu.org/licenses/>. import unittest from mock import patch", "be useful, # but WITHOUT ANY WARRANTY; without even the", "= Solution(problem) solution.variables[0] = list(range(1)) with patch('random.randrange', side_effect=[0, 0]): result", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the #", "1, 0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def", "from ..core import Problem, Solution from ..types import Permutation from", "Software Foundation, either version 3 of the License, or #", "class TestSwap(unittest.TestCase): def test_swap10(self): problem = Problem(1, 0) problem.types[0] =", "problem = Problem(1, 0) problem.types[0] = Permutation(range(10)) solution = Solution(problem)", "Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4], 4) def", "= Solution(problem) solution.variables[0] = list(range(10)) with patch('random.randrange', side_effect=[2, 4]): result", "2) self.assertEqual(solution.variables[0][4], 4) def test_swap2a(self): problem = Problem(1, 0) problem.types[0]", "mock import patch from ..core import Problem, Solution from ..types", "list(range(2)) with patch('random.randrange', side_effect=[0, 1]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1)", "your option) any later version. # # Platypus is distributed", "0) def test_swap2b(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(2))", "the hope that it will be useful, # but WITHOUT", "= Permutation(range(1)) solution = Solution(problem) solution.variables[0] = list(range(1)) with patch('random.randrange',", "# evolutionary algorithms (EAs) and multiobjective evolutionary algorithms # (MOEAs).", "def test_swap10(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(10)) solution", "the GNU General Public License # along with Platypus. 
If", "import patch from ..core import Problem, Solution from ..types import", "= list(range(2)) with patch('random.randrange', side_effect=[1, 1, 0]): result = Swap(1.0).mutate(solution)", "software: you can redistribute it and/or modify # it under", "as published by # the Free Software Foundation, either version", "General Public License for more details. # # You should", "import Problem, Solution from ..types import Permutation from ..operators import", "test_swap10(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(10)) solution =", "without even the implied warranty of # MERCHANTABILITY or FITNESS", "solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[1, 1, 0]): result =", "list(range(2)) with patch('random.randrange', side_effect=[1, 1, 0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0],", "2015-2018 <NAME> # # This file is part of Platypus,", "= Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[0, 1]): result", "version 3 of the License, or # (at your option)", "# # This file is part of Platypus, a Python", "A PARTICULAR PURPOSE. 
See the # GNU General Public License", "the Free Software Foundation, either version 3 of the License,", "side_effect=[0, 1]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def", "self.assertEqual(solution.variables[0][4], 4) def test_swap2a(self): problem = Problem(1, 0) problem.types[0] =", "1) self.assertEqual(result.variables[0][1], 0) def test_swap2b(self): problem = Problem(1, 0) problem.types[0]", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "..operators import Swap class TestSwap(unittest.TestCase): def test_swap10(self): problem = Problem(1,", "test_swap1(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(1)) solution =", "free software: you can redistribute it and/or modify # it", "Platypus is distributed in the hope that it will be", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU", "result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap1(self): problem", "# Platypus is free software: you can redistribute it and/or", "def test_swap1(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(1)) solution", "This file is part of Platypus, a Python module for", "Copyright 2015-2018 <NAME> # # This file is part of", "a copy of the GNU General Public License # along", "copy of the GNU General Public License # along with", "of the GNU General Public License as published by #", "for designing and using # evolutionary algorithms (EAs) and multiobjective", "# Platypus is distributed in the hope that it will", "import Permutation from ..operators import Swap class TestSwap(unittest.TestCase): def test_swap10(self):", "Public License as published by # the Free Software Foundation,", "Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) 
def test_swap1(self): problem = Problem(1,", "<NAME> # # This file is part of Platypus, a", "self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap1(self): problem = Problem(1, 0)", "# but WITHOUT ANY WARRANTY; without even the implied warranty", "Permutation(range(10)) solution = Solution(problem) solution.variables[0] = list(range(10)) with patch('random.randrange', side_effect=[2,", "Free Software Foundation, either version 3 of the License, or", "test_swap2a(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(2)) solution =", "can redistribute it and/or modify # it under the terms", "any later version. # # Platypus is distributed in the", "Platypus. If not, see <http://www.gnu.org/licenses/>. import unittest from mock import", "self.assertEqual(result.variables[0][1], 0) def test_swap1(self): problem = Problem(1, 0) problem.types[0] =", "list(range(10)) with patch('random.randrange', side_effect=[2, 4]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4)", "result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4],", "distributed in the hope that it will be useful, #", "and/or modify # it under the terms of the GNU", "by # the Free Software Foundation, either version 3 of", "License, or # (at your option) any later version. #", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "have received a copy of the GNU General Public License", "of the GNU General Public License # along with Platypus.", "solution.variables[0] = list(range(1)) with patch('random.randrange', side_effect=[0, 0]): result = Swap(1.0).mutate(solution)", "evolutionary algorithms (EAs) and multiobjective evolutionary algorithms # (MOEAs). 
#", "= list(range(1)) with patch('random.randrange', side_effect=[0, 0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0],", "with patch('random.randrange', side_effect=[2, 4]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4],", "in the hope that it will be useful, # but", "(MOEAs). # # Platypus is free software: you can redistribute", "solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[0, 1]): result = Swap(1.0).mutate(solution)", "# Copyright 2015-2018 <NAME> # # This file is part", "it and/or modify # it under the terms of the", "4) self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4], 4) def test_swap2a(self): problem", "You should have received a copy of the GNU General", "0) problem.types[0] = Permutation(range(2)) solution = Solution(problem) solution.variables[0] = list(range(2))", "option) any later version. # # Platypus is distributed in", "it will be useful, # but WITHOUT ANY WARRANTY; without", "import unittest from mock import patch from ..core import Problem,", "# # Platypus is free software: you can redistribute it", "<gh_stars>1-10 # Copyright 2015-2018 <NAME> # # This file is", "def test_swap2a(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(2)) solution", "side_effect=[2, 4]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2],", "= Problem(1, 0) problem.types[0] = Permutation(range(1)) solution = Solution(problem) solution.variables[0]", "module for designing and using # evolutionary algorithms (EAs) and", "License for more details. 
# # You should have received", "patch from ..core import Problem, Solution from ..types import Permutation", "using # evolutionary algorithms (EAs) and multiobjective evolutionary algorithms #", "from ..operators import Swap class TestSwap(unittest.TestCase): def test_swap10(self): problem =", "solution = Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[0, 1]):", "useful, # but WITHOUT ANY WARRANTY; without even the implied", "Platypus, a Python module for designing and using # evolutionary", "part of Platypus, a Python module for designing and using", "# it under the terms of the GNU General Public", "you can redistribute it and/or modify # it under the", "of the License, or # (at your option) any later", "= list(range(2)) with patch('random.randrange', side_effect=[0, 1]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0],", "and using # evolutionary algorithms (EAs) and multiobjective evolutionary algorithms", "self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4], 4) def test_swap2a(self): problem = Problem(1, 0)", "with patch('random.randrange', side_effect=[0, 1]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1],", "hope that it will be useful, # but WITHOUT ANY", "Swap class TestSwap(unittest.TestCase): def test_swap10(self): problem = Problem(1, 0) problem.types[0]", "it under the terms of the GNU General Public License", "the License, or # (at your option) any later version.", "# (MOEAs). 
# # Platypus is free software: you can", "file is part of Platypus, a Python module for designing", "result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap2b(self): problem", "4) def test_swap2a(self): problem = Problem(1, 0) problem.types[0] = Permutation(range(2))", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "# You should have received a copy of the GNU", "..core import Problem, Solution from ..types import Permutation from ..operators", "problem = Problem(1, 0) problem.types[0] = Permutation(range(2)) solution = Solution(problem)", "problem.types[0] = Permutation(range(1)) solution = Solution(problem) solution.variables[0] = list(range(1)) with", "PARTICULAR PURPOSE. See the # GNU General Public License for", "2) self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4], 4) def test_swap2a(self): problem = Problem(1,", "4]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][2], 4) self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2], 2)", "Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[0, 1]): result =", "see <http://www.gnu.org/licenses/>. import unittest from mock import patch from ..core", "the GNU General Public License as published by # the", "0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 1) self.assertEqual(result.variables[0][1], 0) def test_swap1(self):", "self.assertEqual(result.variables[0][1], 0) def test_swap2b(self): problem = Problem(1, 0) problem.types[0] =", "Problem(1, 0) problem.types[0] = Permutation(range(10)) solution = Solution(problem) solution.variables[0] =", "Solution(problem) solution.variables[0] = list(range(2)) with patch('random.randrange', side_effect=[1, 1, 0]): result", "GNU General Public License for more details. 
# # You", "list(range(1)) with patch('random.randrange', side_effect=[0, 0]): result = Swap(1.0).mutate(solution) self.assertEqual(result.variables[0][0], 0)", "(at your option) any later version. # # Platypus is", "of Platypus, a Python module for designing and using #", "modify # it under the terms of the GNU General", "terms of the GNU General Public License as published by", "along with Platypus. If not, see <http://www.gnu.org/licenses/>. import unittest from", "is distributed in the hope that it will be useful,", "3 of the License, or # (at your option) any", "details. # # You should have received a copy of", "FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General", "solution = Solution(problem) solution.variables[0] = list(range(1)) with patch('random.randrange', side_effect=[0, 0]):", "algorithms # (MOEAs). # # Platypus is free software: you", "self.assertEqual(result.variables[0][4], 2) self.assertEqual(solution.variables[0][2], 2) self.assertEqual(solution.variables[0][4], 4) def test_swap2a(self): problem =", "# # You should have received a copy of the", "should have received a copy of the GNU General Public", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "General Public License as published by # the Free Software", "problem.types[0] = Permutation(range(10)) solution = Solution(problem) solution.variables[0] = list(range(10)) with", "published by # the Free Software Foundation, either version 3", "and multiobjective evolutionary algorithms # (MOEAs). # # Platypus is", "will be useful, # but WITHOUT ANY WARRANTY; without even", "evolutionary algorithms # (MOEAs). # # Platypus is free software:" ]
[ "= tf.meshgrid(tf.linspace(0.0, _width_f - 1.0, _width), tf.linspace(0.0 , _height_f -", "tf.cast(_height, tf.float32) _width_f = tf.cast(_width, tf.float32) _wrap_mode = wrap_mode output", "else: return None # 修剪偏移量x, 让它在0到width-1+2*edge_size之间(因为偏移量不能太大,要小于等于padding之后). x = tf.clip_by_value(x, 0.0,", "如果包围方式是border, 那么边界长度是1, 在h和w维两侧加一排0 if _wrap_mode == 'border': _edge_size = 1", "(1) in ref [1] x_t, y_t = tf.meshgrid(tf.linspace(0.0, _width_f -", "arraay([[0., 1., 2., 0., 1., 2., 0., 1., 2., 0.,", "+ _edge_size elif _wrap_mode == 'edge': _edge_size = 0 else:", "float32) >>> x = tf.linspace(0.0, 2.0, 3) >>> y =", "1., 2., 0., 1., 2., 0., 1., 2., 0., 1.,", "w+2*e, w+2*e, ..., w+2*e, . . (h + 2 *", "0., 1., 2., 0., 1., 2., 0., 1., 2., 0.,", "* (_height + 2 * _edge_size) # 计算偏移量索引的基,先得到[0,1,2,...,batch],再将它乘宽度,变成 # [0,dim1,2*dim1,...,batch*dim1],然后重复原图分辨率,变成", "1., 2., 0., 1., 2.]], dtype=float32) >>> sess.run(y_t_flat) array([[0., 0.,", "], dtype = float32) >>> x = tf.linspace(0.0, 2.0, 3)", "(1, -1)) y_t_flat = tf.reshape(y_t, (1, -1)) x_t_flat = tf.tile(x_t_flat,", "3) >>> sess.run(x) array([0., 1., 2. 
], dtype = float32)", "乘了dim2之后变成 # [0, 0, ..., 0, w+2*e, w+2*e, ..., w+2*e,", "= tf.floor(y) x1_f = x0_f + 1 # 将向下取整的x y变成整数,", "dtype=float32) >>> x_t_flat = tf.tile(x_t_flat, tf.stack([2,1])) >>> sess.run(x_t_flat) arraay([[0., 1.,", "<gh_stars>0 #!/usr/bin/env python # -*- coding: utf-8 -*-are not covered", "return weight_l * pix_l + weight_r * pix_r # get_disp函数生成视差图后,调用插值函数获得更好的图.", ">>> x_t_flat = tf.tile(x_t_flat, tf.stack([2,1])) >>> sess.run(x_t_flat) arraay([[0., 1., 2.,", "# 将向下取整的x y变成整数, 向上取整的x不能大于padding之后的宽度减1 # cast: 类型转换 x0 = tf.cast(x0_f,", "with tf.variable_scope('_interpolate'): # handle both texture border types _edge_size =", "4., 4.]], dtype=float32) >>> x_t_flat = tf.tile(x_t_flat, tf.stack([2,1])) >>> sess.run(x_t_flat)", "* e)] # 加上base之后得到了考虑了batch,height之后的索引 base_y0 = base + y0 *", "1., 2.], [0., 1., 2.], [0., 1., 2.], [0., 1.,", "x = tf.linspace(0.0, 2.0, 3) >>> sess.run(x) array([0., 1., 2.", "pix_l = tf.gather(im_flat, idx_l) pix_r = tf.gather(im_flat, idx_r) # 计算双线性差值的系数x1-1和x-x0", "[1] x_t, y_t = tf.meshgrid(tf.linspace(0.0, _width_f - 1.0, _width), tf.linspace(0.0", "2., 0., 1., 2., 0., 1., 2.]], dtype=float32) >>> sess.run(y_t_flat)", "2., 0., 1., 2., 0., 1., 2., 0., 1., 2.],", "2., 3., 3., 3., 4., 4., 4.]], dtype=float32) >>> x_t_flat", "tf.reshape(rep, [-1]) def _interpolate(im, x, y): #插值函数 with tf.variable_scope('_interpolate'): #", "wrap_mode='border', name='bilinear_sampler', **kwargs): ''' 一维双线性采样: x_offset--输入X上偏移量的图 重复函数 : 先将一维的x后面扩展一个维度, 然后在扩展的维度上复制相应的值,", "num_channels] ''' _num_batch = tf.shape(input_images)[0] _height = tf.shape(input_images)[1] _width =", "2.], [0., 1., 2.], [0., 1., 2.], [0., 1., 2.]],", "absolute_import, division, print_function import tensorflow as tf def bilinear_sampler_1d_h(input_images, x_offset,", ">>> sess.run(x) array([0., 1., 2. 
def bilinear_sampler_1d_h(input_images, x_offset, wrap_mode='border', name='bilinear_sampler', **kwargs):
    """Horizontally (1-D) bilinear-sample `input_images` by a per-pixel x offset.

    Each output pixel at (b, y, x) is sampled from the input at
    (b, y, x + x_offset[b, y, x] * width) using linear interpolation along
    the x axis only; y stays on integer rows, so no vertical interpolation
    is needed. Typically used to warp an image by a disparity map.

    Args:
        input_images: 4-D tensor, [num_batch, height, width, num_channels].
        x_offset: tensor reshapeable to one offset per output pixel
            ([num_batch, height, width]); the horizontal shift expressed as
            a fraction of the image width (e.g. a disparity map).
        wrap_mode: 'border' pads one zero pixel around h and w before
            sampling; 'edge' samples the unpadded image (out-of-range x is
            clamped to the last column).
        name: variable-scope name for the sampling ops.
        **kwargs: unused; accepted for call-site compatibility.

    Returns:
        A tensor shaped like `input_images` containing the resampled image.

    Raises:
        ValueError: if `wrap_mode` is neither 'border' nor 'edge'
            (raised when the graph is built).
    """

    def _repeat(x, n_repeats):
        # [a, b, c] -> [a, ..., a, b, ..., b, c, ..., c], each element
        # repeated n_repeats times.
        with tf.variable_scope('_repeat'):
            rep = tf.tile(tf.expand_dims(x, 1), [1, n_repeats])
            return tf.reshape(rep, [-1])

    def _interpolate(im, x, y):
        # Linearly interpolate `im` along x at fractional coordinates (x, y).
        # y is expected to be integral here (it comes from a meshgrid).
        with tf.variable_scope('_interpolate'):
            if _wrap_mode == 'border':
                # Pad one zero pixel on each side of h and w so samples that
                # fall outside the image read the border padding; shift the
                # coordinates to account for the padding.
                _edge_size = 1
                im = tf.pad(im, [[0, 0], [1, 1], [1, 1], [0, 0]], mode='CONSTANT')
                x = x + _edge_size
                y = y + _edge_size
            elif _wrap_mode == 'edge':
                _edge_size = 0
            else:
                # Fail loudly at graph-construction time instead of silently
                # returning None (the original behavior), which only blows up
                # later with an opaque error downstream.
                raise ValueError("unknown wrap_mode: %r (expected 'border' or 'edge')"
                                 % (_wrap_mode,))

            # Clamp x into the (possibly padded) image width.
            x = tf.clip_by_value(x, 0.0, _width_f - 1 + 2 * _edge_size)

            # Integer pixel columns left (x0) and right (x1) of the sample.
            x0_f = tf.floor(x)
            y0_f = tf.floor(y)
            x1_f = x0_f + 1
            x0 = tf.cast(x0_f, tf.int32)
            y0 = tf.cast(y0_f, tf.int32)
            # The right neighbour must not step past the last padded column.
            x1 = tf.cast(tf.minimum(x1_f, _width_f - 1 + 2 * _edge_size), tf.int32)

            # Flattened-image strides (of the padded image):
            # dim2 = row stride, dim1 = per-image stride.
            dim2 = (_width + 2 * _edge_size)
            dim1 = (_width + 2 * _edge_size) * (_height + 2 * _edge_size)
            # One base offset per output pixel selecting its batch image
            # ([0,...,0, dim1,...,dim1, 2*dim1,...] — one entry per pixel).
            base = _repeat(tf.range(_num_batch) * dim1, _height * _width)
            # Add the row offset to get the start of each sample's row.
            base_y0 = base + y0 * dim2
            # Flat indices of the left and right neighbouring pixels.
            idx_l = base_y0 + x0
            idx_r = base_y0 + x1

            # Gather both neighbours from the image flattened to
            # [batch*h*w, channels].
            im_flat = tf.reshape(im, tf.stack([-1, _num_channels]))
            pix_l = tf.gather(im_flat, idx_l)
            pix_r = tf.gather(im_flat, idx_r)

            # Linear interpolation weights (x1 - x) and (x - x0).
            weight_l = tf.expand_dims(x1_f - x, 1)
            weight_r = tf.expand_dims(x - x0_f, 1)
            return weight_l * pix_l + weight_r * pix_r

    def _transform(input_images, x_offset):
        # Build the per-pixel sampling grid, shift it horizontally by
        # x_offset * width, then interpolate.
        with tf.variable_scope('transform'):
            # Pixel-coordinate meshgrid over [0, w-1] x [0, h-1],
            # eq (1) in ref [1].
            x_t, y_t = tf.meshgrid(tf.linspace(0.0, _width_f - 1.0, _width),
                                   tf.linspace(0.0, _height_f - 1.0, _height))

            # Flatten and replicate the grid once per batch element.
            x_t_flat = tf.reshape(x_t, (1, -1))
            y_t_flat = tf.reshape(y_t, (1, -1))
            x_t_flat = tf.tile(x_t_flat, tf.stack([_num_batch, 1]))
            y_t_flat = tf.tile(y_t_flat, tf.stack([_num_batch, 1]))
            x_t_flat = tf.reshape(x_t_flat, [-1])
            y_t_flat = tf.reshape(y_t_flat, [-1])

            # Apply the horizontal offset, scaled from width-fractions to
            # pixels.
            x_t_flat = x_t_flat + tf.reshape(x_offset, [-1]) * _width_f

            input_transformed = _interpolate(input_images, x_t_flat, y_t_flat)
            output = tf.reshape(
                input_transformed,
                tf.stack([_num_batch, _height, _width, _num_channels]))
            return output

    with tf.variable_scope(name):
        # Dynamic image dimensions, shared with the nested helpers above
        # via closure.
        _num_batch = tf.shape(input_images)[0]
        _height = tf.shape(input_images)[1]
        _width = tf.shape(input_images)[2]
        _num_channels = tf.shape(input_images)[3]
        _height_f = tf.cast(_height, tf.float32)
        _width_f = tf.cast(_width, tf.float32)
        _wrap_mode = wrap_mode
        output = _transform(input_images, x_offset)
        return output
(h + 2 * e) * (w", "tf.variable_scope(name): ''' [num_batch, height, width, num_channels] ''' _num_batch = tf.shape(input_images)[0]", "def _repeat(x, n_repeats): with tf.variable_scope('_repeat'): rep = tf.tile(tf.expand_dims(x, 1), [1,", "tf.tile(x_t_flat, tf.stack([_num_batch, 1])) y_t_flat = tf.tile(y_t_flat, tf.stack([_num_batch, 1])) x_t_flat =", "(1, -1)) >>> y_t_flat = tf.reshape(y_t, (1, -1)) >>> sess.run(x_t_flat)", "-*- coding: utf-8 -*-are not covered by the UCLB ACP-A", "+ 2 * _edge_size) # 向下取整x,y然后x加1向上取整x x0_f = tf.floor(x) y0_f", "3., 3.], [4., 4., 4.]], dtype=float32) >>> x_t_flat = tf.reshape(x_t,", "2., 2., 2., 3., 3., 3., 4., 4., 4.]], dtype=float32)", "weight_r * pix_r # get_disp函数生成视差图后,调用插值函数获得更好的图. def _transform(input_images, x_offset): ''' 转换函数首先调用meshgrid生成关于X轴和Y轴的索引", "2 * e)] # 加上base之后得到了考虑了batch,height之后的索引 base_y0 = base + y0", "1], [1, 1], [0, 0]], mode='CONSTANT') x = x +", "x, 1) weight_r = tf.expand_dims(x - x0_f, 1) # 利用双线性差值方法计算像素值", "mode='CONSTANT') x = x + _edge_size y = y +", ". . batch * dim, batch * dim, ......, batch", "* _edge_size), tf.int32) # 第二维也就是宽度维的宽是padding之后的宽 dim2 = (_width + 2", "# [0, 0, ..., 0, w+2*e, w+2*e, ..., w+2*e, .", "[1, 1], [1, 1], [0, 0]], mode='CONSTANT') x = x", "[0., 1., 2.], [0., 1., 2.], [0., 1., 2.], [0.,", "+ x1 # 将图变成[batch*w*h,channel]的形状 im_flat = tf.reshape(im, tf.stack([-1, _num_channels])) #", "3.], [4., 4., 4.]], dtype=float32) >>> x_t_flat = tf.reshape(x_t, (1,", "idx_l) pix_r = tf.gather(im_flat, idx_r) # 计算双线性差值的系数x1-1和x-x0 weight_l = tf.expand_dims(x1_f", "tf.cast(tf.minimum(x1_f, _width_f - 1 + 2 * _edge_size), tf.int32) #", "tf.pad(im, [[0, 0], [1, 1], [1, 1], [0, 0]], mode='CONSTANT')", "+ 2 * e, ..., h + 2 * e]", "_edge_size), tf.int32) # 第二维也就是宽度维的宽是padding之后的宽 dim2 = (_width + 2 *", "get_disp函数生成视差图后,调用插值函数获得更好的图. 
def _transform(input_images, x_offset): ''' 转换函数首先调用meshgrid生成关于X轴和Y轴的索引 exsamples: 假设_width=3,经过linspace(0.0,_width_f-1.0,_width)是[ 0., 1.,", "- 1.0 , _height)) x_t_flat = tf.reshape(x_t, (1, -1)) y_t_flat", "[3., 3., 3.], [4., 4., 4.]], dtype=float32) >>> x_t_flat =", "_edge_size) # 第一维也就是图像维的宽是padding之后的分辨率 dim1 = (_width + 2 * _edge_size)", "[0, 0]], mode='CONSTANT') x = x + _edge_size y =", "# 将图变成[batch*w*h,channel]的形状 im_flat = tf.reshape(im, tf.stack([-1, _num_channels])) # 利用tf.gather根据左右侧点的索引重新排列图,得到重排之后的左右像素 pix_l", "- 1 + 2 * _edge_size), tf.int32) # 第二维也就是宽度维的宽是padding之后的宽 dim2", "pix_r = tf.gather(im_flat, idx_r) # 计算双线性差值的系数x1-1和x-x0 weight_l = tf.expand_dims(x1_f -", "= tf.reshape(y_t, (1, -1)) x_t_flat = tf.tile(x_t_flat, tf.stack([_num_batch, 1])) y_t_flat", "= tf.shape(input_images)[1] _width = tf.shape(input_images)[2] _num_channels = tf.shape(input_images)[3] _height_f =", "input_transformed = _interpolate(input_images, x_t_flat, y_t_flat) output = tf.reshape( input_transformed, tf.stack([_num_batch,", "0 else: return None # 修剪偏移量x, 让它在0到width-1+2*edge_size之间(因为偏移量不能太大,要小于等于padding之后). x = tf.clip_by_value(x,", "+ 2 * e)] # 加上base之后得到了考虑了batch,height之后的索引 base_y0 = base +", "tf.int32) y0 = tf.cast(y0_f, tf.int32) x1 = tf.cast(tf.minimum(x1_f, _width_f -", "tf.linspace(0.0, 2.0, 3) >>> sess.run(x) array([0., 1., 2. 
], dtype", "division, print_function import tensorflow as tf def bilinear_sampler_1d_h(input_images, x_offset, wrap_mode='border',", "batch * dim, batch * dim, ......, batch * dim]", "# 这个索引加上向上下取整的x索引和向上取整的x索引就得到了现在点的左侧点和右侧点 idx_l = base_y0 + x0 idx_r = base_y0", "output = tf.reshape( input_transformed, tf.stack([_num_batch, _height, _width, _num_channels])) return output", "y_t_flat = tf.tile(y_t_flat, tf.stack([_num_batch, 1])) x_t_flat = tf.reshape(x_t_flat, [-1]) y_t_flat", "base = _repeat(tf.range(_num_batch) * dim1, _height * _width) # 将y的偏移乘以dim2,也就是乘以宽度,这样就得到加上y之后的基", "y_t_flat) output = tf.reshape( input_transformed, tf.stack([_num_batch, _height, _width, _num_channels])) return", "base + y0 * dim2 # 这个索引加上向上下取整的x索引和向上取整的x索引就得到了现在点的左侧点和右侧点 idx_l = base_y0", "python # -*- coding: utf-8 -*-are not covered by the", "[num_batch, height, width, num_channels] ''' _num_batch = tf.shape(input_images)[0] _height =", "重复函数 : 先将一维的x后面扩展一个维度, 然后在扩展的维度上复制相应的值, 随后将其转成一维的值, exsamples:[1,2,3] --> [1,1,2,2,3,3] ''' def" ]
[ "object with phase closest to phase1 ''' index = np.argmin([", "in self.spectra]) return self.spectra[index] #define function to store new spectra", "+ str(JSON)) json_data = json.load(file) spectra_data = json_data[JSON[0:-5]]['spectra'] spectra_data =", "maximum brightness self.maximum = maximum #initiate empty list to hold", "pandas dataframe df = pd.DataFrame() df['Flux'] = fluxes df['Wavelength'] =", "Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z), z, MJD_max) if spectra.data is None:", "date of B band maximum brightness for SN in MJD", "= json.load(file) spectra_data = json_data[JSON[0:-5]]['spectra'] spectra_data = np.array(spectra_data) for i", "phase of observation self.phase = float(epoch) - float(MJD_max) class Lightcurve():", "(float) - dopplershifted wavelength of absorption Returns: float - corresponding", "supernovae = Supernovae(str(JSON[0:-5]), z, MJD_max) #Load OSN json data file", "astropy import units from scipy.optimize import curve_fit from scipy.interpolate import", "(list) - 2 item list containing boundaries of region used", "Spectra self.data= Unpack_Spectra(Spectra, z) #store epoch of obseravation self.epoch =", "hold Spectra objects self.spectra = [] self.lightcurves = [] #define", "''' Spectra (string) - path to JSON formatted spectra file", "import interp1d import scipy.integrate as integrate from astropy.time import Time", "self.spectra.sort(key= lambda x: x.phase) #define function to store lightcurve def", "SN maximum (float) - date of B band maximum in", "- observed flux z (int) - redshift Returns: int -", "MJD_max, z): ''' Function to create Supernovae object for given", "= [float(x[0]) for x in Spectra] #Extract Fluxes fluxes =", "OSN z (float) - redshift of SN normalizationn (list) -", "Lightcurve(): def __init__(self, times, fluxes, error, band): self.band = band", "spectra closest to given phase def find_spectra(self, phase1): ''' Args:", "wavelength and flux Flux is corrected for redshift and 
normalized", "for flux in fluxes] #Extract fluxes in normalization range rel_flux_range", "Args: Spectra - one epoch of spectral data in JSON", "error, band): self.band = band self.data = pd.DataFrame(list(zip(times, fluxes, error)),", "- String of SN name redshift (float) - redshift of", "for spectra of interest Returns: Supernovae - Supernovae object with", "fluxes, error, band): self.band = band self.data = pd.DataFrame(list(zip(times, fluxes,", "- one epoch of spectral data in JSON format from", "normalizationn (list) - 2 item list containing boundaries of region", "return spectra closest to given phase def find_spectra(self, phase1): '''", "given JSON data file from OSN Args: JSON (str) -", "dopplershifted wavelength of absorption Returns: float - corresponding absorption velocity", "import matplotlib.pyplot as plt from astropy.io import ascii import json", "in MJD ''' #Store name of SN self.name = name", "(int) - redshift Returns: int - redshift corrected flux '''", "name of SN self.name = name #Store redshift of SN", "date if spectra_object in self.spectra: self.spectra.sort(key= lambda x: x.phase) print('already", "- redshift of SN normalizationn (list) - 2 item list", "for x in Spectra] #correct fluxes for redshift fluxes =", "= [x for x in Spectra if (float(x[0])>normalization[0]) & (float(x[0])<normalization[1])]", "in range(len(spectra_data)): spectra = Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z), z, MJD_max)", "in Spectra self.data= Unpack_Spectra(Spectra, z) #store epoch of obseravation self.epoch", "self.band = band self.data = pd.DataFrame(list(zip(times, fluxes, error)), columns =", "and corrects flux for redshift, and normalizes flux def Unpack_Spectra(Spectra,", "store in Spectra self.data= Unpack_Spectra(Spectra, z) #store epoch of obseravation", "redshift corrected flux ''' flux_emit = (z * flux_obs) +", "- redshift Returns: int - redshift corrected flux ''' flux_emit", "Returns: Spectra object - Spectra object with 
phase closest to", "redshift, change wavelength to SN restframe, Normalize flux and store", "return None #Calculate average flux in this range flux_sum =", "boundaries of region used for normalization Returns: Pandas DataFrame -", "Pandas DataFrame - 2 column dataframe: wavelength and flux Flux", "error)), columns = ['times', 'flux', 'err']) #Create Supernovae class to", "SN self.name = name #Store redshift of SN self.redshift =", "json.load(file) spectra_data = json_data[JSON[0:-5]]['spectra'] spectra_data = np.array(spectra_data) for i in", "[float(x[0]) for x in Spectra] #Extract Fluxes fluxes = [float(x[1])", "maximum): ''' name (str) - String of SN name redshift", "is converted to SN restframe ''' #Extract Wavelengths wavelengths =", "Returns: float - MJD value of given calendar date '''", "relevant spectra from OSN JSON data file def create_SN_object(JSON, MJD_max,", "Wavelengths wavelengths = [float(x[0]) for x in Spectra] #Extract Fluxes", "None: continue else: supernovae.store_spectra(spectra) return supernovae #Define function to convert", "self.spectra]) return self.spectra[index] #define function to store new spectra def", "restframe wavelengths = [wavelength / float(1 + z) for wavelength", "flux_obs) + flux_obs return flux_emit #Define function to get relevant", "import Spectrum1D from astropy import units from scipy.optimize import curve_fit", "(float) - date of B band maximum in MJD '''", "with phase closest to phase1 ''' index = np.argmin([ abs(x.phase", "fluxes] #Extract fluxes in normalization range rel_flux_range = [x for", "fluxes = [float(x[1]) for x in Spectra] #correct fluxes for", "range rel_flux_range = [x for x in Spectra if (float(x[0])>normalization[0])", "redshift, and normalizes flux def Unpack_Spectra(Spectra, z, normalization = [5000,6000]):", "MJD ''' #Store name of SN self.name = name #Store", "return flux_emit #Define function to get relevant spectra from OSN", "flux_obs (int) - observed flux z (int) - redshift Returns:", 
"libraries import numpy as np import pandas as pd import", "abs(x.phase - phase1) for x in self.spectra]) return self.spectra[index] #define", "epoch of spectral data in JSON format from OSN z", "SN restframe ''' #Extract Wavelengths wavelengths = [float(x[0]) for x", "lightcurve_object): if lightcurve_object in self.lightcurves: print('already exists') else: self.lightcurves.append(lightcurve_object) #define", "__init__(self, times, fluxes, error, band): self.band = band self.data =", "wavelengths] #store in pandas dataframe df = pd.DataFrame() df['Flux'] =", "[float(flux) / average_flux for flux in fluxes] #convert wavelength to", "in self.lightcurves: print('already exists') else: self.lightcurves.append(lightcurve_object) #define function that converts", "self.maximum = maximum #initiate empty list to hold Spectra objects", "from OSN Args: JSON (str) - path to OSN JSON", "= [float(flux) / average_flux for flux in fluxes] #convert wavelength", "''' velocity = ((restframe - dopplershifted) / np.float(restframe))* c return", "#Make sure there rel_flux_range isnt empty if len(rel_flux_range) == 0:", "z (float) - redshift of SN normalizationn (list) - 2", "''' #correct flux for redshift, change wavelength to SN restframe,", "(int) - phase for spectra of interest Returns: Supernovae -", "from scipy.interpolate import interp1d import scipy.integrate as integrate from astropy.time", ", MJD_max): ''' Spectra (string) - path to JSON formatted", "to hold Spectra objects self.spectra = [] self.lightcurves = []", "data file def create_SN_object(JSON, MJD_max, z): ''' Function to create", "#define function to return spectra closest to given phase def", "normalized Wavelength is converted to SN restframe ''' #Extract Wavelengths", "- float(MJD_max) class Lightcurve(): def __init__(self, times, fluxes, error, band):", "phase1 ''' index = np.argmin([ abs(x.phase - phase1) for x", "x in rel_flux_range: flux_sum += float(x[1]) average_flux = flux_sum /", "return supernovae 
#Define function to convert calendar date to MJD", "flux in this range flux_sum = 0 for x in", "matplotlib.pyplot as plt from astropy.io import ascii import json from", "2 column dataframe: wavelength and flux Flux is corrected for", "df['Flux'] = fluxes df['Wavelength'] = wavelengths return df def correct_flux(flux_obs,", "''' Function to create Supernovae object for given JSON data", "0 for x in rel_flux_range: flux_sum += float(x[1]) average_flux =", "- redshift of SN maximum (float) - date of B", "phase def find_spectra(self, phase1): ''' Args: phase1 (float )- phase", "flux Flux is corrected for redshift and normalized Wavelength is", "Fluxes fluxes = [float(x[1]) for x in Spectra] #correct fluxes", "relevant libraries import numpy as np import pandas as pd", "__init__(self, Spectra, epoch, z , MJD_max): ''' Spectra (string) -", "spectral data in JSON format from OSN z (float) -", "flux ''' flux_emit = (z * flux_obs) + flux_obs return", "interp1d import scipy.integrate as integrate from astropy.time import Time from", "find_spectra(self, phase1): ''' Args: phase1 (float )- phase of interest", "flux in fluxes] #convert wavelength to restframe wavelengths = [wavelength", "Spectra object with phase closest to phase1 ''' index =", "''' Args: flux_obs (int) - observed flux z (int) -", "+ flux_obs return flux_emit #Define function to get relevant spectra", "range(len(spectra_data)): spectra = Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z), z, MJD_max) if", "object to store ''' #Make sure there are no duplicates", "phase closest to phase1 ''' index = np.argmin([ abs(x.phase -", "np.argmin([ abs(x.phase - phase1) for x in self.spectra]) return self.spectra[index]", "- corresponding absorption velocity ''' velocity = ((restframe - dopplershifted)", "list containing boundaries of region used for normalization Returns: Pandas", "normalization = [5000,6000]): ''' Args: Spectra - one epoch of", "float(len(rel_flux_range)) #Normalize flux fluxes = 
[float(flux) / average_flux for flux", "self.spectra = [] self.lightcurves = [] #define function to return", "object with spectra list filled ''' supernovae = Supernovae(str(JSON[0:-5]), z,", "as np import pandas as pd import matplotlib.pyplot as plt", "SN self.redshift = redshift #Store date of B band maximum", "average_flux = flux_sum / float(len(rel_flux_range)) #Normalize flux fluxes = [float(flux)", "= (z * flux_obs) + flux_obs return flux_emit #Define function", "#Define function to get relevant spectra from OSN JSON data", "in normalization region, not including spectra') return None #Calculate average", "spectra_object in self.spectra: self.spectra.sort(key= lambda x: x.phase) print('already exists') elif", "wavelengths return df def correct_flux(flux_obs, z): ''' Args: flux_obs (int)", "maximum #initiate empty list to hold Spectra objects self.spectra =", "Supernovae object for given JSON data file from OSN Args:", "interest Returns: Supernovae - Supernovae object with spectra list filled", "= pd.DataFrame() df['Flux'] = fluxes df['Wavelength'] = wavelengths return df", "data file = open('../Data/OSN_data/' + str(JSON)) json_data = json.load(file) spectra_data", "in Spectra] #Extract Fluxes fluxes = [float(x[1]) for x in", "- Spectra object to store ''' #Make sure there are", "in normalization range rel_flux_range = [x for x in Spectra", "(int) - number of days past maximum brightness phase (int)", "''' Args: phase1 (float )- phase of interest Returns: Spectra", "this range flux_sum = 0 for x in rel_flux_range: flux_sum", "[correct_flux(flux, z) for flux in fluxes] #Extract fluxes in normalization", "lambda x: x.phase) #define function to store lightcurve def store_lightcurve(self,", "is None: continue else: supernovae.store_spectra(spectra) return supernovae #Define function to", "obseravation self.epoch = float(epoch) #store phase of observation self.phase =", "if lightcurve_object in self.lightcurves: print('already exists') else: 
self.lightcurves.append(lightcurve_object) #define function", "'flux', 'err']) #Create Supernovae class to store Spectral objects class", "restframe, Normalize flux and store in Spectra self.data= Unpack_Spectra(Spectra, z)", "to restframe and corrects flux for redshift, and normalizes flux", "= json_data[JSON[0:-5]]['spectra'] spectra_data = np.array(spectra_data) for i in range(len(spectra_data)): spectra", "z, normalization = [5000,6000]): ''' Args: Spectra - one epoch", "Time from Supernovae import * #speed of light (km/s) c", "restframe wavelength of absorption dopplershifted (float) - dopplershifted wavelength of", "B band maximum in MJD ''' #Store name of SN", "''' Args: restframe (float) - restframe wavelength of absorption dopplershifted", "dopplershifted (float) - dopplershifted wavelength of absorption Returns: float -", "to store Spectral objects class Supernovae(object): #Initialization function def __init__(self,", "DataFrame - 2 column dataframe: wavelength and flux Flux is", "and normalizes flux def Unpack_Spectra(Spectra, z, normalization = [5000,6000]): '''", "flux def Unpack_Spectra(Spectra, z, normalization = [5000,6000]): ''' Args: Spectra", "self.phase = float(epoch) - float(MJD_max) class Lightcurve(): def __init__(self, times,", "x.phase) #define function to store lightcurve def store_lightcurve(self, lightcurve_object): if", "wavelengths = [wavelength / float(1 + z) for wavelength in", "normalization region, not including spectra') return None #Calculate average flux", "restframe ''' #Extract Wavelengths wavelengths = [float(x[0]) for x in", "#speed of light (km/s) c = 3e5 #Define class to", "store Spectral objects class Supernovae(object): #Initialization function def __init__(self, name,", "float(MJD_max) class Lightcurve(): def __init__(self, times, fluxes, error, band): self.band", "int - redshift corrected flux ''' flux_emit = (z *", "spectra of interest Returns: Supernovae - Supernovae object with spectra", "calc_abs_velc(restframe, 
dopplershifted): ''' Args: restframe (float) - restframe wavelength of", "class Lightcurve(): def __init__(self, times, fluxes, error, band): self.band =", "self.lightcurves.append(lightcurve_object) #define function that converts wavlengths to restframe and corrects", "times, fluxes, error, band): self.band = band self.data = pd.DataFrame(list(zip(times,", "from astropy.time import Time from Supernovae import * #speed of", "data class Spectra: #Initialization function def __init__(self, Spectra, epoch, z", "of light (km/s) c = 3e5 #Define class to hold", "Args: spectra_object (Spectra) - Spectra object to store ''' #Make", "not including spectra') return None #Calculate average flux in this", "3e5 #Define class to hold releveant information for spectra data", "OSN JSON data file def create_SN_object(JSON, MJD_max, z): ''' Function", "for given JSON data file from OSN Args: JSON (str)", "of days past maximum brightness phase (int) - phase for", "scipy.integrate as integrate from astropy.time import Time from Supernovae import", "= name #Store redshift of SN self.redshift = redshift #Store", "fluxes = [float(flux) / average_flux for flux in fluxes] #convert", "* flux_obs) + flux_obs return flux_emit #Define function to get", "wavelength of absorption dopplershifted (float) - dopplershifted wavelength of absorption", "pd.DataFrame(list(zip(times, fluxes, error)), columns = ['times', 'flux', 'err']) #Create Supernovae", "sorted by date if spectra_object in self.spectra: self.spectra.sort(key= lambda x:", "self.spectra.sort(key= lambda x: x.phase) print('already exists') elif spectra_object.epoch in [x.epoch", "by date if spectra_object in self.spectra: self.spectra.sort(key= lambda x: x.phase)", "spectra.data is None: continue else: supernovae.store_spectra(spectra) return supernovae #Define function", "def correct_flux(flux_obs, z): ''' Args: flux_obs (int) - observed flux", "in self.spectra: self.spectra.sort(key= lambda x: x.phase) print('already exists') elif 
spectra_object.epoch", "#Define class to hold releveant information for spectra data class", "rel_flux_range: flux_sum += float(x[1]) average_flux = flux_sum / float(len(rel_flux_range)) #Normalize", "print('already exists') elif spectra_object.epoch in [x.epoch for x in self.spectra]:", "np.array(spectra_data) for i in range(len(spectra_data)): spectra = Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) /", "= float(epoch) - float(MJD_max) class Lightcurve(): def __init__(self, times, fluxes,", "interest MJD_max (int) - number of days past maximum brightness", "def __init__(self, Spectra, epoch, z , MJD_max): ''' Spectra (string)", "self.spectra[index] #define function to store new spectra def store_spectra(self, spectra_object):", "['times', 'flux', 'err']) #Create Supernovae class to store Spectral objects", "#define function that converts wavlengths to restframe and corrects flux", "JSON formatted spectra file epoch (float) - MJD date z", "is corrected for redshift and normalized Wavelength is converted to", "z, MJD_max) if spectra.data is None: continue else: supernovae.store_spectra(spectra) return", "numpy as np import pandas as pd import matplotlib.pyplot as", "of absorption dopplershifted (float) - dopplershifted wavelength of absorption Returns:", "and normalized Wavelength is converted to SN restframe ''' #Extract", "observation self.phase = float(epoch) - float(MJD_max) class Lightcurve(): def __init__(self,", "= fluxes df['Wavelength'] = wavelengths return df def correct_flux(flux_obs, z):", "#Create Supernovae class to store Spectral objects class Supernovae(object): #Initialization", "list to hold Spectra objects self.spectra = [] self.lightcurves =", "- date of B band maximum brightness for SN in", "''' supernovae = Supernovae(str(JSON[0:-5]), z, MJD_max) #Load OSN json data", "def convert_date_toMJD(date): ''' Args: date (str) - string of calendar", "isnt empty if len(rel_flux_range) == 0: #print('No wavelengths in normalization", "to 
OSN JSON file of interest MJD_max (int) - number", "converted to SN restframe ''' #Extract Wavelengths wavelengths = [float(x[0])", "= [correct_flux(flux, z) for flux in fluxes] #Extract fluxes in", "file of interest MJD_max (int) - number of days past", "for x in self.spectra]) return self.spectra[index] #define function to store", "flux and store in Spectra self.data= Unpack_Spectra(Spectra, z) #store epoch", "redshift of corresponding SN MJD_max (float) - date of B", "Returns: Pandas DataFrame - 2 column dataframe: wavelength and flux", "self.spectra]: self.spectra.sort(key= lambda x: x.phase) pass else: self.spectra.append(spectra_object) self.spectra.sort(key= lambda", "to restframe wavelengths = [wavelength / float(1 + z) for", "hold releveant information for spectra data class Spectra: #Initialization function", "epoch (float) - MJD date z (float) - redshift of", "there rel_flux_range isnt empty if len(rel_flux_range) == 0: #print('No wavelengths", "self.epoch = float(epoch) #store phase of observation self.phase = float(epoch)", "normalization range rel_flux_range = [x for x in Spectra if", "astropy.time import Time from Supernovae import * #speed of light", "of SN self.redshift = redshift #Store date of B band", "band): self.band = band self.data = pd.DataFrame(list(zip(times, fluxes, error)), columns", "''' Args: spectra_object (Spectra) - Spectra object to store '''", "that converts wavlengths to restframe and corrects flux for redshift,", "[float(x[1]) for x in Spectra] #correct fluxes for redshift fluxes", "- MJD value of given calendar date ''' t =", "wavelength of absorption Returns: float - corresponding absorption velocity '''", "columns = ['times', 'flux', 'err']) #Create Supernovae class to store", "JSON file of interest MJD_max (int) - number of days", "MJD_max) #Load OSN json data file = open('../Data/OSN_data/' + str(JSON))", "Normalize flux and store in Spectra self.data= Unpack_Spectra(Spectra, z) #store", "continue else: 
supernovae.store_spectra(spectra) return supernovae #Define function to convert calendar", "Spectra objects self.spectra = [] self.lightcurves = [] #define function", "Supernovae(object): #Initialization function def __init__(self, name, redshift, maximum): ''' name", "- phase1) for x in self.spectra]) return self.spectra[index] #define function", "flux z (int) - redshift Returns: int - redshift corrected", "def __init__(self, name, redshift, maximum): ''' name (str) - String", "exists') else: self.lightcurves.append(lightcurve_object) #define function that converts wavlengths to restframe", "(float )- phase of interest Returns: Spectra object - Spectra", "(int) - observed flux z (int) - redshift Returns: int", "redshift Returns: int - redshift corrected flux ''' flux_emit =", "Spectral objects class Supernovae(object): #Initialization function def __init__(self, name, redshift,", "of B band maximum in MJD ''' #Store name of", "closest to given phase def find_spectra(self, phase1): ''' Args: phase1", "containing boundaries of region used for normalization Returns: Pandas DataFrame", "SN restframe, Normalize flux and store in Spectra self.data= Unpack_Spectra(Spectra,", "float(1 + z) for wavelength in wavelengths] #store in pandas", "interest Returns: Spectra object - Spectra object with phase closest", "self.lightcurves: print('already exists') else: self.lightcurves.append(lightcurve_object) #define function that converts wavlengths", "for x in Spectra] #Extract Fluxes fluxes = [float(x[1]) for", "B band maximum brightness self.maximum = maximum #initiate empty list", "Returns: float - corresponding absorption velocity ''' velocity = ((restframe", "return t.value #Define function to calculate absorption velocities def calc_abs_velc(restframe,", "#import relevant libraries import numpy as np import pandas as", "restframe and corrects flux for redshift, and normalizes flux def", "- date of B band maximum in MJD ''' #Store", "pd import matplotlib.pyplot as plt from 
astropy.io import ascii import", "import numpy as np import pandas as pd import matplotlib.pyplot", "= [] self.lightcurves = [] #define function to return spectra", "''' name (str) - String of SN name redshift (float)", "band maximum brightness self.maximum = maximum #initiate empty list to", "t.format = 'mjd' return t.value #Define function to calculate absorption", "to SN restframe ''' #Extract Wavelengths wavelengths = [float(x[0]) for", "in pandas dataframe df = pd.DataFrame() df['Flux'] = fluxes df['Wavelength']", "brightness self.maximum = maximum #initiate empty list to hold Spectra", "converts wavlengths to restframe and corrects flux for redshift, and", "phase of interest Returns: Spectra object - Spectra object with", "normalization Returns: Pandas DataFrame - 2 column dataframe: wavelength and", "== 0: #print('No wavelengths in normalization region, not including spectra')", "flux_sum = 0 for x in rel_flux_range: flux_sum += float(x[1])", "(Spectra) - Spectra object to store ''' #Make sure there", "of interest MJD_max (int) - number of days past maximum", "absorption dopplershifted (float) - dopplershifted wavelength of absorption Returns: float", "absorption Returns: float - corresponding absorption velocity ''' velocity =", "if len(rel_flux_range) == 0: #print('No wavelengths in normalization region, not", "phase for spectra of interest Returns: Supernovae - Supernovae object", "from specutils import Spectrum1D from astropy import units from scipy.optimize", "- 2 item list containing boundaries of region used for", "file epoch (float) - MJD date z (float) - redshift", "rel_flux_range isnt empty if len(rel_flux_range) == 0: #print('No wavelengths in", "redshift #Store date of B band maximum brightness self.maximum =", "def store_spectra(self, spectra_object): ''' Args: spectra_object (Spectra) - Spectra object", "to given phase def find_spectra(self, phase1): ''' Args: phase1 (float", "average flux in this range flux_sum = 0 for x", "Spectra: 
#Initialization function def __init__(self, Spectra, epoch, z , MJD_max):", "of absorption Returns: float - corresponding absorption velocity ''' velocity", "z (int) - redshift Returns: int - redshift corrected flux", "date of B band maximum in MJD ''' #Store name", "phase1): ''' Args: phase1 (float )- phase of interest Returns:", "= band self.data = pd.DataFrame(list(zip(times, fluxes, error)), columns = ['times',", "self.spectra.append(spectra_object) self.spectra.sort(key= lambda x: x.phase) #define function to store lightcurve", "empty if len(rel_flux_range) == 0: #print('No wavelengths in normalization region,", "Function to create Supernovae object for given JSON data file", "band self.data = pd.DataFrame(list(zip(times, fluxes, error)), columns = ['times', 'flux',", "spectra_data = json_data[JSON[0:-5]]['spectra'] spectra_data = np.array(spectra_data) for i in range(len(spectra_data)):", "import Time from Supernovae import * #speed of light (km/s)", "- path to JSON formatted spectra file epoch (float) -", "empty list to hold Spectra objects self.spectra = [] self.lightcurves", "rel_flux_range = [x for x in Spectra if (float(x[0])>normalization[0]) &", "name #Store redshift of SN self.redshift = redshift #Store date", "#Store date of B band maximum brightness self.maximum = maximum", "object - Spectra object with phase closest to phase1 '''", "Wavelength is converted to SN restframe ''' #Extract Wavelengths wavelengths", "in wavelengths] #store in pandas dataframe df = pd.DataFrame() df['Flux']", "wavelength in wavelengths] #store in pandas dataframe df = pd.DataFrame()", "given calendar date ''' t = Time(date) t.format = 'mjd'", "lambda x: x.phase) print('already exists') elif spectra_object.epoch in [x.epoch for", "Spectra object to store ''' #Make sure there are no", "observed flux z (int) - redshift Returns: int - redshift", "= ['times', 'flux', 'err']) #Create Supernovae class to store Spectral", "from astropy import units from scipy.optimize import 
curve_fit from scipy.interpolate", "past maximum brightness phase (int) - phase for spectra of", "#define function to store new spectra def store_spectra(self, spectra_object): '''", "item list containing boundaries of region used for normalization Returns:", "z , MJD_max): ''' Spectra (string) - path to JSON", "float(x[1]) average_flux = flux_sum / float(len(rel_flux_range)) #Normalize flux fluxes =", "in fluxes] #convert wavelength to restframe wavelengths = [wavelength /", "phase1) for x in self.spectra]) return self.spectra[index] #define function to", "(str) - path to OSN JSON file of interest MJD_max", "- phase for spectra of interest Returns: Supernovae - Supernovae", "Supernovae class to store Spectral objects class Supernovae(object): #Initialization function", "Unpack_Spectra(Spectra, z, normalization = [5000,6000]): ''' Args: Spectra - one", "from OSN z (float) - redshift of SN normalizationn (list)", "JSON data file from OSN Args: JSON (str) - path", "MJD date z (float) - redshift of corresponding SN MJD_max", "2 item list containing boundaries of region used for normalization", "for redshift, change wavelength to SN restframe, Normalize flux and", "corrected for redshift and normalized Wavelength is converted to SN", "fluxes df['Wavelength'] = wavelengths return df def correct_flux(flux_obs, z): '''", "= open('../Data/OSN_data/' + str(JSON)) json_data = json.load(file) spectra_data = json_data[JSON[0:-5]]['spectra']", "date (str) - string of calendar date (e.g. '2002-8-17') Returns:", "Supernovae - Supernovae object with spectra list filled ''' supernovae", "wavelength to SN restframe, Normalize flux and store in Spectra", "JSON (str) - path to OSN JSON file of interest", "Spectra, epoch, z , MJD_max): ''' Spectra (string) - path", "Returns: Supernovae - Supernovae object with spectra list filled '''", "redshift, maximum): ''' name (str) - String of SN name", "OSN json data file = open('../Data/OSN_data/' + str(JSON)) json_data =", "calendar date (e.g. 
'2002-8-17') Returns: float - MJD value of", "String of SN name redshift (float) - redshift of SN", "Supernovae object with spectra list filled ''' supernovae = Supernovae(str(JSON[0:-5]),", "self.lightcurves = [] #define function to return spectra closest to", "given phase def find_spectra(self, phase1): ''' Args: phase1 (float )-", "Spectra - one epoch of spectral data in JSON format", "z) #store epoch of obseravation self.epoch = float(epoch) #store phase", "flux for redshift, change wavelength to SN restframe, Normalize flux", "phase (int) - phase for spectra of interest Returns: Supernovae", "in rel_flux_range: flux_sum += float(x[1]) average_flux = flux_sum / float(len(rel_flux_range))", "''' t = Time(date) t.format = 'mjd' return t.value #Define", "flux for redshift, and normalizes flux def Unpack_Spectra(Spectra, z, normalization", "B band maximum brightness for SN in MJD ''' #correct", "Time(date) t.format = 'mjd' return t.value #Define function to calculate", "redshift of SN normalizationn (list) - 2 item list containing", "#Make sure there are no duplicates and that spectra are", "#correct fluxes for redshift fluxes = [correct_flux(flux, z) for flux", "specutils import Spectrum1D from astropy import units from scipy.optimize import", "sure there rel_flux_range isnt empty if len(rel_flux_range) == 0: #print('No", "one epoch of spectral data in JSON format from OSN", "of obseravation self.epoch = float(epoch) #store phase of observation self.phase", "file = open('../Data/OSN_data/' + str(JSON)) json_data = json.load(file) spectra_data =", "x in self.spectra]) return self.spectra[index] #define function to store new", "wavelengths in normalization region, not including spectra') return None #Calculate", "#Load OSN json data file = open('../Data/OSN_data/' + str(JSON)) json_data", "spectra from OSN JSON data file def create_SN_object(JSON, MJD_max, z):", "band maximum in MJD ''' #Store name of SN self.name", "self.data = pd.DataFrame(list(zip(times, fluxes, 
error)), columns = ['times', 'flux', 'err'])", "MJD value of given calendar date ''' t = Time(date)", "filled ''' supernovae = Supernovae(str(JSON[0:-5]), z, MJD_max) #Load OSN json", "& (float(x[0])<normalization[1])] #Make sure there rel_flux_range isnt empty if len(rel_flux_range)", "flux_sum / float(len(rel_flux_range)) #Normalize flux fluxes = [float(flux) / average_flux", "(z * flux_obs) + flux_obs return flux_emit #Define function to", "json data file = open('../Data/OSN_data/' + str(JSON)) json_data = json.load(file)", "json from IPython.display import display, Image from specutils import Spectrum1D", "#Extract Wavelengths wavelengths = [float(x[0]) for x in Spectra] #Extract", "x in Spectra] #Extract Fluxes fluxes = [float(x[1]) for x", "self.spectra: self.spectra.sort(key= lambda x: x.phase) print('already exists') elif spectra_object.epoch in", "region, not including spectra') return None #Calculate average flux in", "of B band maximum brightness for SN in MJD '''", "= maximum #initiate empty list to hold Spectra objects self.spectra", "= [wavelength / float(1 + z) for wavelength in wavelengths]", "import json from IPython.display import display, Image from specutils import", "dataframe df = pd.DataFrame() df['Flux'] = fluxes df['Wavelength'] = wavelengths", "def create_SN_object(JSON, MJD_max, z): ''' Function to create Supernovae object", "- path to OSN JSON file of interest MJD_max (int)", "absorption velocities def calc_abs_velc(restframe, dopplershifted): ''' Args: restframe (float) -", "calendar date to MJD def convert_date_toMJD(date): ''' Args: date (str)", "redshift of SN self.redshift = redshift #Store date of B", "lightcurve def store_lightcurve(self, lightcurve_object): if lightcurve_object in self.lightcurves: print('already exists')", "and flux Flux is corrected for redshift and normalized Wavelength", "Args: date (str) - string of calendar date (e.g. 
'2002-8-17')", "if spectra.data is None: continue else: supernovae.store_spectra(spectra) return supernovae #Define", "+= float(x[1]) average_flux = flux_sum / float(len(rel_flux_range)) #Normalize flux fluxes", "in this range flux_sum = 0 for x in rel_flux_range:", "corresponding SN MJD_max (float) - date of B band maximum", "date to MJD def convert_date_toMJD(date): ''' Args: date (str) -", "store lightcurve def store_lightcurve(self, lightcurve_object): if lightcurve_object in self.lightcurves: print('already", "= Time(date) t.format = 'mjd' return t.value #Define function to", "elif spectra_object.epoch in [x.epoch for x in self.spectra]: self.spectra.sort(key= lambda", "list filled ''' supernovae = Supernovae(str(JSON[0:-5]), z, MJD_max) #Load OSN", "None #Calculate average flux in this range flux_sum = 0", "band maximum brightness for SN in MJD ''' #correct flux", "(float(x[0])>normalization[0]) & (float(x[0])<normalization[1])] #Make sure there rel_flux_range isnt empty if", "Args: restframe (float) - restframe wavelength of absorption dopplershifted (float)", "maximum in MJD ''' #Store name of SN self.name =", "spectra') return None #Calculate average flux in this range flux_sum", "ascii import json from IPython.display import display, Image from specutils", "objects self.spectra = [] self.lightcurves = [] #define function to", "to return spectra closest to given phase def find_spectra(self, phase1):", "wavelength to restframe wavelengths = [wavelength / float(1 + z)", "- dopplershifted wavelength of absorption Returns: float - corresponding absorption", "of interest Returns: Spectra object - Spectra object with phase", "function to return spectra closest to given phase def find_spectra(self,", "to store new spectra def store_spectra(self, spectra_object): ''' Args: spectra_object", "function to calculate absorption velocities def calc_abs_velc(restframe, dopplershifted): ''' Args:", "in Spectra if (float(x[0])>normalization[0]) & 
(float(x[0])<normalization[1])] #Make sure there rel_flux_range", "self.data= Unpack_Spectra(Spectra, z) #store epoch of obseravation self.epoch = float(epoch)", "change wavelength to SN restframe, Normalize flux and store in", "t.value #Define function to calculate absorption velocities def calc_abs_velc(restframe, dopplershifted):", "#initiate empty list to hold Spectra objects self.spectra = []", "json_data[JSON[0:-5]]['spectra'] spectra_data = np.array(spectra_data) for i in range(len(spectra_data)): spectra =", "= np.array(spectra_data) for i in range(len(spectra_data)): spectra = Spectra(spectra_data[i]['data'], float(spectra_data[i]['time'])", "there are no duplicates and that spectra are sorted by", "len(rel_flux_range) == 0: #print('No wavelengths in normalization region, not including", "velocity = ((restframe - dopplershifted) / np.float(restframe))* c return velocity", "#Define function to convert calendar date to MJD def convert_date_toMJD(date):", "normalizes flux def Unpack_Spectra(Spectra, z, normalization = [5000,6000]): ''' Args:", "with spectra list filled ''' supernovae = Supernovae(str(JSON[0:-5]), z, MJD_max)", "- Supernovae object with spectra list filled ''' supernovae =", "x: x.phase) #define function to store lightcurve def store_lightcurve(self, lightcurve_object):", "''' Args: date (str) - string of calendar date (e.g.", "'mjd' return t.value #Define function to calculate absorption velocities def", "MJD ''' #correct flux for redshift, change wavelength to SN", "import units from scipy.optimize import curve_fit from scipy.interpolate import interp1d", "information for spectra data class Spectra: #Initialization function def __init__(self,", "= float(epoch) #store phase of observation self.phase = float(epoch) -", "SN name redshift (float) - redshift of SN maximum (float)", "<filename>Supernovae.py #import relevant libraries import numpy as np import pandas", "class Spectra: #Initialization function def __init__(self, Spectra, epoch, z 
,", "to calculate absorption velocities def calc_abs_velc(restframe, dopplershifted): ''' Args: restframe", "- redshift corrected flux ''' flux_emit = (z * flux_obs)", "JSON data file def create_SN_object(JSON, MJD_max, z): ''' Function to", "restframe (float) - restframe wavelength of absorption dopplershifted (float) -", "flux_emit = (z * flux_obs) + flux_obs return flux_emit #Define", "(float) - redshift of corresponding SN MJD_max (float) - date", "- Spectra object with phase closest to phase1 ''' index", "(str) - String of SN name redshift (float) - redshift", "x: x.phase) pass else: self.spectra.append(spectra_object) self.spectra.sort(key= lambda x: x.phase) #define", "x in Spectra] #correct fluxes for redshift fluxes = [correct_flux(flux,", "- number of days past maximum brightness phase (int) -", "IPython.display import display, Image from specutils import Spectrum1D from astropy", "#Extract fluxes in normalization range rel_flux_range = [x for x", "#Calculate average flux in this range flux_sum = 0 for", "store ''' #Make sure there are no duplicates and that", "c = 3e5 #Define class to hold releveant information for", "number of days past maximum brightness phase (int) - phase", "[5000,6000]): ''' Args: Spectra - one epoch of spectral data", "integrate from astropy.time import Time from Supernovae import * #speed", "for redshift fluxes = [correct_flux(flux, z) for flux in fluxes]", "(float(x[0])<normalization[1])] #Make sure there rel_flux_range isnt empty if len(rel_flux_range) ==", "formatted spectra file epoch (float) - MJD date z (float)", "data file from OSN Args: JSON (str) - path to", "[x for x in Spectra if (float(x[0])>normalization[0]) & (float(x[0])<normalization[1])] #Make", "spectra_object.epoch in [x.epoch for x in self.spectra]: self.spectra.sort(key= lambda x:", "from Supernovae import * #speed of light (km/s) c =", "Spectra object - Spectra object with phase closest to phase1", "/ float(len(rel_flux_range)) #Normalize flux fluxes = 
[float(flux) / average_flux for", "name (str) - String of SN name redshift (float) -", "= Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z), z, MJD_max) if spectra.data is", "else: self.lightcurves.append(lightcurve_object) #define function that converts wavlengths to restframe and", "for flux in fluxes] #convert wavelength to restframe wavelengths =", "z, MJD_max) #Load OSN json data file = open('../Data/OSN_data/' +", "spectra def store_spectra(self, spectra_object): ''' Args: spectra_object (Spectra) - Spectra", "Supernovae(str(JSON[0:-5]), z, MJD_max) #Load OSN json data file = open('../Data/OSN_data/'", "MJD_max): ''' Spectra (string) - path to JSON formatted spectra", "Flux is corrected for redshift and normalized Wavelength is converted", "x: x.phase) print('already exists') elif spectra_object.epoch in [x.epoch for x", "JSON format from OSN z (float) - redshift of SN", "as integrate from astropy.time import Time from Supernovae import *", "#correct flux for redshift, change wavelength to SN restframe, Normalize", "(float) - redshift of SN maximum (float) - date of", "in MJD ''' #correct flux for redshift, change wavelength to", "path to OSN JSON file of interest MJD_max (int) -", "''' #Store name of SN self.name = name #Store redshift", "date (e.g. 
'2002-8-17') Returns: float - MJD value of given", "import ascii import json from IPython.display import display, Image from", "''' #Make sure there are no duplicates and that spectra", "function to store lightcurve def store_lightcurve(self, lightcurve_object): if lightcurve_object in", "corresponding absorption velocity ''' velocity = ((restframe - dopplershifted) /", "of SN normalizationn (list) - 2 item list containing boundaries", "light (km/s) c = 3e5 #Define class to hold releveant", "plt from astropy.io import ascii import json from IPython.display import", "brightness phase (int) - phase for spectra of interest Returns:", "region used for normalization Returns: Pandas DataFrame - 2 column", "in fluxes] #Extract fluxes in normalization range rel_flux_range = [x", "redshift fluxes = [correct_flux(flux, z) for flux in fluxes] #Extract", "function to store new spectra def store_spectra(self, spectra_object): ''' Args:", "to convert calendar date to MJD def convert_date_toMJD(date): ''' Args:", "else: supernovae.store_spectra(spectra) return supernovae #Define function to convert calendar date", "for spectra data class Spectra: #Initialization function def __init__(self, Spectra,", "store_lightcurve(self, lightcurve_object): if lightcurve_object in self.lightcurves: print('already exists') else: self.lightcurves.append(lightcurve_object)", "maximum brightness phase (int) - phase for spectra of interest", "date z (float) - redshift of corresponding SN MJD_max (float)", "Spectra (string) - path to JSON formatted spectra file epoch", "OSN JSON file of interest MJD_max (int) - number of", "z): ''' Args: flux_obs (int) - observed flux z (int)", "df = pd.DataFrame() df['Flux'] = fluxes df['Wavelength'] = wavelengths return", "spectra are sorted by date if spectra_object in self.spectra: self.spectra.sort(key=", "= [float(x[1]) for x in Spectra] #correct fluxes for redshift", "supernovae.store_spectra(spectra) return supernovae #Define function to convert calendar 
date to", "convert calendar date to MJD def convert_date_toMJD(date): ''' Args: date", "of spectral data in JSON format from OSN z (float)", "float - corresponding absorption velocity ''' velocity = ((restframe -", "(float) - MJD date z (float) - redshift of corresponding", "fluxes] #convert wavelength to restframe wavelengths = [wavelength / float(1", "function def __init__(self, Spectra, epoch, z , MJD_max): ''' Spectra", "to store ''' #Make sure there are no duplicates and", "releveant information for spectra data class Spectra: #Initialization function def", "pandas as pd import matplotlib.pyplot as plt from astropy.io import", "import curve_fit from scipy.interpolate import interp1d import scipy.integrate as integrate", "fluxes for redshift fluxes = [correct_flux(flux, z) for flux in", "/ average_flux for flux in fluxes] #convert wavelength to restframe", "float(spectra_data[i]['time']) / (1+z), z, MJD_max) if spectra.data is None: continue", "calculate absorption velocities def calc_abs_velc(restframe, dopplershifted): ''' Args: restframe (float)", "value of given calendar date ''' t = Time(date) t.format", "#convert wavelength to restframe wavelengths = [wavelength / float(1 +", "scipy.interpolate import interp1d import scipy.integrate as integrate from astropy.time import", "= 3e5 #Define class to hold releveant information for spectra", "MJD_max) if spectra.data is None: continue else: supernovae.store_spectra(spectra) return supernovae", "to MJD def convert_date_toMJD(date): ''' Args: date (str) - string", "for SN in MJD ''' #correct flux for redshift, change", "= redshift #Store date of B band maximum brightness self.maximum", "epoch of obseravation self.epoch = float(epoch) #store phase of observation", "of SN name redshift (float) - redshift of SN maximum", "duplicates and that spectra are sorted by date if spectra_object", "z): ''' Function to create Supernovae object for given JSON", "function to convert calendar date to MJD def 
convert_date_toMJD(date): '''", "return df def correct_flux(flux_obs, z): ''' Args: flux_obs (int) -", "x in Spectra if (float(x[0])>normalization[0]) & (float(x[0])<normalization[1])] #Make sure there", "as plt from astropy.io import ascii import json from IPython.display", "string of calendar date (e.g. '2002-8-17') Returns: float - MJD", "brightness for SN in MJD ''' #correct flux for redshift,", "of region used for normalization Returns: Pandas DataFrame - 2", "to phase1 ''' index = np.argmin([ abs(x.phase - phase1) for", "spectra = Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z), z, MJD_max) if spectra.data", "lambda x: x.phase) pass else: self.spectra.append(spectra_object) self.spectra.sort(key= lambda x: x.phase)", "of B band maximum brightness self.maximum = maximum #initiate empty", "dataframe: wavelength and flux Flux is corrected for redshift and", "absorption velocity ''' velocity = ((restframe - dopplershifted) / np.float(restframe))*", "in self.spectra]: self.spectra.sort(key= lambda x: x.phase) pass else: self.spectra.append(spectra_object) self.spectra.sort(key=", "def store_lightcurve(self, lightcurve_object): if lightcurve_object in self.lightcurves: print('already exists') else:", "[wavelength / float(1 + z) for wavelength in wavelengths] #store", "SN normalizationn (list) - 2 item list containing boundaries of", "(float) - restframe wavelength of absorption dopplershifted (float) - dopplershifted", "objects class Supernovae(object): #Initialization function def __init__(self, name, redshift, maximum):", "pd.DataFrame() df['Flux'] = fluxes df['Wavelength'] = wavelengths return df def", "#Define function to calculate absorption velocities def calc_abs_velc(restframe, dopplershifted): '''", "#Extract Fluxes fluxes = [float(x[1]) for x in Spectra] #correct", "''' Args: Spectra - one epoch of spectral data in", "if (float(x[0])>normalization[0]) & (float(x[0])<normalization[1])] #Make sure there rel_flux_range isnt empty", 
"''' flux_emit = (z * flux_obs) + flux_obs return flux_emit", "to get relevant spectra from OSN JSON data file def", "- restframe wavelength of absorption dopplershifted (float) - dopplershifted wavelength", "in [x.epoch for x in self.spectra]: self.spectra.sort(key= lambda x: x.phase)", "function that converts wavlengths to restframe and corrects flux for", "redshift of SN maximum (float) - date of B band", "- string of calendar date (e.g. '2002-8-17') Returns: float -", "calendar date ''' t = Time(date) t.format = 'mjd' return", "flux_sum += float(x[1]) average_flux = flux_sum / float(len(rel_flux_range)) #Normalize flux", "velocity ''' velocity = ((restframe - dopplershifted) / np.float(restframe))* c", "for wavelength in wavelengths] #store in pandas dataframe df =", "for x in self.spectra]: self.spectra.sort(key= lambda x: x.phase) pass else:", "/ (1+z), z, MJD_max) if spectra.data is None: continue else:", "str(JSON)) json_data = json.load(file) spectra_data = json_data[JSON[0:-5]]['spectra'] spectra_data = np.array(spectra_data)", "supernovae #Define function to convert calendar date to MJD def", "class to store Spectral objects class Supernovae(object): #Initialization function def", "in Spectra] #correct fluxes for redshift fluxes = [correct_flux(flux, z)", "#store epoch of obseravation self.epoch = float(epoch) #store phase of", "- MJD date z (float) - redshift of corresponding SN", "data in JSON format from OSN z (float) - redshift", "import scipy.integrate as integrate from astropy.time import Time from Supernovae", "for x in Spectra if (float(x[0])>normalization[0]) & (float(x[0])<normalization[1])] #Make sure", "of observation self.phase = float(epoch) - float(MJD_max) class Lightcurve(): def", "that spectra are sorted by date if spectra_object in self.spectra:", "for redshift and normalized Wavelength is converted to SN restframe", "curve_fit from scipy.interpolate import interp1d import scipy.integrate as integrate from", "corrects flux for 
redshift, and normalizes flux def Unpack_Spectra(Spectra, z,", "for normalization Returns: Pandas DataFrame - 2 column dataframe: wavelength", "import display, Image from specutils import Spectrum1D from astropy import", "store new spectra def store_spectra(self, spectra_object): ''' Args: spectra_object (Spectra)", "closest to phase1 ''' index = np.argmin([ abs(x.phase - phase1)", "store_spectra(self, spectra_object): ''' Args: spectra_object (Spectra) - Spectra object to", "are sorted by date if spectra_object in self.spectra: self.spectra.sort(key= lambda", "object for given JSON data file from OSN Args: JSON", "fluxes = [correct_flux(flux, z) for flux in fluxes] #Extract fluxes", "= 'mjd' return t.value #Define function to calculate absorption velocities", "spectra_object (Spectra) - Spectra object to store ''' #Make sure", "to create Supernovae object for given JSON data file from", "file def create_SN_object(JSON, MJD_max, z): ''' Function to create Supernovae", "of calendar date (e.g. 
'2002-8-17') Returns: float - MJD value", "z) for wavelength in wavelengths] #store in pandas dataframe df", "z) for flux in fluxes] #Extract fluxes in normalization range", "MJD_max (int) - number of days past maximum brightness phase", "units from scipy.optimize import curve_fit from scipy.interpolate import interp1d import", "function to get relevant spectra from OSN JSON data file", "to JSON formatted spectra file epoch (float) - MJD date", "float - MJD value of given calendar date ''' t", "display, Image from specutils import Spectrum1D from astropy import units", "x in self.spectra]: self.spectra.sort(key= lambda x: x.phase) pass else: self.spectra.append(spectra_object)", "flux_obs return flux_emit #Define function to get relevant spectra from", "to store lightcurve def store_lightcurve(self, lightcurve_object): if lightcurve_object in self.lightcurves:", "create_SN_object(JSON, MJD_max, z): ''' Function to create Supernovae object for", "pass else: self.spectra.append(spectra_object) self.spectra.sort(key= lambda x: x.phase) #define function to", "convert_date_toMJD(date): ''' Args: date (str) - string of calendar date", "date ''' t = Time(date) t.format = 'mjd' return t.value", "sure there are no duplicates and that spectra are sorted", "= wavelengths return df def correct_flux(flux_obs, z): ''' Args: flux_obs", "(1+z), z, MJD_max) if spectra.data is None: continue else: supernovae.store_spectra(spectra)", "file from OSN Args: JSON (str) - path to OSN", "t = Time(date) t.format = 'mjd' return t.value #Define function", "if spectra_object in self.spectra: self.spectra.sort(key= lambda x: x.phase) print('already exists')", "float(epoch) #store phase of observation self.phase = float(epoch) - float(MJD_max)", "including spectra') return None #Calculate average flux in this range", "* #speed of light (km/s) c = 3e5 #Define class", "'2002-8-17') Returns: float - MJD value of given calendar date", "column dataframe: wavelength and flux Flux is corrected for 
redshift", "Spectra] #Extract Fluxes fluxes = [float(x[1]) for x in Spectra]", "flux fluxes = [float(flux) / average_flux for flux in fluxes]", "json_data = json.load(file) spectra_data = json_data[JSON[0:-5]]['spectra'] spectra_data = np.array(spectra_data) for", "/ float(1 + z) for wavelength in wavelengths] #store in", "path to JSON formatted spectra file epoch (float) - MJD", "#Normalize flux fluxes = [float(flux) / average_flux for flux in", "scipy.optimize import curve_fit from scipy.interpolate import interp1d import scipy.integrate as", "Supernovae import * #speed of light (km/s) c = 3e5", "of SN maximum (float) - date of B band maximum", "index = np.argmin([ abs(x.phase - phase1) for x in self.spectra])", "open('../Data/OSN_data/' + str(JSON)) json_data = json.load(file) spectra_data = json_data[JSON[0:-5]]['spectra'] spectra_data", "+ z) for wavelength in wavelengths] #store in pandas dataframe", "MJD_max (float) - date of B band maximum brightness for", "#Store name of SN self.name = name #Store redshift of", "name, redshift, maximum): ''' name (str) - String of SN", "def Unpack_Spectra(Spectra, z, normalization = [5000,6000]): ''' Args: Spectra -", "Image from specutils import Spectrum1D from astropy import units from", "format from OSN z (float) - redshift of SN normalizationn", "flux_emit #Define function to get relevant spectra from OSN JSON", "corrected flux ''' flux_emit = (z * flux_obs) + flux_obs", "- redshift of corresponding SN MJD_max (float) - date of", "= pd.DataFrame(list(zip(times, fluxes, error)), columns = ['times', 'flux', 'err']) #Create", "name redshift (float) - redshift of SN maximum (float) -", "0: #print('No wavelengths in normalization region, not including spectra') return", "[] #define function to return spectra closest to given phase", "#print('No wavelengths in normalization region, not including spectra') return None", "new spectra def store_spectra(self, spectra_object): ''' Args: spectra_object (Spectra) -", 
"print('already exists') else: self.lightcurves.append(lightcurve_object) #define function that converts wavlengths to", "function def __init__(self, name, redshift, maximum): ''' name (str) -", "fluxes in normalization range rel_flux_range = [x for x in", "of corresponding SN MJD_max (float) - date of B band", "are no duplicates and that spectra are sorted by date", "correct_flux(flux_obs, z): ''' Args: flux_obs (int) - observed flux z", "= Supernovae(str(JSON[0:-5]), z, MJD_max) #Load OSN json data file =", "= flux_sum / float(len(rel_flux_range)) #Normalize flux fluxes = [float(flux) /", "= 0 for x in rel_flux_range: flux_sum += float(x[1]) average_flux", "spectra list filled ''' supernovae = Supernovae(str(JSON[0:-5]), z, MJD_max) #Load", "Args: JSON (str) - path to OSN JSON file of", "of interest Returns: Supernovae - Supernovae object with spectra list", "SN in MJD ''' #correct flux for redshift, change wavelength", "for redshift, and normalizes flux def Unpack_Spectra(Spectra, z, normalization =", "np import pandas as pd import matplotlib.pyplot as plt from", "#Initialization function def __init__(self, name, redshift, maximum): ''' name (str)", "spectra_object): ''' Args: spectra_object (Spectra) - Spectra object to store", "for x in rel_flux_range: flux_sum += float(x[1]) average_flux = flux_sum", "average_flux for flux in fluxes] #convert wavelength to restframe wavelengths", "from OSN JSON data file def create_SN_object(JSON, MJD_max, z): '''", "MJD def convert_date_toMJD(date): ''' Args: date (str) - string of", "to SN restframe, Normalize flux and store in Spectra self.data=", "range flux_sum = 0 for x in rel_flux_range: flux_sum +=", "self.spectra.sort(key= lambda x: x.phase) pass else: self.spectra.append(spectra_object) self.spectra.sort(key= lambda x:", "as pd import matplotlib.pyplot as plt from astropy.io import ascii", "''' #Extract Wavelengths wavelengths = [float(x[0]) for x in Spectra]", "for i in range(len(spectra_data)): spectra = 
Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z),", "redshift (float) - redshift of SN maximum (float) - date", "maximum (float) - date of B band maximum in MJD", "Args: flux_obs (int) - observed flux z (int) - redshift", "(float) - redshift of SN normalizationn (list) - 2 item", "z (float) - redshift of corresponding SN MJD_max (float) -", "(float) - date of B band maximum brightness for SN", "(km/s) c = 3e5 #Define class to hold releveant information", "[] self.lightcurves = [] #define function to return spectra closest", "flux in fluxes] #Extract fluxes in normalization range rel_flux_range =", "days past maximum brightness phase (int) - phase for spectra", "import * #speed of light (km/s) c = 3e5 #Define", "SN MJD_max (float) - date of B band maximum brightness", "#Initialization function def __init__(self, Spectra, epoch, z , MJD_max): '''", "spectra_data = np.array(spectra_data) for i in range(len(spectra_data)): spectra = Spectra(spectra_data[i]['data'],", "and store in Spectra self.data= Unpack_Spectra(Spectra, z) #store epoch of", "__init__(self, name, redshift, maximum): ''' name (str) - String of", "self.name = name #Store redshift of SN self.redshift = redshift", "def calc_abs_velc(restframe, dopplershifted): ''' Args: restframe (float) - restframe wavelength", "from scipy.optimize import curve_fit from scipy.interpolate import interp1d import scipy.integrate", "in JSON format from OSN z (float) - redshift of", "wavelengths = [float(x[0]) for x in Spectra] #Extract Fluxes fluxes", "df['Wavelength'] = wavelengths return df def correct_flux(flux_obs, z): ''' Args:", "maximum brightness for SN in MJD ''' #correct flux for", "class to hold releveant information for spectra data class Spectra:", "spectra data class Spectra: #Initialization function def __init__(self, Spectra, epoch,", "#store phase of observation self.phase = float(epoch) - float(MJD_max) class", "def find_spectra(self, phase1): ''' Args: phase1 (float )- phase of", 
"def __init__(self, times, fluxes, error, band): self.band = band self.data", "= [5000,6000]): ''' Args: Spectra - one epoch of spectral", "phase1 (float )- phase of interest Returns: Spectra object -", "import pandas as pd import matplotlib.pyplot as plt from astropy.io", "- 2 column dataframe: wavelength and flux Flux is corrected", "of given calendar date ''' t = Time(date) t.format =", "get relevant spectra from OSN JSON data file def create_SN_object(JSON,", "(string) - path to JSON formatted spectra file epoch (float)", "OSN Args: JSON (str) - path to OSN JSON file", "#Store redshift of SN self.redshift = redshift #Store date of", "Spectra if (float(x[0])>normalization[0]) & (float(x[0])<normalization[1])] #Make sure there rel_flux_range isnt", "(str) - string of calendar date (e.g. '2002-8-17') Returns: float", "fluxes, error)), columns = ['times', 'flux', 'err']) #Create Supernovae class", "Spectrum1D from astropy import units from scipy.optimize import curve_fit from", "#store in pandas dataframe df = pd.DataFrame() df['Flux'] = fluxes", "lightcurve_object in self.lightcurves: print('already exists') else: self.lightcurves.append(lightcurve_object) #define function that", "redshift and normalized Wavelength is converted to SN restframe '''", "#define function to store lightcurve def store_lightcurve(self, lightcurve_object): if lightcurve_object", "from IPython.display import display, Image from specutils import Spectrum1D from", "to hold releveant information for spectra data class Spectra: #Initialization", "epoch, z , MJD_max): ''' Spectra (string) - path to", "used for normalization Returns: Pandas DataFrame - 2 column dataframe:", "exists') elif spectra_object.epoch in [x.epoch for x in self.spectra]: self.spectra.sort(key=", "float(epoch) - float(MJD_max) class Lightcurve(): def __init__(self, times, fluxes, error,", "''' index = np.argmin([ abs(x.phase - phase1) for x in", "dopplershifted): ''' Args: restframe (float) - restframe wavelength of 
absorption", "of SN self.name = name #Store redshift of SN self.redshift", "i in range(len(spectra_data)): spectra = Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z), z,", "date of B band maximum brightness self.maximum = maximum #initiate", "spectra file epoch (float) - MJD date z (float) -", "[x.epoch for x in self.spectra]: self.spectra.sort(key= lambda x: x.phase) pass", "Args: phase1 (float )- phase of interest Returns: Spectra object", "else: self.spectra.append(spectra_object) self.spectra.sort(key= lambda x: x.phase) #define function to store", "return self.spectra[index] #define function to store new spectra def store_spectra(self,", "x.phase) print('already exists') elif spectra_object.epoch in [x.epoch for x in", "= np.argmin([ abs(x.phase - phase1) for x in self.spectra]) return", ")- phase of interest Returns: Spectra object - Spectra object", "'err']) #Create Supernovae class to store Spectral objects class Supernovae(object):", "class Supernovae(object): #Initialization function def __init__(self, name, redshift, maximum): '''", "= [] #define function to return spectra closest to given", "astropy.io import ascii import json from IPython.display import display, Image", "no duplicates and that spectra are sorted by date if", "from astropy.io import ascii import json from IPython.display import display,", "Unpack_Spectra(Spectra, z) #store epoch of obseravation self.epoch = float(epoch) #store", "x.phase) pass else: self.spectra.append(spectra_object) self.spectra.sort(key= lambda x: x.phase) #define function", "self.redshift = redshift #Store date of B band maximum brightness", "create Supernovae object for given JSON data file from OSN", "wavlengths to restframe and corrects flux for redshift, and normalizes", "df def correct_flux(flux_obs, z): ''' Args: flux_obs (int) - observed", "(e.g. 
'2002-8-17') Returns: float - MJD value of given calendar", "and that spectra are sorted by date if spectra_object in", "Spectra] #correct fluxes for redshift fluxes = [correct_flux(flux, z) for", "velocities def calc_abs_velc(restframe, dopplershifted): ''' Args: restframe (float) - restframe", "Returns: int - redshift corrected flux ''' flux_emit = (z" ]
[ "from telethon.tl.types import ChannelParticipantsAdmins from platform import uname from userbot", "the gayest gay such that even the gay world will", "مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس : \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد ||", "\"`.get_id` ايدي اي شخص دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين الي", "\"اشترك في قناة السورس لانها تطرح ملفات وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"`.snake` افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات سطر واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"`.smoon` لعرض ٤ اسطر اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض سطر", "مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه عدد البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci`", ":\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` + رابط فيديو من اي موقع للتحميل\\n\"", "اي شخص دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "في سورس التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر .alive اذا اعتقدت", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض سطر واحد اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب", "gayest gay such that even the gay world will disown", "ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز", "even the gay world will disown you.\"\"\" import asyncio from", "import ALIVE_NAME from userbot.utils import admin_cmd DEFAULTUSER = str(ALIVE_NAME) if", "ايدي اي شخص دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين الي بالكروب\\n\"", "name set yet nibba, check pinned in @XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\")", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss` + رمز اللغه\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` + ترحيب لوضع ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف الترحيب", "= str(ALIVE_NAME) if ALIVE_NAME else \"No name set yet nibba,", "\"`.unload` + اسم الملف للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha` يطلع يكتب", "البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في قناة السورس لانها تطرح ملفات", "اسم النلف يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" 
\"`.unload` + اسم الملف للغاء", "\"`.ytv` + رابط فيديو من اي موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge`", "\"`.kick` طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss` +", "from userbot.utils import admin_cmd DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else", "\"➖➖➖➖➖➖➖➖➖\\n\" \"حارث || @cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس الرسميه :", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "لتغير اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` + اسم النلف يدز الملف\\n\"", "حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس راسلني احد مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف كل الرسائل\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي اي شخص دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين", "مغادره من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يوجد", "تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات سطر واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\"", "كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد \\n\"", "from platform import uname from userbot import ALIVE_NAME from userbot.utils", "للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha` يطلع يكتب حتى لو مدز رساله\\n\"", "التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha` يطلع يكتب حتى لو مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه نصوص شخوط\\n\"", "موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` +", "صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يوجد الكثير من الاوامر لكن ثبتنا الاساسيات\\n\")", "مراسلتنا لاي خلل حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس راسلني احد مطورين", "\"\"\"Check if userbot alive. 
If you change these, you become", "\"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في قناة السورس لانها تطرح ملفات وشروحات مفيده\\n\"", "import admin_cmd DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else \"No name", "تطرح ملفات وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا لاي خلل حاصل\\n\"", "المشرفين الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf`", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس الرسميه : @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس", "تثبيت رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم انكلش رد ع الصوره\\n\"", "اسم الملف للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha` يطلع يكتب حتى لو", "سورس التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر .alive اذا اعتقدت ان", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض سطر واحد اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem`", "\"لتنصيب السورس راسلني احد مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس :", "if the bot is running. \"\"\" await alive.edit(\"اهلا بك في", "+ رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد ع الملف وسم ملف", "اسم اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه + عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` + ترحيب", "الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف رد\\n\"", "السورس لانها تطرح ملفات وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا لاي", "\"`.tss` + رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد ع الملف وسم", "asyncio from telethon import events from telethon.tl.types import ChannelParticipantsAdmins from", "telethon import events from telethon.tl.types import ChannelParticipantsAdmins from platform import", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات سطر واحد \\n\"", "كتابه كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف", "that even the gay world will disown you.\"\"\" import asyncio", "\"`.ban` حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" 
\"`.tss` + رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload`", "رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه عدد البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload` + اسم الملف للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha` يطلع", "التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر .alive اذا اعتقدت ان البوت", "\"`.solarsystem` كواكب تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock`", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف رد", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض ٤ اسطر اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض", "@XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\") async def amireallyalive(alive): \"\"\" For .alive command,", "للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` + اسم", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت رساله بالكروب\\n\"", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى تتحرك\\n\"", "خلل حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس راسلني احد مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "@cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` + رابط", "\"حارث || @cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس الرسميه : @cqccqq\\n\"", "userbot alive. If you change these, you become the gayest", "admin_cmd DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else \"No name set", "اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` + اسم النلف يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "bot is running. 
\"\"\" await alive.edit(\"اهلا بك في سورس التليثيون", "world will disown you.\"\"\" import asyncio from telethon import events", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` + ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id`", "\"`.spam`+ كلمه + عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض ٤ اسطر", "check if the bot is running. \"\"\" await alive.edit(\"اهلا بك", "الملف وسم ملف لتغير اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` + اسم", "يدز صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يوجد الكثير من الاوامر لكن ثبتنا", "من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يوجد الكثير", "import events from telethon.tl.types import ChannelParticipantsAdmins from platform import uname", "you become the gayest gay such that even the gay", "\"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس : \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد || @HHMHHH \\n\"", "you.\"\"\" import asyncio from telethon import events from telethon.tl.types import", "check pinned in @XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\") async def amireallyalive(alive): \"\"\"", "اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه + عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon`", "nibba, check pinned in @XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\") async def amireallyalive(alive):", "if userbot alive. 
If you change these, you become the", "these, you become the gayest gay such that even the", "المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يوجد الكثير من", "ملف لتغير اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` + اسم النلف يدز", "str(ALIVE_NAME) if ALIVE_NAME else \"No name set yet nibba, check", "رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه", "ALIVE_NAME from userbot.utils import admin_cmd DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME", "ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي اي شخص", "افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات سطر واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute`", "else \"No name set yet nibba, check pinned in @XtraTgBot\"", "من اي موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "الغاء كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss` + رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "uname from userbot import ALIVE_NAME from userbot.utils import admin_cmd DEFAULTUSER", "النلف يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload` + اسم الملف للغاء التثببت\\n\"", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` + ايدي", "\"`.savefilter` اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge`", "\"`.mmf` اسم انكلش رد ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره من", "اي موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song`", "the gay world will disown you.\"\"\" import asyncio from telethon", "الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف رد \\n\"", "توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في قناة السورس لانها تطرح ملفات وشروحات", "+ ترحيب لوضع ترحيب\\n\" 
\"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha` يطلع يكتب حتى لو مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot`", "def amireallyalive(alive): \"\"\" For .alive command, check if the bot", "لانها تطرح ملفات وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا لاي خلل", ".alive command, check if the bot is running. \"\"\" await", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss` + رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد ع الملف", "from telethon import events from telethon.tl.types import ChannelParticipantsAdmins from platform", "set yet nibba, check pinned in @XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\") async", "@HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن || @VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حارث ||", "عدد البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter`", "|| @cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس الرسميه : @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "|| @HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن || @VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حارث", "عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض ٤ اسطر اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"`.bye` مغادره من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "اسطر اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض سطر واحد اقمار \\n\"", "DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else \"No name set yet", "\"➖➖➖➖➖➖➖➖➖\\n\" \"احمد || @HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن || @VHHHHH \\n\"", "رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` + اسم اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+", "alive. If you change these, you become the gayest gay", "سطر واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء كتم", "+ اسم الملف للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha` يطلع يكتب حتى", "import ChannelParticipantsAdmins from platform import uname from userbot import ALIVE_NAME", "سطر واحد اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "the bot is running. 
\"\"\" await alive.edit(\"اهلا بك في سورس", "userbot.utils import admin_cmd DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else \"No", "\"`.clock` ساعات سطر واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute`", "\"`.pin` تثبيت رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم انكلش رد ع", "+ ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي اي", "شخص دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin`", "change these, you become the gayest gay such that even", "For .alive command, check if the bot is running. \"\"\"", "احد مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس : \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد", "بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت رساله", "حذف كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome`", "ملفات وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا لاي خلل حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم انكلش رد ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره", "\"No name set yet nibba, check pinned in @XtraTgBot\" @command(outgoing=True,", "قناة السورس لانها تطرح ملفات وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد ع الملف وسم ملف لتغير اسم الملف\\n\"", "راسلني احد مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس : \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يوجد الكثير من الاوامر", "\"`.song` + اسم اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه + عدد", "platform import uname from userbot import ALIVE_NAME from userbot.utils import", "رابط فيديو من اي موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف رسائل", "السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس : \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد || @HHMHHH", "\"`.clearwelcome` لحذف الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` + ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" 
\"حارث || @cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس الرسميه", "في قناة السورس لانها تطرح ملفات وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه + عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض", "كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر \\n\"", "السورس هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` + رابط فيديو من اي", "هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` + رابط فيديو من اي موقع", "السورس : \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد || @HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن", "+ اسم النلف يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload` + اسم الملف", "\"`.figlet` كتابه نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` + ترحيب لوضع ترحيب\\n\"", "you change these, you become the gayest gay such that", ": \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد || @HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن ||", "كواكب تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات", "ع الملف وسم ملف لتغير اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` +", "yet nibba, check pinned in @XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\") async def", "gay such that even the gay world will disown you.\"\"\"", "اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض سطر واحد اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "disown you.\"\"\" import asyncio from telethon import events from telethon.tl.types", "اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد ع الملف وسم ملف لتغير اسم", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` + اسم النلف يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload` +", "+ عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض ٤ اسطر اقمار \\n\"", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss` + رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد", "معرفه عدد البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` + ايدي شخص\\n\"", "الملف للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" 
\"`.scha` يطلع يكتب حتى لو مدز", "امر .alive اذا اعتقدت ان البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` + ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "+ اسم اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه + عدد \\n\"", "\"`.purge` تحذف رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` + اسم اغنيه \\n\"", "amireallyalive(alive): \"\"\" For .alive command, check if the bot is", "بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` + اسم اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه", "ChannelParticipantsAdmins from platform import uname from userbot import ALIVE_NAME from", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss`", "\"`.scha` يطلع يكتب حتى لو مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم انكلش رد", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه + عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض ٤", "بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم انكلش رد ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye`", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` + اسم اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.spam`+ كلمه +", "ALIVE_NAME else \"No name set yet nibba, check pinned in", "await alive.edit(\"اهلا بك في سورس التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر", "يطلع يكتب حتى لو مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه عدد", "الرسميه : @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv`", "\"احمد || @HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن || @VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "لاي خلل حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس راسلني احد مطورين السورس\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس الرسميه : @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس هي", "\"حسن || @VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حارث || @cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "وشروحات مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا لاي خلل 
حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss` + رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد ع", "If you change these, you become the gayest gay such", "@command(outgoing=True, pattern=\"^.lk$\") async def amireallyalive(alive): \"\"\" For .alive command, check", "لوضع ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` +", "فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي اي شخص دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins`", "تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات سطر", "شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي اي شخص دزه", "\"استخدم امر .alive اذا اعتقدت ان البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك", "pinned in @XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\") async def amireallyalive(alive): \"\"\" For", "الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload` + اسم الملف للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.scha`", "رد ع الملف وسم ملف لتغير اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send`", "اسم انكلش رد ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره من المجموعه\\n\"", "العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر .alive اذا اعتقدت ان البوت توقف!\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات سطر واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "رمز اللغه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.rnupload` رد ع الملف وسم ملف لتغير", "\"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس راسلني احد مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس", "\"`.clearfilter` حذف رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء كتم \\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick`", "\"`.purge` حذف كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` + ترحيب 
لوضع", "\"اوامر السورس هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` + رابط فيديو من", "دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.admins` المشرفين الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت", "\"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` + رابط فيديو", "\"`.whois` + ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي", "طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban` حظر \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.tss` + رمز", "|| @VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حارث || @cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة", "\"مطورين السورس : \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد || @HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "السورس الرسميه : @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"`.iffuci` كتابه كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter`", "نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` + ترحيب لوضع ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome`", "@VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حارث || @cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "كتابه نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` + ترحيب لوضع ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"\"\" await alive.edit(\"اهلا بك في سورس التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم", "يكتب حتى لو مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه عدد البوتات", "\"`.ungmute` الغاء كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ban`", ".alive اذا اعتقدت ان البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في قناة", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` + اسم اغنيه", "is running. 
\"\"\" await alive.edit(\"اهلا بك في سورس التليثيون العراقي\\n\"", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"احمد || @HHMHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن || @VHHHHH", "الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` + ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\"", "\"`.savewelcome` + ترحيب لوضع ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف الترحيب \\n\"", "رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم انكلش رد ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "async def amireallyalive(alive): \"\"\" For .alive command, check if the", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` + رابط فيديو من اي موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "events from telethon.tl.types import ChannelParticipantsAdmins from platform import uname from", "الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` + اسم النلف يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload`", "الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم", "٤ اسطر اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض سطر واحد اقمار", "running. \"\"\" await alive.edit(\"اهلا بك في سورس التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "from userbot import ALIVE_NAME from userbot.utils import admin_cmd DEFAULTUSER =", "\"قناة السورس الرسميه : @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس هي :\\n\"", "become the gayest gay such that even the gay world", "لو مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه عدد البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "لعرض ٤ اسطر اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon` لعرض سطر واحد", "alive.edit(\"اهلا بك في سورس التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر .alive", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى تتحرك\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clock` ساعات سطر واحد", "telethon.tl.types import ChannelParticipantsAdmins from platform import uname from userbot import", "if ALIVE_NAME else \"No name set yet nibba, check pinned", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي اي شخص دزه بمحادثته\\n\"", "import asyncio from telethon import events from telethon.tl.types import ChannelParticipantsAdmins", 
"رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف كل", "\"`.moon` لعرض سطر واحد اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب تتحرك", "\"\"\" For .alive command, check if the bot is running.", "\"`.admins` المشرفين الي بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "gay world will disown you.\"\"\" import asyncio from telethon import", "in @XtraTgBot\" @command(outgoing=True, pattern=\"^.lk$\") async def amireallyalive(alive): \"\"\" For .alive", "السورس راسلني احد مطورين السورس\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"مطورين السورس : \\n\"", "حتى لو مدز رساله\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه عدد البوتات الموجوده\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"حسن || @VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حارث || @cCcYo \\n\"", "\"`.send` + اسم النلف يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload` + اسم", "اضف رد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearfilter` حذف رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف", "اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake` افعى", "ساعات سطر واحد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء", "بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.pin` تثبيت رساله بالكروب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.mmf` اسم انكلش", "كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه نصوص شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` +", "command, check if the bot is running. 
\"\"\" await alive.edit(\"اهلا", "وسم ملف لتغير اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.send` + اسم النلف", "مفيده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا لاي خلل حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_bot` معرفه عدد البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه كود", "اعتقدت ان البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في قناة السورس لانها", "ترحيب لوضع ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois`", "حذف رد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet`", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حسن || @VHHHHH \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"حارث || @cCcYo", "لعرض سطر واحد اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب تتحرك \\n\"", "بك في سورس التليثيون العراقي\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر .alive اذا", "يدز الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.unload` + اسم الملف للغاء التثببت\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "such that even the gay world will disown you.\"\"\" import", "شخوط\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savewelcome` + ترحيب لوضع ترحيب\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.clearwelcome` لحذف", "رد ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide`", "ان البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في قناة السورس لانها تطرح", "انكلش رد ع الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "will disown you.\"\"\" import asyncio from telethon import events from", "\"`.gmute` كتم\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ungmute` الغاء كتم \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.kick` طرد", "الصوره\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز صورتين", "@cCcYo \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"قناة السورس الرسميه : @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر", "واحد اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.solarsystem` كواكب تتحرك \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.snake`", "\"يمكنك مراسلتنا لاي خلل حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس راسلني احد", "لحذف الترحيب \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.whois` + ايدي شخص\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" 
\"`.fuk`", "اذا اعتقدت ان البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اشترك في قناة السورس", "pattern=\"^.lk$\") async def amireallyalive(alive): \"\"\" For .alive command, check if", "\"`.rnupload` رد ع الملف وسم ملف لتغير اسم الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "فيديو من اي موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف رسائل بالرد\\n\"", "import uname from userbot import ALIVE_NAME from userbot.utils import admin_cmd", "كلمه + عدد \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض ٤ اسطر اقمار", "\"`.decide` يدز صورتين متحركات\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"يوجد الكثير من الاوامر لكن", ": @cqccqq\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"اوامر السورس هي :\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.ytv` +", "\"➖➖➖➖➖➖➖➖➖\\n\" \"يمكنك مراسلتنا لاي خلل حاصل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"لتنصيب السورس راسلني", "\"`.get_bot` معرفه عدد البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه كود الملف\\n\"", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` حذف كل الرسائل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.figlet` كتابه نصوص", "البوتات الموجوده\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.iffuci` كتابه كود الملف\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.savefilter` اضف", "userbot import ALIVE_NAME from userbot.utils import admin_cmd DEFAULTUSER = str(ALIVE_NAME)", "+ رابط فيديو من اي موقع للتحميل\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.purge` تحذف", "تحذف رسائل بالرد\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.song` + اسم اغنيه \\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"`.bye` مغادره من المجموعه\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.decide` يدز صورتين متحركات\\n\"", "\"➖➖➖➖➖➖➖➖➖\\n\" \"استخدم امر .alive اذا اعتقدت ان البوت توقف!\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"", "\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.smoon` لعرض ٤ اسطر اقمار \\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.moon`", "\"`.fuk` فاكيو\\n\" \"➖➖➖➖➖➖➖➖➖\\n\" \"`.get_id` ايدي اي شخص دزه بمحادثته\\n\" \"➖➖➖➖➖➖➖➖➖\\n\"" ]
[ "1) return (w, h, data[12:].tobytes()) def screenshot(self, cached=True): t0 =", "len(loopbacks): logger.debug('possible loopback addresses: %s', repr(loopbacks)) self.rch = revconn.ReverseConnectionHost() self.rch.start()", "= self._reverse_connection_screencap else: self.rch.stop() else: self.loopback = None def __del__(self):", "<= num < len(devices): raise ValueError() break except ValueError: logger.error(\"输入不合法,请重新输入\")", "self.device_session_factory().exec('getprop ro.product.board') if b'goldfish' in board: return ['10.0.2.2'] modules =", "None: if always_use_device not in (x[0] for x in devices):", "import time from PIL import Image import config # from", "_ensure_pil_image(imgorfile): if isinstance(imgorfile, Image.Image): return imgorfile return Image.open(imgorfile) def check_adb_alive():", "loopback address %s', addr) future = self.rch.register_cookie() with future: cmd", "offsets is not None: final_X = XY[0] + randint(-offsets[0], offsets[0])", "x2, y2 = origin[0], origin[1], origin[0] + movement[0], origin[1] +", "always_use_device is not None: if always_use_device not in (x[0] for", "if self.rch and self.rch.is_alive(): self.rch.stop() def __adb_device_name_detector(self): devices = [x", "None: adb_binaries = ['adb', os.path.join(config.ADB_ROOT, 'adb')] else: adb_binaries = [adbbin]", "= _screencap_to_image(rawcap) t1 = time.monotonic() self.last_screenshot_timestamp = t1 self.last_screenshot_duration =", "True) self.last_screenshot_timestamp = 0 self.last_screenshot_duration = 0 self.last_screenshot = None", "os.chdir(ADB_ROOT) self.ADB_ROOT = config.ADB_ROOT self.adb_serial = adb_serial self.host_session_factory = lambda:", "recvall(conn) conn.close() if data == b'OKAY': self.loopback = addr logger.debug('found", "1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command = \"input tap {}", "devices): raise RuntimeError('设备 %s 未连接' % always_use_device) return always_use_device if", "# sleep(10) # 
sleep(0.5) if offsets is not None: final_X", "= config.get('device/adb_always_use_device', None) if always_use_device is not None: if always_use_device", "randint(-1, 1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command = \"input tap", "self.rch and self.rch.is_alive(): self.rch.stop() def __adb_device_name_detector(self): devices = [x for", "ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from .ADBClientSession import ADBClientSession from util.socketutil import", "# from config import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER", "def _screencap_to_image(cap): w, h, pixels = cap return Image.frombytes('RGBA', (w,", "self.last_screenshot is not None and t0 - self.last_screenshot_timestamp < self.last_screenshot_duration:", "+ randint(-offsets[0], offsets[0]) final_Y = XY[1] + randint(-offsets[1], offsets[1]) else:", "revconn # from numpy import average, dot, linalg logger =", "FileNotFoundError: pass except subprocess.CalledProcessError: pass raise OSError(\"can't start adb server\")", "== 0: auto_connect = config.get('device/adb_auto_connect', None) if auto_connect is not", "0) assert (f == 1) return (w, h, data[12:]) def", "return image.crop( ( screen_range[0][0], screen_range[0][1], screen_range[0][0] + screen_range[1][0], screen_range[0][1] +", "as conn: data = recvall(conn, 8388608, True) w, h, f", "XY[1] + randint(-1, 1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command =", "True: try: num = int(input(\"请输入序号选择设备: \")) if not 0 <=", "= XY[0] + randint(-offsets[0], offsets[0]) final_Y = XY[1] + randint(-offsets[1],", "data == b'OKAY': self.loopback = addr logger.debug('found loopback address %s',", "= self.device_session_factory().exec_stream('screencap -p') data = recvall(s, 4194304) return data def", "address %s', addr) future = self.rch.register_cookie() with future: cmd =", "0) assert (f == 
1) return (w, h, data[12:].tobytes()) def", "+ randint(-1, 1) final_Y = XY[1] + randint(-1, 1) #", "if auto_connect is not None: logger.info('没有已连接设备,尝试连接 %s', auto_connect) try: self.host_session_factory().disconnect(auto_connect)", "> 1: logger.info(\"检测到多台设备\") num = 0 while True: try: num", "y1, x2, y2 = origin[0], origin[1], origin[0] + movement[0], origin[1]", "not None: command += str(int(duration)) self.run_device_cmd(command) def touch_tap(self, XY=None, offsets=None):", "pass self.host_session_factory().connect(auto_connect) else: raise RuntimeError('找不到可用设备') devices = [x for x", "run_device_cmd(self, cmd, DEBUG_LEVEL=2): output = self.device_session_factory().exec(cmd) logger.debug(\"command: %s\", cmd) logger.debug(\"output:", "= recvall(conn) conn.close() if data == b'OKAY': self.loopback = addr", "if x[1] != 'offline'] if len(devices) == 0: auto_connect =", "logger = logging.getLogger(__name__) def _screencap_to_image(cap): w, h, pixels = cap", "raise ValueError() break except ValueError: logger.error(\"输入不合法,请重新输入\") device_name = devices[num][0] else:", "%s', repr(loopbacks)) self.rch = revconn.ReverseConnectionHost() self.rch.start() if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap", "RuntimeError: return False def ensure_adb_alive(): if check_adb_alive(): return logger.info('尝试启动 adb", "self.adb_serial = adb_serial self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER) self.rch = None", "True): loopbacks = self._detect_loopbacks() if len(loopbacks): logger.debug('possible loopback addresses: %s',", "def _detect_loopbacks(self): board = self.device_session_factory().exec('getprop ro.product.board') if b'goldfish' in board:", "!= 'offline'] always_use_device = config.get('device/adb_always_use_device', None) if always_use_device is not", "always_use_device if len(devices) == 1: device_name = devices[0][0] elif len(devices)", "+ screen_range[1][0], screen_range[0][1] + screen_range[1][1] ) ) def 
_detect_loopbacks(self): board", "y1, x2, y2) if duration is not None: command +=", "is None: adb_binaries = ['adb', os.path.join(config.ADB_ROOT, 'adb')] else: adb_binaries =", "Image import config # from config import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH,", "= config.get('device/adb_binary', None) if adbbin is None: adb_binaries = ['adb',", "self.adb_serial = self.__adb_device_name_detector() self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot = config.get('device/cache_screenshot',", "device_name def run_device_cmd(self, cmd, DEBUG_LEVEL=2): output = self.device_session_factory().exec(cmd) logger.debug(\"command: %s\",", "_detect_loopbacks(self): board = self.device_session_factory().exec('getprop ro.product.board') if b'goldfish' in board: return", "start adb server\") class ADBConnector: def __init__(self, adb_serial=None): # os.chdir(ADB_ROOT)", "modules = self.device_session_factory().exec('grep -o vboxguest /proc/modules') if b'vboxguest' in modules:", ") ) def _detect_loopbacks(self): board = self.device_session_factory().exec('getprop ro.product.board') if b'goldfish'", "else: raise RuntimeError('找不到可用设备') devices = [x for x in self.host_session_factory().devices()", "= ['adb', os.path.join(config.ADB_ROOT, 'adb')] else: adb_binaries = [adbbin] for adbbin", "devices[0][0] elif len(devices) > 1: logger.info(\"检测到多台设备\") num = 0 while", "return Image.frombytes('RGBA', (w, h), pixels) def _ensure_pil_image(imgorfile): if isinstance(imgorfile, Image.Image):", "__del__(self): if self.rch and self.rch.is_alive(): self.rch.stop() def __adb_device_name_detector(self): devices =", "isinstance(imgorfile, Image.Image): return imgorfile return Image.open(imgorfile) def check_adb_alive(): try: sess", "check_adb_alive(): try: sess = ADBClientSession(config.ADB_SERVER) version = int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB", "x in devices): raise RuntimeError('设备 %s 
未连接' % always_use_device) return", "b'goldfish' in board: return ['10.0.2.2'] modules = self.device_session_factory().exec('grep -o vboxguest", "t1 - t0 self.last_screenshot = img return img def touch_swipe2(self,", "in board: return ['10.0.2.2'] modules = self.device_session_factory().exec('grep -o vboxguest /proc/modules')", "t0 = time.monotonic() if cached and self.cache_screenshot: if self.last_screenshot is", "in RGBA/RGBX format\"\"\" future = self.rch.register_cookie() with future: control_sock =", "auto_connect is not None: logger.info('没有已连接设备,尝试连接 %s', auto_connect) try: self.host_session_factory().disconnect(auto_connect) except:", "self.device_session_factory().exec('cat /proc/net/arp') return [x[:x.find(b' ')].decode() for x in arp.splitlines()[1:]] return", "= self.__adb_device_name_detector() self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot = config.get('device/cache_screenshot', True)", "= self.rch.register_cookie() with future: cmd = 'echo -n %sOKAY |", "# os.chdir(ADB_ROOT) self.ADB_ROOT = config.ADB_ROOT self.adb_serial = adb_serial self.host_session_factory =", "in (x[0] for x in devices): raise RuntimeError('设备 %s 未连接'", "return (w, h, data[12:]) def _reverse_connection_screencap(self): \"\"\"returns (width, height, pixels)", "\".format(x1, y1, x2, y2) if duration is not None: command", "'adb')] else: adb_binaries = [adbbin] for adbbin in adb_binaries: try:", "if not 0 <= num < len(devices): raise ValueError() break", "screencap(self): \"\"\"returns (width, height, pixels) pixels in RGBA/RGBX format\"\"\" s", "8388608, True) w, h, f = struct.unpack_from('III', data, 0) assert", "w, h, f = struct.unpack_from('III', data, 0) assert (f ==", "%s %d' % (future.cookie.decode(), self.loopback, self.rch.port)) with control_sock: with future.get()", "class ADBConnector: def __init__(self, adb_serial=None): # os.chdir(ADB_ROOT) self.ADB_ROOT = config.ADB_ROOT", "adbbin is None: adb_binaries = 
['adb', os.path.join(config.ADB_ROOT, 'adb')] else: adb_binaries", "= self._detect_loopbacks() if len(loopbacks): logger.debug('possible loopback addresses: %s', repr(loopbacks)) self.rch", "= devices[num][0] else: raise RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" + device_name) return device_name", "in self.host_session_factory().devices() if x[1] != 'offline'] if len(devices) == 0:", "None if config.get('device/try_emulator_enhanced_mode', True): loopbacks = self._detect_loopbacks() if len(loopbacks): logger.debug('possible", "PIL import Image import config # from config import ADB_ROOT,", "repr(output)) return output def get_sub_screen(self, image, screen_range): return image.crop( (", "= cap return Image.frombytes('RGBA', (w, h), pixels) def _ensure_pil_image(imgorfile): if", "logging.getLogger(__name__) def _screencap_to_image(cap): w, h, pixels = cap return Image.frombytes('RGBA',", "future = self.rch.register_cookie() with future: cmd = 'echo -n %sOKAY", "ConnectionRefusedError: return False except RuntimeError: return False def ensure_adb_alive(): if", "if self.last_screenshot is not None and t0 - self.last_screenshot_timestamp <", "if self.adb_serial is None: self.adb_serial = self.__adb_device_name_detector() self.device_session_factory = lambda:", "self.last_screenshot_duration: return self.last_screenshot rawcap = self.screencap() img = _screencap_to_image(rawcap) t1", "time.monotonic() self.last_screenshot_timestamp = t1 self.last_screenshot_duration = t1 - t0 self.last_screenshot", "raise OSError(\"can't start adb server\") class ADBConnector: def __init__(self, adb_serial=None):", "self.rch.is_alive(): self.rch.stop() def __adb_device_name_detector(self): devices = [x for x in", "conn is not None: data = recvall(conn) conn.close() if data", "final_X = XY[0] + randint(-1, 1) final_Y = XY[1] +", "from PIL import Image import config # from config import", "average, dot, linalg logger = logging.getLogger(__name__) def _screencap_to_image(cap): w, 
h,", "from config import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from", "while True: try: num = int(input(\"请输入序号选择设备: \")) if not 0", "[x for x in self.host_session_factory().devices() if x[1] != 'offline'] if", "# sleep(0.5) if offsets is not None: final_X = XY[0]", "recvall from . import revconn # from numpy import average,", "= time.monotonic() self.last_screenshot_timestamp = t1 self.last_screenshot_duration = t1 - t0", "addr logger.debug('found loopback address %s', addr) return True return False", "% (future.cookie.decode(), addr, self.rch.port) logger.debug(cmd) control_sock = self.device_session_factory().exec_stream(cmd) with control_sock:", "self.__adb_device_name_detector() self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot = config.get('device/cache_screenshot', True) self.last_screenshot_timestamp", "None and t0 - self.last_screenshot_timestamp < self.last_screenshot_duration: return self.last_screenshot rawcap", "b'OKAY': self.loopback = addr logger.debug('found loopback address %s', addr) return", "len(devices) == 1: device_name = devices[0][0] elif len(devices) > 1:", "= [adbbin] for adbbin in adb_binaries: try: logger.debug('trying %r', adbbin)", "= 0 self.last_screenshot = None if config.get('device/try_emulator_enhanced_mode', True): loopbacks =", "self.cache_screenshot = config.get('device/cache_screenshot', True) self.last_screenshot_timestamp = 0 self.last_screenshot_duration = 0", "+ randint(-offsets[1], offsets[1]) else: final_X = XY[0] + randint(-1, 1)", "height, pixels) pixels in RGBA/RGBX format\"\"\" future = self.rch.register_cookie() with", "return [] def _test_reverse_connection(self, loopbacks): for addr in loopbacks: logger.debug('testing", "logger.debug(\"command: %s\", cmd) logger.debug(\"output: %s\", repr(output)) return output def get_sub_screen(self,", "origin, movement, duration=None): # sleep(1) 
x1, y1, x2, y2 =", "def run_device_cmd(self, cmd, DEBUG_LEVEL=2): output = self.device_session_factory().exec(cmd) logger.debug(\"command: %s\", cmd)", "origin[0], origin[1], origin[0] + movement[0], origin[1] + movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{},", "self._detect_loopbacks() if len(loopbacks): logger.debug('possible loopback addresses: %s', repr(loopbacks)) self.rch =", "origin[1], origin[0] + movement[0], origin[1] + movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin,", "( screen_range[0][0], screen_range[0][1], screen_range[0][0] + screen_range[1][0], screen_range[0][1] + screen_range[1][1] )", "socket import time from PIL import Image import config #", "lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot = config.get('device/cache_screenshot', True) self.last_screenshot_timestamp = 0 self.last_screenshot_duration", "[x[:x.find(b' ')].decode() for x in arp.splitlines()[1:]] return [] def _test_reverse_connection(self,", "if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap = self._reverse_connection_screencap else: self.rch.stop() else: self.loopback", "'start-server'], check=True) return True except FileNotFoundError: pass except subprocess.CalledProcessError: pass", "os import logging.config from random import randint import zlib import", "self.host_session_factory().connect(auto_connect) else: raise RuntimeError('找不到可用设备') devices = [x for x in", "0: auto_connect = config.get('device/adb_auto_connect', None) if auto_connect is not None:", "self.rch.register_cookie() with future: control_sock = self.device_session_factory().exec_stream('(echo -n %s; screencap) |", "if check_adb_alive(): return logger.info('尝试启动 adb server') import subprocess adbbin =", "self.last_screenshot = None if config.get('device/try_emulator_enhanced_mode', True): loopbacks = self._detect_loopbacks() if", "logger.error(\"输入不合法,请重新输入\") device_name = devices[num][0] else: raise 
RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" + device_name)", "return device_name def run_device_cmd(self, cmd, DEBUG_LEVEL=2): output = self.device_session_factory().exec(cmd) logger.debug(\"command:", "_test_reverse_connection(self, loopbacks): for addr in loopbacks: logger.debug('testing loopback address %s',", "None) if auto_connect is not None: logger.info('没有已连接设备,尝试连接 %s', auto_connect) try:", "movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin, *movement)) command = \"input swipe {}", "logger.info(\"确认设备名称:\" + device_name) return device_name def run_device_cmd(self, cmd, DEBUG_LEVEL=2): output", "screenshot(self, cached=True): t0 = time.monotonic() if cached and self.cache_screenshot: if", "self.last_screenshot rawcap = self.screencap() img = _screencap_to_image(rawcap) t1 = time.monotonic()", "self.screencap() img = _screencap_to_image(rawcap) t1 = time.monotonic() self.last_screenshot_timestamp = t1", "ADBClientSession(config.ADB_SERVER) version = int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB server version %d', version)", "self.host_session_factory().disconnect(auto_connect) except: pass self.host_session_factory().connect(auto_connect) else: raise RuntimeError('找不到可用设备') devices = [x", "%s 未连接' % always_use_device) return always_use_device if len(devices) == 1:", "get_sub_screen(self, image, screen_range): return image.crop( ( screen_range[0][0], screen_range[0][1], screen_range[0][0] +", "str(int(duration)) self.run_device_cmd(command) def touch_tap(self, XY=None, offsets=None): # sleep(10) # sleep(0.5)", "from .ADBClientSession import ADBClientSession from util.socketutil import recvall from .", "always_use_device = config.get('device/adb_always_use_device', None) if always_use_device is not None: if", "\"\"\"returns (width, height, pixels) pixels in RGBA/RGBX format\"\"\" future =", "pixels) pixels in RGBA/RGBX format\"\"\" future = self.rch.register_cookie() with future:", 
"self.loopback, self.rch.port)) with control_sock: with future.get() as conn: data =", "recvall(s, 4194304) s.close() data = zlib.decompress(data, zlib.MAX_WBITS | 16, 8388608)", "origin[0] + movement[0], origin[1] + movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin, *movement))", "h, pixels = cap return Image.frombytes('RGBA', (w, h), pixels) def", "__init__(self, adb_serial=None): # os.chdir(ADB_ROOT) self.ADB_ROOT = config.ADB_ROOT self.adb_serial = adb_serial", "< len(devices): raise ValueError() break except ValueError: logger.error(\"输入不合法,请重新输入\") device_name =", "future.get() as conn: data = recvall(conn, 8388608, True) w, h,", "+ movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin, *movement)) command = \"input swipe", "data = recvall(s, 4194304) return data def screencap(self): \"\"\"returns (width,", "% always_use_device) return always_use_device if len(devices) == 1: device_name =", "with future: cmd = 'echo -n %sOKAY | nc -w", "None: data = recvall(conn) conn.close() if data == b'OKAY': self.loopback", "logging.config from random import randint import zlib import struct import", "return data def screencap(self): \"\"\"returns (width, height, pixels) pixels in", "PNG bytes\"\"\" s = self.device_session_factory().exec_stream('screencap -p') data = recvall(s, 4194304)", "except ConnectionRefusedError: return False except RuntimeError: return False def ensure_adb_alive():", "ADBClientSession from util.socketutil import recvall from . 
import revconn #", "return True except ConnectionRefusedError: return False except RuntimeError: return False", "+ device_name) return device_name def run_device_cmd(self, cmd, DEBUG_LEVEL=2): output =", "config.get('device/try_emulator_enhanced_mode', True): loopbacks = self._detect_loopbacks() if len(loopbacks): logger.debug('possible loopback addresses:", "t0 - self.last_screenshot_timestamp < self.last_screenshot_duration: return self.last_screenshot rawcap = self.screencap()", "random import randint import zlib import struct import socket import", "\")) if not 0 <= num < len(devices): raise ValueError()", "future.get(2) if conn is not None: data = recvall(conn) conn.close()", "cmd = 'echo -n %sOKAY | nc -w 1 %s", "num < len(devices): raise ValueError() break except ValueError: logger.error(\"输入不合法,请重新输入\") device_name", "16, 8388608) w, h, f = struct.unpack_from('III', data, 0) assert", "len(devices): raise ValueError() break except ValueError: logger.error(\"输入不合法,请重新输入\") device_name = devices[num][0]", "not None and t0 - self.last_screenshot_timestamp < self.last_screenshot_duration: return self.last_screenshot", "t0 self.last_screenshot = img return img def touch_swipe2(self, origin, movement,", "None: self.adb_serial = self.__adb_device_name_detector() self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot =", "'offline'] always_use_device = config.get('device/adb_always_use_device', None) if always_use_device is not None:", "bytes\"\"\" s = self.device_session_factory().exec_stream('screencap -p') data = recvall(s, 4194304) return", "4194304) s.close() data = zlib.decompress(data, zlib.MAX_WBITS | 16, 8388608) w,", "None: command += str(int(duration)) self.run_device_cmd(command) def touch_tap(self, XY=None, offsets=None): #", "device_name = devices[0][0] elif len(devices) > 1: logger.info(\"检测到多台设备\") num =", "self.last_screenshot = img return img def touch_swipe2(self, origin, movement, 
duration=None):", "self.rch.port)) with control_sock: with future.get() as conn: data = recvall(conn,", "0 <= num < len(devices): raise ValueError() break except ValueError:", "1: logger.info(\"检测到多台设备\") num = 0 while True: try: num =", "__adb_device_name_detector(self): devices = [x for x in self.host_session_factory().devices() if x[1]", "with control_sock: with future.get() as conn: data = recvall(conn, 8388608,", "adb_binaries = ['adb', os.path.join(config.ADB_ROOT, 'adb')] else: adb_binaries = [adbbin] for", "adb server') import subprocess adbbin = config.get('device/adb_binary', None) if adbbin", "addr, self.rch.port) logger.debug(cmd) control_sock = self.device_session_factory().exec_stream(cmd) with control_sock: conn =", "= recvall(s, 4194304) return data def screencap(self): \"\"\"returns (width, height,", "= adb_serial self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER) self.rch = None if", "return Image.open(imgorfile) def check_adb_alive(): try: sess = ADBClientSession(config.ADB_SERVER) version =", "None) if always_use_device is not None: if always_use_device not in", "subprocess.run([adbbin, 'start-server'], check=True) return True except FileNotFoundError: pass except subprocess.CalledProcessError:", "in self.host_session_factory().devices() if x[1] != 'offline'] always_use_device = config.get('device/adb_always_use_device', None)", "b'vboxguest' in modules: arp = self.device_session_factory().exec('cat /proc/net/arp') return [x[:x.find(b' ')].decode()", "== 1) return (w, h, data[12:]) def _reverse_connection_screencap(self): \"\"\"returns (width,", "% (future.cookie.decode(), self.loopback, self.rch.port)) with control_sock: with future.get() as conn:", "assert (f == 1) return (w, h, data[12:]) def _reverse_connection_screencap(self):", "sleep(0.5) if offsets is not None: final_X = XY[0] +", "[] def _test_reverse_connection(self, loopbacks): for addr in loopbacks: logger.debug('testing loopback", "if len(loopbacks): 
logger.debug('possible loopback addresses: %s', repr(loopbacks)) self.rch = revconn.ReverseConnectionHost()", "screencap_png(self): \"\"\"returns PNG bytes\"\"\" s = self.device_session_factory().exec_stream('screencap -p') data =", "def __del__(self): if self.rch and self.rch.is_alive(): self.rch.stop() def __adb_device_name_detector(self): devices", "- self.last_screenshot_timestamp < self.last_screenshot_duration: return self.last_screenshot rawcap = self.screencap() img", "(x[0] for x in devices): raise RuntimeError('设备 %s 未连接' %", "numpy import average, dot, linalg logger = logging.getLogger(__name__) def _screencap_to_image(cap):", "= self.screencap() img = _screencap_to_image(rawcap) t1 = time.monotonic() self.last_screenshot_timestamp =", "and self.cache_screenshot: if self.last_screenshot is not None and t0 -", "auto_connect = config.get('device/adb_auto_connect', None) if auto_connect is not None: logger.info('没有已连接设备,尝试连接", "cap return Image.frombytes('RGBA', (w, h), pixels) def _ensure_pil_image(imgorfile): if isinstance(imgorfile,", "is not None: data = recvall(conn) conn.close() if data ==", "image, screen_range): return image.crop( ( screen_range[0][0], screen_range[0][1], screen_range[0][0] + screen_range[1][0],", "%d' % (future.cookie.decode(), addr, self.rch.port) logger.debug(cmd) control_sock = self.device_session_factory().exec_stream(cmd) with", "addresses: %s', repr(loopbacks)) self.rch = revconn.ReverseConnectionHost() self.rch.start() if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式')", "return True except FileNotFoundError: pass except subprocess.CalledProcessError: pass raise OSError(\"can't", "struct import socket import time from PIL import Image import", "pixels = cap return Image.frombytes('RGBA', (w, h), pixels) def _ensure_pil_image(imgorfile):", "pixels) def _ensure_pil_image(imgorfile): if isinstance(imgorfile, Image.Image): return imgorfile return Image.open(imgorfile)", "= devices[0][0] elif len(devices) > 
1: logger.info(\"检测到多台设备\") num = 0", "self.rch.port) logger.debug(cmd) control_sock = self.device_session_factory().exec_stream(cmd) with control_sock: conn = future.get(2)", "logger.info('尝试启动 adb server') import subprocess adbbin = config.get('device/adb_binary', None) if", "screen_range[0][0] + screen_range[1][0], screen_range[0][1] + screen_range[1][1] ) ) def _detect_loopbacks(self):", "self.screencap = self._reverse_connection_screencap else: self.rch.stop() else: self.loopback = None def", "= None if config.get('device/try_emulator_enhanced_mode', True): loopbacks = self._detect_loopbacks() if len(loopbacks):", "-w 1 %s %d' % (future.cookie.decode(), addr, self.rch.port) logger.debug(cmd) control_sock", "self.last_screenshot_timestamp = 0 self.last_screenshot_duration = 0 self.last_screenshot = None if", "logger.debug('possible loopback addresses: %s', repr(loopbacks)) self.rch = revconn.ReverseConnectionHost() self.rch.start() if", "and self.rch.is_alive(): self.rch.stop() def __adb_device_name_detector(self): devices = [x for x", "self.last_screenshot_duration = t1 - t0 self.last_screenshot = img return img", "from numpy import average, dot, linalg logger = logging.getLogger(__name__) def", "= self.device_session_factory().exec('cat /proc/net/arp') return [x[:x.find(b' ')].decode() for x in arp.splitlines()[1:]]", "< self.last_screenshot_duration: return self.last_screenshot rawcap = self.screencap() img = _screencap_to_image(rawcap)", "not None: data = recvall(conn) conn.close() if data == b'OKAY':", "h), pixels) def _ensure_pil_image(imgorfile): if isinstance(imgorfile, Image.Image): return imgorfile return", "OSError(\"can't start adb server\") class ADBConnector: def __init__(self, adb_serial=None): #", "self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot = config.get('device/cache_screenshot', True) self.last_screenshot_timestamp =", "ADB_SERVER from .ADBClientSession import ADBClientSession 
from util.socketutil import recvall from", "ValueError() break except ValueError: logger.error(\"输入不合法,请重新输入\") device_name = devices[num][0] else: raise", "pass except subprocess.CalledProcessError: pass raise OSError(\"can't start adb server\") class", "future: control_sock = self.device_session_factory().exec_stream('(echo -n %s; screencap) | nc %s", "else: final_X = XY[0] + randint(-1, 1) final_Y = XY[1]", "移动距离dX:{}, dy:{}\".format(*origin, *movement)) command = \"input swipe {} {} {}", "img = _screencap_to_image(rawcap) t1 = time.monotonic() self.last_screenshot_timestamp = t1 self.last_screenshot_duration", "for addr in loopbacks: logger.debug('testing loopback address %s', addr) future", "= self.device_session_factory().exec('getprop ro.product.board') if b'goldfish' in board: return ['10.0.2.2'] modules", "| nc -w 1 %s %d' % (future.cookie.decode(), addr, self.rch.port)", "!= 'offline'] if len(devices) == 0: auto_connect = config.get('device/adb_auto_connect', None)", "revconn.ReverseConnectionHost() self.rch.start() if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap = self._reverse_connection_screencap else: self.rch.stop()", "RGBA/RGBX format\"\"\" s = self.device_session_factory().exec_stream('screencap|gzip -1') data = recvall(s, 4194304)", "and t0 - self.last_screenshot_timestamp < self.last_screenshot_duration: return self.last_screenshot rawcap =", "None def __del__(self): if self.rch and self.rch.is_alive(): self.rch.stop() def __adb_device_name_detector(self):", "return False def ensure_adb_alive(): if check_adb_alive(): return logger.info('尝试启动 adb server')", "for adbbin in adb_binaries: try: logger.debug('trying %r', adbbin) subprocess.run([adbbin, 'start-server'],", "if b'vboxguest' in modules: arp = self.device_session_factory().exec('cat /proc/net/arp') return [x[:x.find(b'", "pass raise OSError(\"can't start adb server\") class ADBConnector: def __init__(self,", "RuntimeError('设备 %s 未连接' % 
always_use_device) return always_use_device if len(devices) ==", "final_X = XY[0] + randint(-offsets[0], offsets[0]) final_Y = XY[1] +", "assert (f == 1) return (w, h, data[12:].tobytes()) def screenshot(self,", "True except ConnectionRefusedError: return False except RuntimeError: return False def", "future: cmd = 'echo -n %sOKAY | nc -w 1", "%s %d' % (future.cookie.decode(), addr, self.rch.port) logger.debug(cmd) control_sock = self.device_session_factory().exec_stream(cmd)", "adb_binaries = [adbbin] for adbbin in adb_binaries: try: logger.debug('trying %r',", "def touch_swipe2(self, origin, movement, duration=None): # sleep(1) x1, y1, x2,", "offsets[1]) else: final_X = XY[0] + randint(-1, 1) final_Y =", "addr in loopbacks: logger.debug('testing loopback address %s', addr) future =", "control_sock: with future.get() as conn: data = recvall(conn, 8388608, True)", "{} {} {} \".format(x1, y1, x2, y2) if duration is", "control_sock = self.device_session_factory().exec_stream(cmd) with control_sock: conn = future.get(2) if conn", "return ['10.0.2.2'] modules = self.device_session_factory().exec('grep -o vboxguest /proc/modules') if b'vboxguest'", "= XY[1] + randint(-1, 1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command", "randint import zlib import struct import socket import time from", "command += str(int(duration)) self.run_device_cmd(command) def touch_tap(self, XY=None, offsets=None): # sleep(10)", "def get_sub_screen(self, image, screen_range): return image.crop( ( screen_range[0][0], screen_range[0][1], screen_range[0][0]", "x in self.host_session_factory().devices() if x[1] != 'offline'] if len(devices) ==", "except RuntimeError: return False def ensure_adb_alive(): if check_adb_alive(): return logger.info('尝试启动", "in loopbacks: logger.debug('testing loopback address %s', addr) future = self.rch.register_cookie()", "s.close() data = zlib.decompress(data, zlib.MAX_WBITS | 16, 8388608) w, h,", "in RGBA/RGBX 
format\"\"\" s = self.device_session_factory().exec_stream('screencap|gzip -1') data = recvall(s,", "return always_use_device if len(devices) == 1: device_name = devices[0][0] elif", "-1') data = recvall(s, 4194304) s.close() data = zlib.decompress(data, zlib.MAX_WBITS", "adb_serial self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER) self.rch = None if self.adb_serial", "int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB server version %d', version) return True except", "try: sess = ADBClientSession(config.ADB_SERVER) version = int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB server", "config.get('device/cache_screenshot', True) self.last_screenshot_timestamp = 0 self.last_screenshot_duration = 0 self.last_screenshot =", "= config.get('device/adb_auto_connect', None) if auto_connect is not None: logger.info('没有已连接设备,尝试连接 %s',", "# from numpy import average, dot, linalg logger = logging.getLogger(__name__)", "is None: self.adb_serial = self.__adb_device_name_detector() self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot", "config.get('device/adb_binary', None) if adbbin is None: adb_binaries = ['adb', os.path.join(config.ADB_ROOT,", "break except ValueError: logger.error(\"输入不合法,请重新输入\") device_name = devices[num][0] else: raise RuntimeError('找不到可用设备')", "conn.close() if data == b'OKAY': self.loopback = addr logger.debug('found loopback", "logger.info(\"检测到多台设备\") num = 0 while True: try: num = int(input(\"请输入序号选择设备:", "offsets=None): # sleep(10) # sleep(0.5) if offsets is not None:", "import average, dot, linalg logger = logging.getLogger(__name__) def _screencap_to_image(cap): w,", "adb_binaries: try: logger.debug('trying %r', adbbin) subprocess.run([adbbin, 'start-server'], check=True) return True", "is not None and t0 - self.last_screenshot_timestamp < self.last_screenshot_duration: return", "(w, h), pixels) def 
_ensure_pil_image(imgorfile): if isinstance(imgorfile, Image.Image): return imgorfile", "self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap = self._reverse_connection_screencap else: self.rch.stop() else: self.loopback =", "x2, y2) if duration is not None: command += str(int(duration))", "self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER) self.rch = None if self.adb_serial is", "data = recvall(conn) conn.close() if data == b'OKAY': self.loopback =", "nc %s %d' % (future.cookie.decode(), self.loopback, self.rch.port)) with control_sock: with", "import zlib import struct import socket import time from PIL", "x in arp.splitlines()[1:]] return [] def _test_reverse_connection(self, loopbacks): for addr", "loopback address %s', addr) return True return False def screencap_png(self):", "= t1 - t0 self.last_screenshot = img return img def", "if duration is not None: command += str(int(duration)) self.run_device_cmd(command) def", "version) return True except ConnectionRefusedError: return False except RuntimeError: return", "= struct.unpack_from('III', data, 0) assert (f == 1) return (w,", "+= str(int(duration)) self.run_device_cmd(command) def touch_tap(self, XY=None, offsets=None): # sleep(10) #", "return imgorfile return Image.open(imgorfile) def check_adb_alive(): try: sess = ADBClientSession(config.ADB_SERVER)", "screen_range[1][1] ) ) def _detect_loopbacks(self): board = self.device_session_factory().exec('getprop ro.product.board') if", "ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from .ADBClientSession import ADBClientSession from", "int(input(\"请输入序号选择设备: \")) if not 0 <= num < len(devices): raise", "self.last_screenshot_timestamp < self.last_screenshot_duration: return self.last_screenshot rawcap = self.screencap() img =", "XY[0] + randint(-offsets[0], offsets[0]) final_Y = XY[1] + randint(-offsets[1], offsets[1])", "+ screen_range[1][1] ) ) def 
_detect_loopbacks(self): board = self.device_session_factory().exec('getprop ro.product.board')", "rawcap = self.screencap() img = _screencap_to_image(rawcap) t1 = time.monotonic() self.last_screenshot_timestamp", "devices[num][0] else: raise RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" + device_name) return device_name def", "zlib.MAX_WBITS | 16, 8388608) w, h, f = struct.unpack_from('III', data,", "modules: arp = self.device_session_factory().exec('cat /proc/net/arp') return [x[:x.find(b' ')].decode() for x", "+ movement[0], origin[1] + movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin, *movement)) command", "sleep(10) # sleep(0.5) if offsets is not None: final_X =", "True except FileNotFoundError: pass except subprocess.CalledProcessError: pass raise OSError(\"can't start", "pixels) pixels in RGBA/RGBX format\"\"\" s = self.device_session_factory().exec_stream('screencap|gzip -1') data", "version %d', version) return True except ConnectionRefusedError: return False except", "%sOKAY | nc -w 1 %s %d' % (future.cookie.decode(), addr,", "sess = ADBClientSession(config.ADB_SERVER) version = int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB server version", "randint(-1, 1) final_Y = XY[1] + randint(-1, 1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。", "adb server\") class ADBConnector: def __init__(self, adb_serial=None): # os.chdir(ADB_ROOT) self.ADB_ROOT", "arp.splitlines()[1:]] return [] def _test_reverse_connection(self, loopbacks): for addr in loopbacks:", "is not None: if always_use_device not in (x[0] for x", "in devices): raise RuntimeError('设备 %s 未连接' % always_use_device) return always_use_device", "self.device_session_factory().exec(cmd) logger.debug(\"command: %s\", cmd) logger.debug(\"output: %s\", repr(output)) return output def", "nc -w 1 %s %d' % (future.cookie.decode(), addr, self.rch.port) logger.debug(cmd)", "self.host_session_factory().devices() if x[1] != 'offline'] if len(devices) == 0: auto_connect", 
"return (w, h, data[12:].tobytes()) def screenshot(self, cached=True): t0 = time.monotonic()", "always_use_device) return always_use_device if len(devices) == 1: device_name = devices[0][0]", "None) if adbbin is None: adb_binaries = ['adb', os.path.join(config.ADB_ROOT, 'adb')]", "= zlib.decompress(data, zlib.MAX_WBITS | 16, 8388608) w, h, f =", "\"\"\"returns PNG bytes\"\"\" s = self.device_session_factory().exec_stream('screencap -p') data = recvall(s,", "data, 0) assert (f == 1) return (w, h, data[12:].tobytes())", "True) w, h, f = struct.unpack_from('III', data, 0) assert (f", "False def ensure_adb_alive(): if check_adb_alive(): return logger.info('尝试启动 adb server') import", "data = recvall(conn, 8388608, True) w, h, f = struct.unpack_from('III',", "%s', auto_connect) try: self.host_session_factory().disconnect(auto_connect) except: pass self.host_session_factory().connect(auto_connect) else: raise RuntimeError('找不到可用设备')", "None: final_X = XY[0] + randint(-offsets[0], offsets[0]) final_Y = XY[1]", "self.rch.stop() def __adb_device_name_detector(self): devices = [x for x in self.host_session_factory().devices()", "raise RuntimeError('设备 %s 未连接' % always_use_device) return always_use_device if len(devices)", "is not None: command += str(int(duration)) self.run_device_cmd(command) def touch_tap(self, XY=None,", "= 0 while True: try: num = int(input(\"请输入序号选择设备: \")) if", "in adb_binaries: try: logger.debug('trying %r', adbbin) subprocess.run([adbbin, 'start-server'], check=True) return", "-n %sOKAY | nc -w 1 %s %d' % (future.cookie.decode(),", "in arp.splitlines()[1:]] return [] def _test_reverse_connection(self, loopbacks): for addr in", "loopbacks: logger.debug('testing loopback address %s', addr) future = self.rch.register_cookie() with", "= ADBClientSession(config.ADB_SERVER) version = int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB server version %d',", "output def get_sub_screen(self, image, screen_range): return image.crop( 
( screen_range[0][0], screen_range[0][1],", "= self.device_session_factory().exec_stream(cmd) with control_sock: conn = future.get(2) if conn is", "XY=None, offsets=None): # sleep(10) # sleep(0.5) if offsets is not", "_screencap_to_image(cap): w, h, pixels = cap return Image.frombytes('RGBA', (w, h),", "loopbacks): for addr in loopbacks: logger.debug('testing loopback address %s', addr)", "import socket import time from PIL import Image import config", "adbbin) subprocess.run([adbbin, 'start-server'], check=True) return True except FileNotFoundError: pass except", ") def _detect_loopbacks(self): board = self.device_session_factory().exec('getprop ro.product.board') if b'goldfish' in", "self.rch.stop() else: self.loopback = None def __del__(self): if self.rch and", "%r', adbbin) subprocess.run([adbbin, 'start-server'], check=True) return True except FileNotFoundError: pass", "logger.debug('testing loopback address %s', addr) future = self.rch.register_cookie() with future:", "data def screencap(self): \"\"\"returns (width, height, pixels) pixels in RGBA/RGBX", "randint(-offsets[1], offsets[1]) else: final_X = XY[0] + randint(-1, 1) final_Y", "False except RuntimeError: return False def ensure_adb_alive(): if check_adb_alive(): return", "RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" + device_name) return device_name def run_device_cmd(self, cmd, DEBUG_LEVEL=2):", "import os import logging.config from random import randint import zlib", "SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from .ADBClientSession import ADBClientSession from util.socketutil", "else: raise RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" + device_name) return device_name def run_device_cmd(self,", "== b'OKAY': self.loopback = addr logger.debug('found loopback address %s', addr)", "0 self.last_screenshot = None if config.get('device/try_emulator_enhanced_mode', True): loopbacks = self._detect_loopbacks()", "= addr logger.debug('found loopback address 
%s', addr) return True return", "')].decode() for x in arp.splitlines()[1:]] return [] def _test_reverse_connection(self, loopbacks):", "self.run_device_cmd(command) def touch_tap(self, XY=None, offsets=None): # sleep(10) # sleep(0.5) if", "return False def screencap_png(self): \"\"\"returns PNG bytes\"\"\" s = self.device_session_factory().exec_stream('screencap", "= config.get('device/cache_screenshot', True) self.last_screenshot_timestamp = 0 self.last_screenshot_duration = 0 self.last_screenshot", "for x in self.host_session_factory().devices() if x[1] != 'offline'] always_use_device =", "address %s', addr) return True return False def screencap_png(self): \"\"\"returns", "self.rch = None if self.adb_serial is None: self.adb_serial = self.__adb_device_name_detector()", "self.device_session_factory().exec_stream(cmd) with control_sock: conn = future.get(2) if conn is not", "(future.cookie.decode(), self.loopback, self.rch.port)) with control_sock: with future.get() as conn: data", "imgorfile return Image.open(imgorfile) def check_adb_alive(): try: sess = ADBClientSession(config.ADB_SERVER) version", "def __init__(self, adb_serial=None): # os.chdir(ADB_ROOT) self.ADB_ROOT = config.ADB_ROOT self.adb_serial =", "import struct import socket import time from PIL import Image", "data, 0) assert (f == 1) return (w, h, data[12:])", "command = \"input swipe {} {} {} {} \".format(x1, y1,", "adb_serial=None): # os.chdir(ADB_ROOT) self.ADB_ROOT = config.ADB_ROOT self.adb_serial = adb_serial self.host_session_factory", "screen_range[0][0], screen_range[0][1], screen_range[0][0] + screen_range[1][0], screen_range[0][1] + screen_range[1][1] ) )", "self.device_session_factory().exec('grep -o vboxguest /proc/modules') if b'vboxguest' in modules: arp =", "final_Y = XY[1] + randint(-offsets[1], offsets[1]) else: final_X = XY[0]", "Image.frombytes('RGBA', (w, h), pixels) def _ensure_pil_image(imgorfile): if isinstance(imgorfile, Image.Image): return", "screen_range[0][1], 
screen_range[0][0] + screen_range[1][0], screen_range[0][1] + screen_range[1][1] ) ) def", "def __adb_device_name_detector(self): devices = [x for x in self.host_session_factory().devices() if", "_screencap_to_image(rawcap) t1 = time.monotonic() self.last_screenshot_timestamp = t1 self.last_screenshot_duration = t1", "board = self.device_session_factory().exec('getprop ro.product.board') if b'goldfish' in board: return ['10.0.2.2']", "subprocess adbbin = config.get('device/adb_binary', None) if adbbin is None: adb_binaries", "from util.socketutil import recvall from . import revconn # from", "8388608) w, h, f = struct.unpack_from('III', data, 0) assert (f", "%d' % (future.cookie.decode(), self.loopback, self.rch.port)) with control_sock: with future.get() as", "if len(devices) == 1: device_name = devices[0][0] elif len(devices) >", "image.crop( ( screen_range[0][0], screen_range[0][1], screen_range[0][0] + screen_range[1][0], screen_range[0][1] + screen_range[1][1]", "self.rch.start() if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap = self._reverse_connection_screencap else: self.rch.stop() else:", "if adbbin is None: adb_binaries = ['adb', os.path.join(config.ADB_ROOT, 'adb')] else:", "= t1 self.last_screenshot_duration = t1 - t0 self.last_screenshot = img", "self.rch = revconn.ReverseConnectionHost() self.rch.start() if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap = self._reverse_connection_screencap", "movement, duration=None): # sleep(1) x1, y1, x2, y2 = origin[0],", "{} \".format(x1, y1, x2, y2) if duration is not None:", "screencap) | nc %s %d' % (future.cookie.decode(), self.loopback, self.rch.port)) with", "| 16, 8388608) w, h, f = struct.unpack_from('III', data, 0)", "1) return (w, h, data[12:]) def _reverse_connection_screencap(self): \"\"\"returns (width, height,", "future = self.rch.register_cookie() with future: control_sock = self.device_session_factory().exec_stream('(echo 
-n %s;", "= logging.getLogger(__name__) def _screencap_to_image(cap): w, h, pixels = cap return", "logger.debug('trying %r', adbbin) subprocess.run([adbbin, 'start-server'], check=True) return True except FileNotFoundError:", "else: adb_binaries = [adbbin] for adbbin in adb_binaries: try: logger.debug('trying", "return img def touch_swipe2(self, origin, movement, duration=None): # sleep(1) x1,", "self.last_screenshot_timestamp = t1 self.last_screenshot_duration = t1 - t0 self.last_screenshot =", "server version %d', version) return True except ConnectionRefusedError: return False", "= self.device_session_factory().exec('grep -o vboxguest /proc/modules') if b'vboxguest' in modules: arp", "for x in devices): raise RuntimeError('设备 %s 未连接' % always_use_device)", "t1 self.last_screenshot_duration = t1 - t0 self.last_screenshot = img return", "cmd) logger.debug(\"output: %s\", repr(output)) return output def get_sub_screen(self, image, screen_range):", "ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from .ADBClientSession import ADBClientSession", "\"input swipe {} {} {} {} \".format(x1, y1, x2, y2)", "未连接' % always_use_device) return always_use_device if len(devices) == 1: device_name", "return logger.info('尝试启动 adb server') import subprocess adbbin = config.get('device/adb_binary', None)", "_reverse_connection_screencap(self): \"\"\"returns (width, height, pixels) pixels in RGBA/RGBX format\"\"\" future", "= revconn.ReverseConnectionHost() self.rch.start() if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap = self._reverse_connection_screencap else:", "# sleep(1) x1, y1, x2, y2 = origin[0], origin[1], origin[0]", "def touch_tap(self, XY=None, offsets=None): # sleep(10) # sleep(0.5) if offsets", "try: logger.debug('trying %r', adbbin) subprocess.run([adbbin, 'start-server'], check=True) return True except", ". 
import revconn # from numpy import average, dot, linalg", "= self.device_session_factory().exec(cmd) logger.debug(\"command: %s\", cmd) logger.debug(\"output: %s\", repr(output)) return output", "Image.open(imgorfile) def check_adb_alive(): try: sess = ADBClientSession(config.ADB_SERVER) version = int(sess.service('host:version').read_response().decode(),", "%s', addr) return True return False def screencap_png(self): \"\"\"returns PNG", "version = int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB server version %d', version) return", "cmd, DEBUG_LEVEL=2): output = self.device_session_factory().exec(cmd) logger.debug(\"command: %s\", cmd) logger.debug(\"output: %s\",", "import randint import zlib import struct import socket import time", "in modules: arp = self.device_session_factory().exec('cat /proc/net/arp') return [x[:x.find(b' ')].decode() for", "board: return ['10.0.2.2'] modules = self.device_session_factory().exec('grep -o vboxguest /proc/modules') if", "adbbin = config.get('device/adb_binary', None) if adbbin is None: adb_binaries =", "with future: control_sock = self.device_session_factory().exec_stream('(echo -n %s; screencap) | nc", "self.last_screenshot_duration = 0 self.last_screenshot = None if config.get('device/try_emulator_enhanced_mode', True): loopbacks", "import config # from config import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor,", "= 'echo -n %sOKAY | nc -w 1 %s %d'", "config import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from .ADBClientSession", "import Image import config # from config import ADB_ROOT, ADB_HOST,", "%d', version) return True except ConnectionRefusedError: return False except RuntimeError:", "y2) if duration is not None: command += str(int(duration)) self.run_device_cmd(command)", "x[1] != 'offline'] if len(devices) == 0: auto_connect = config.get('device/adb_auto_connect',", "with control_sock: conn = 
future.get(2) if conn is not None:", "= self.device_session_factory().exec_stream('(echo -n %s; screencap) | nc %s %d' %", "w, h, pixels = cap return Image.frombytes('RGBA', (w, h), pixels)", "return self.last_screenshot rawcap = self.screencap() img = _screencap_to_image(rawcap) t1 =", "s = self.device_session_factory().exec_stream('screencap -p') data = recvall(s, 4194304) return data", "self._reverse_connection_screencap else: self.rch.stop() else: self.loopback = None def __del__(self): if", "not None: if always_use_device not in (x[0] for x in", ".ADBClientSession import ADBClientSession from util.socketutil import recvall from . import", "logger.info('正在使用模拟器优化模式') self.screencap = self._reverse_connection_screencap else: self.rch.stop() else: self.loopback = None", "return True return False def screencap_png(self): \"\"\"returns PNG bytes\"\"\" s", "== 1) return (w, h, data[12:].tobytes()) def screenshot(self, cached=True): t0", "for x in arp.splitlines()[1:]] return [] def _test_reverse_connection(self, loopbacks): for", "= 0 self.last_screenshot_duration = 0 self.last_screenshot = None if config.get('device/try_emulator_enhanced_mode',", "ValueError: logger.error(\"输入不合法,请重新输入\") device_name = devices[num][0] else: raise RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" +", "if b'goldfish' in board: return ['10.0.2.2'] modules = self.device_session_factory().exec('grep -o", "logger.info('没有已连接设备,尝试连接 %s', auto_connect) try: self.host_session_factory().disconnect(auto_connect) except: pass self.host_session_factory().connect(auto_connect) else: raise", "data[12:].tobytes()) def screenshot(self, cached=True): t0 = time.monotonic() if cached and", "self.adb_serial is None: self.adb_serial = self.__adb_device_name_detector() self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial)", "= [x for x in self.host_session_factory().devices() if x[1] != 'offline']", "not None: logger.info('没有已连接设备,尝试连接 %s', auto_connect) try: 
self.host_session_factory().disconnect(auto_connect) except: pass self.host_session_factory().connect(auto_connect)", "{} {} {} {} \".format(x1, y1, x2, y2) if duration", "data = recvall(s, 4194304) s.close() data = zlib.decompress(data, zlib.MAX_WBITS |", "pixels in RGBA/RGBX format\"\"\" s = self.device_session_factory().exec_stream('screencap|gzip -1') data =", "not None: final_X = XY[0] + randint(-offsets[0], offsets[0]) final_Y =", "h, f = struct.unpack_from('III', data, 0) assert (f == 1)", "= int(input(\"请输入序号选择设备: \")) if not 0 <= num < len(devices):", "touch_swipe2(self, origin, movement, duration=None): # sleep(1) x1, y1, x2, y2", "= lambda: self.host_session_factory().device(self.adb_serial) self.cache_screenshot = config.get('device/cache_screenshot', True) self.last_screenshot_timestamp = 0", "DEBUG_LEVEL=2): output = self.device_session_factory().exec(cmd) logger.debug(\"command: %s\", cmd) logger.debug(\"output: %s\", repr(output))", "subprocess.CalledProcessError: pass raise OSError(\"can't start adb server\") class ADBConnector: def", "conn = future.get(2) if conn is not None: data =", "if config.get('device/try_emulator_enhanced_mode', True): loopbacks = self._detect_loopbacks() if len(loopbacks): logger.debug('possible loopback", "logger.debug(\"output: %s\", repr(output)) return output def get_sub_screen(self, image, screen_range): return", "-p') data = recvall(s, 4194304) return data def screencap(self): \"\"\"returns", "format\"\"\" s = self.device_session_factory().exec_stream('screencap|gzip -1') data = recvall(s, 4194304) s.close()", "import logging.config from random import randint import zlib import struct", "self.host_session_factory().devices() if x[1] != 'offline'] always_use_device = config.get('device/adb_always_use_device', None) if", "*movement)) command = \"input swipe {} {} {} {} \".format(x1,", "= None if self.adb_serial is None: self.adb_serial = self.__adb_device_name_detector() self.device_session_factory", "def 
check_adb_alive(): try: sess = ADBClientSession(config.ADB_SERVER) version = int(sess.service('host:version').read_response().decode(), 16)", "except subprocess.CalledProcessError: pass raise OSError(\"can't start adb server\") class ADBConnector:", "if offsets is not None: final_X = XY[0] + randint(-offsets[0],", "output = self.device_session_factory().exec(cmd) logger.debug(\"command: %s\", cmd) logger.debug(\"output: %s\", repr(output)) return", "/proc/modules') if b'vboxguest' in modules: arp = self.device_session_factory().exec('cat /proc/net/arp') return", "(future.cookie.decode(), addr, self.rch.port) logger.debug(cmd) control_sock = self.device_session_factory().exec_stream(cmd) with control_sock: conn", "(w, h, data[12:]) def _reverse_connection_screencap(self): \"\"\"returns (width, height, pixels) pixels", "import recvall from . import revconn # from numpy import", "from random import randint import zlib import struct import socket", "zlib.decompress(data, zlib.MAX_WBITS | 16, 8388608) w, h, f = struct.unpack_from('III',", "/proc/net/arp') return [x[:x.find(b' ')].decode() for x in arp.splitlines()[1:]] return []", "def _test_reverse_connection(self, loopbacks): for addr in loopbacks: logger.debug('testing loopback address", "config.get('device/adb_always_use_device', None) if always_use_device is not None: if always_use_device not", "always_use_device not in (x[0] for x in devices): raise RuntimeError('设备", "%s; screencap) | nc %s %d' % (future.cookie.decode(), self.loopback, self.rch.port))", "f = struct.unpack_from('III', data, 0) assert (f == 1) return", "os.path.join(config.ADB_ROOT, 'adb')] else: adb_binaries = [adbbin] for adbbin in adb_binaries:", "cached=True): t0 = time.monotonic() if cached and self.cache_screenshot: if self.last_screenshot", "cached and self.cache_screenshot: if self.last_screenshot is not None and t0", "return False except RuntimeError: return False def ensure_adb_alive(): if check_adb_alive():", 
"self.device_session_factory().exec_stream('screencap|gzip -1') data = recvall(s, 4194304) s.close() data = zlib.decompress(data,", "img return img def touch_swipe2(self, origin, movement, duration=None): # sleep(1)", "y2 = origin[0], origin[1], origin[0] + movement[0], origin[1] + movement[1]", "logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin, *movement)) command = \"input swipe {} {}", "\"\"\"returns (width, height, pixels) pixels in RGBA/RGBX format\"\"\" s =", "linalg logger = logging.getLogger(__name__) def _screencap_to_image(cap): w, h, pixels =", "def screenshot(self, cached=True): t0 = time.monotonic() if cached and self.cache_screenshot:", "is not None: logger.info('没有已连接设备,尝试连接 %s', auto_connect) try: self.host_session_factory().disconnect(auto_connect) except: pass", "raise RuntimeError('找不到可用设备') devices = [x for x in self.host_session_factory().devices() if", "%s', addr) future = self.rch.register_cookie() with future: cmd = 'echo", "None if self.adb_serial is None: self.adb_serial = self.__adb_device_name_detector() self.device_session_factory =", "= XY[0] + randint(-1, 1) final_Y = XY[1] + randint(-1,", "%s\", cmd) logger.debug(\"output: %s\", repr(output)) return output def get_sub_screen(self, image,", "not 0 <= num < len(devices): raise ValueError() break except", "len(devices) == 0: auto_connect = config.get('device/adb_auto_connect', None) if auto_connect is", "img def touch_swipe2(self, origin, movement, duration=None): # sleep(1) x1, y1,", "self.rch.register_cookie() with future: cmd = 'echo -n %sOKAY | nc", "True return False def screencap_png(self): \"\"\"returns PNG bytes\"\"\" s =", "import revconn # from numpy import average, dot, linalg logger", "16) logger.debug('ADB server version %d', version) return True except ConnectionRefusedError:", "# 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command = \"input tap {} {}\".format(final_X,", "self.loopback = addr logger.debug('found 
loopback address %s', addr) return True", "with future.get() as conn: data = recvall(conn, 8388608, True) w,", "[adbbin] for adbbin in adb_binaries: try: logger.debug('trying %r', adbbin) subprocess.run([adbbin,", "offsets[0]) final_Y = XY[1] + randint(-offsets[1], offsets[1]) else: final_X =", "loopbacks = self._detect_loopbacks() if len(loopbacks): logger.debug('possible loopback addresses: %s', repr(loopbacks))", "0 while True: try: num = int(input(\"请输入序号选择设备: \")) if not", "touch_tap(self, XY=None, offsets=None): # sleep(10) # sleep(0.5) if offsets is", "logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command = \"input tap {} {}\".format(final_X, final_Y) self.run_device_cmd(command)", "addr) return True return False def screencap_png(self): \"\"\"returns PNG bytes\"\"\"", "import subprocess adbbin = config.get('device/adb_binary', None) if adbbin is None:", "self.device_session_factory().exec_stream('screencap -p') data = recvall(s, 4194304) return data def screencap(self):", "logger.debug('ADB server version %d', version) return True except ConnectionRefusedError: return", "height, pixels) pixels in RGBA/RGBX format\"\"\" s = self.device_session_factory().exec_stream('screencap|gzip -1')", "= future.get(2) if conn is not None: data = recvall(conn)", "device_name) return device_name def run_device_cmd(self, cmd, DEBUG_LEVEL=2): output = self.device_session_factory().exec(cmd)", "= None def __del__(self): if self.rch and self.rch.is_alive(): self.rch.stop() def", "def screencap(self): \"\"\"returns (width, height, pixels) pixels in RGBA/RGBX format\"\"\"", "duration=None): # sleep(1) x1, y1, x2, y2 = origin[0], origin[1],", "self.loopback = None def __del__(self): if self.rch and self.rch.is_alive(): self.rch.stop()", "data[12:]) def _reverse_connection_screencap(self): \"\"\"returns (width, height, pixels) pixels in RGBA/RGBX", "dot, linalg logger = logging.getLogger(__name__) def _screencap_to_image(cap): w, h, pixels", "1) final_Y = XY[1] + 
randint(-1, 1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X,", "h, data[12:]) def _reverse_connection_screencap(self): \"\"\"returns (width, height, pixels) pixels in", "if conn is not None: data = recvall(conn) conn.close() if", "auto_connect) try: self.host_session_factory().disconnect(auto_connect) except: pass self.host_session_factory().connect(auto_connect) else: raise RuntimeError('找不到可用设备') devices", "(width, height, pixels) pixels in RGBA/RGBX format\"\"\" future = self.rch.register_cookie()", "logger.debug('found loopback address %s', addr) return True return False def", "(f == 1) return (w, h, data[12:].tobytes()) def screenshot(self, cached=True):", "pixels in RGBA/RGBX format\"\"\" future = self.rch.register_cookie() with future: control_sock", "= img return img def touch_swipe2(self, origin, movement, duration=None): #", "'echo -n %sOKAY | nc -w 1 %s %d' %", "= int(sess.service('host:version').read_response().decode(), 16) logger.debug('ADB server version %d', version) return True", "self.cache_screenshot: if self.last_screenshot is not None and t0 - self.last_screenshot_timestamp", "recvall(s, 4194304) return data def screencap(self): \"\"\"returns (width, height, pixels)", "return [x[:x.find(b' ')].decode() for x in arp.splitlines()[1:]] return [] def", "control_sock = self.device_session_factory().exec_stream('(echo -n %s; screencap) | nc %s %d'", "(width, height, pixels) pixels in RGBA/RGBX format\"\"\" s = self.device_session_factory().exec_stream('screencap|gzip", "import ADBClientSession from util.socketutil import recvall from . 
import revconn", "%s\", repr(output)) return output def get_sub_screen(self, image, screen_range): return image.crop(", "['10.0.2.2'] modules = self.device_session_factory().exec('grep -o vboxguest /proc/modules') if b'vboxguest' in", "control_sock: conn = future.get(2) if conn is not None: data", "if isinstance(imgorfile, Image.Image): return imgorfile return Image.open(imgorfile) def check_adb_alive(): try:", "if len(devices) == 0: auto_connect = config.get('device/adb_auto_connect', None) if auto_connect", "dy:{}\".format(*origin, *movement)) command = \"input swipe {} {} {} {}", "['adb', os.path.join(config.ADB_ROOT, 'adb')] else: adb_binaries = [adbbin] for adbbin in", "t1 = time.monotonic() self.last_screenshot_timestamp = t1 self.last_screenshot_duration = t1 -", "def ensure_adb_alive(): if check_adb_alive(): return logger.info('尝试启动 adb server') import subprocess", "= \"input swipe {} {} {} {} \".format(x1, y1, x2,", "check=True) return True except FileNotFoundError: pass except subprocess.CalledProcessError: pass raise", "struct.unpack_from('III', data, 0) assert (f == 1) return (w, h,", "self.host_session_factory().device(self.adb_serial) self.cache_screenshot = config.get('device/cache_screenshot', True) self.last_screenshot_timestamp = 0 self.last_screenshot_duration =", "origin[1] + movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin, *movement)) command = \"input", "CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from .ADBClientSession import ADBClientSession from util.socketutil import recvall", "1: device_name = devices[0][0] elif len(devices) > 1: logger.info(\"检测到多台设备\") num", "arp = self.device_session_factory().exec('cat /proc/net/arp') return [x[:x.find(b' ')].decode() for x in", "(w, h, data[12:].tobytes()) def screenshot(self, cached=True): t0 = time.monotonic() if", "else: self.rch.stop() else: self.loopback = None def __del__(self): if self.rch", "try: num = int(input(\"请输入序号选择设备: \")) if not 0 <= num", 
"screen_range[0][1] + screen_range[1][1] ) ) def _detect_loopbacks(self): board = self.device_session_factory().exec('getprop", "= self.rch.register_cookie() with future: control_sock = self.device_session_factory().exec_stream('(echo -n %s; screencap)", "{} {} \".format(x1, y1, x2, y2) if duration is not", "except FileNotFoundError: pass except subprocess.CalledProcessError: pass raise OSError(\"can't start adb", "-o vboxguest /proc/modules') if b'vboxguest' in modules: arp = self.device_session_factory().exec('cat", "try: self.host_session_factory().disconnect(auto_connect) except: pass self.host_session_factory().connect(auto_connect) else: raise RuntimeError('找不到可用设备') devices =", "lambda: ADBClientSession(config.ADB_SERVER) self.rch = None if self.adb_serial is None: self.adb_serial", "0 self.last_screenshot_duration = 0 self.last_screenshot = None if config.get('device/try_emulator_enhanced_mode', True):", "repr(loopbacks)) self.rch = revconn.ReverseConnectionHost() self.rch.start() if self._test_reverse_connection(loopbacks): logger.info('正在使用模拟器优化模式') self.screencap =", "raise RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" + device_name) return device_name def run_device_cmd(self, cmd,", "check_adb_alive(): return logger.info('尝试启动 adb server') import subprocess adbbin = config.get('device/adb_binary',", "ADBConnector: def __init__(self, adb_serial=None): # os.chdir(ADB_ROOT) self.ADB_ROOT = config.ADB_ROOT self.adb_serial", "self.ADB_ROOT = config.ADB_ROOT self.adb_serial = adb_serial self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER)", "= lambda: ADBClientSession(config.ADB_SERVER) self.rch = None if self.adb_serial is None:", "def screencap_png(self): \"\"\"returns PNG bytes\"\"\" s = self.device_session_factory().exec_stream('screencap -p') data", "= self.device_session_factory().exec_stream('screencap|gzip -1') data = recvall(s, 4194304) s.close() data =", "def _ensure_pil_image(imgorfile): if isinstance(imgorfile, Image.Image): 
return imgorfile return Image.open(imgorfile) def", "util.socketutil import recvall from . import revconn # from numpy", "data = zlib.decompress(data, zlib.MAX_WBITS | 16, 8388608) w, h, f", "final_Y = XY[1] + randint(-1, 1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y))", "ro.product.board') if b'goldfish' in board: return ['10.0.2.2'] modules = self.device_session_factory().exec('grep", "[x for x in self.host_session_factory().devices() if x[1] != 'offline'] always_use_device", "server') import subprocess adbbin = config.get('device/adb_binary', None) if adbbin is", "| nc %s %d' % (future.cookie.decode(), self.loopback, self.rch.port)) with control_sock:", "(f == 1) return (w, h, data[12:]) def _reverse_connection_screencap(self): \"\"\"returns", "conn: data = recvall(conn, 8388608, True) w, h, f =", "x1, y1, x2, y2 = origin[0], origin[1], origin[0] + movement[0],", "config # from config import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect,", "addr) future = self.rch.register_cookie() with future: cmd = 'echo -n", "= config.ADB_ROOT self.adb_serial = adb_serial self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER) self.rch", "logger.debug(cmd) control_sock = self.device_session_factory().exec_stream(cmd) with control_sock: conn = future.get(2) if", "duration is not None: command += str(int(duration)) self.run_device_cmd(command) def touch_tap(self,", "devices = [x for x in self.host_session_factory().devices() if x[1] !=", "device_name = devices[num][0] else: raise RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\" + device_name) return", "from . 
import revconn # from numpy import average, dot,", "x in self.host_session_factory().devices() if x[1] != 'offline'] always_use_device = config.get('device/adb_always_use_device',", "1 %s %d' % (future.cookie.decode(), addr, self.rch.port) logger.debug(cmd) control_sock =", "swipe {} {} {} {} \".format(x1, y1, x2, y2) if", "= recvall(s, 4194304) s.close() data = zlib.decompress(data, zlib.MAX_WBITS | 16,", "def _reverse_connection_screencap(self): \"\"\"returns (width, height, pixels) pixels in RGBA/RGBX format\"\"\"", "= origin[0], origin[1], origin[0] + movement[0], origin[1] + movement[1] logger.debug(\"滑动初始坐标:({},{});", "== 1: device_name = devices[0][0] elif len(devices) > 1: logger.info(\"检测到多台设备\")", "loopback addresses: %s', repr(loopbacks)) self.rch = revconn.ReverseConnectionHost() self.rch.start() if self._test_reverse_connection(loopbacks):", "如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command = \"input tap {} {}\".format(final_X, final_Y)", "False def screencap_png(self): \"\"\"returns PNG bytes\"\"\" s = self.device_session_factory().exec_stream('screencap -p')", "ensure_adb_alive(): if check_adb_alive(): return logger.info('尝试启动 adb server') import subprocess adbbin", "Image.Image): return imgorfile return Image.open(imgorfile) def check_adb_alive(): try: sess =", "config.ADB_ROOT self.adb_serial = adb_serial self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER) self.rch =", "- t0 self.last_screenshot = img return img def touch_swipe2(self, origin,", "RuntimeError('找不到可用设备') devices = [x for x in self.host_session_factory().devices() if x[1]", "s = self.device_session_factory().exec_stream('screencap|gzip -1') data = recvall(s, 4194304) s.close() data", "recvall(conn, 8388608, True) w, h, f = struct.unpack_from('III', data, 0)", "if always_use_device is not None: if always_use_device not in (x[0]", "if x[1] != 'offline'] always_use_device = config.get('device/adb_always_use_device', None) if 
always_use_device", "for x in self.host_session_factory().devices() if x[1] != 'offline'] if len(devices)", "movement[0], origin[1] + movement[1] logger.debug(\"滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}\".format(*origin, *movement)) command =", "XY[1] + randint(-offsets[1], offsets[1]) else: final_X = XY[0] + randint(-1,", "import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER from .ADBClientSession import", "len(devices) > 1: logger.info(\"检测到多台设备\") num = 0 while True: try:", "RGBA/RGBX format\"\"\" future = self.rch.register_cookie() with future: control_sock = self.device_session_factory().exec_stream('(echo", "time.monotonic() if cached and self.cache_screenshot: if self.last_screenshot is not None", "elif len(devices) > 1: logger.info(\"检测到多台设备\") num = 0 while True:", "self.device_session_factory().exec_stream('(echo -n %s; screencap) | nc %s %d' % (future.cookie.decode(),", "= time.monotonic() if cached and self.cache_screenshot: if self.last_screenshot is not", "None: logger.info('没有已连接设备,尝试连接 %s', auto_connect) try: self.host_session_factory().disconnect(auto_connect) except: pass self.host_session_factory().connect(auto_connect) else:", "sleep(1) x1, y1, x2, y2 = origin[0], origin[1], origin[0] +", "XY[0] + randint(-1, 1) final_Y = XY[1] + randint(-1, 1)", "time from PIL import Image import config # from config", "if data == b'OKAY': self.loopback = addr logger.debug('found loopback address", "randint(-offsets[0], offsets[0]) final_Y = XY[1] + randint(-offsets[1], offsets[1]) else: final_X", "is not None: final_X = XY[0] + randint(-offsets[0], offsets[0]) final_Y", "else: self.loopback = None def __del__(self): if self.rch and self.rch.is_alive():", "+ randint(-1, 1) # 如果你遇到了问题,可以把这百年输出并把日志分享到群里。 logger.debug(\"点击坐标:({},{})\".format(final_X, final_Y)) command = \"input", "= XY[1] + randint(-offsets[1], offsets[1]) else: final_X = XY[0] +", "= recvall(conn, 8388608, True) w, h, f = 
struct.unpack_from('III', data,", "-n %s; screencap) | nc %s %d' % (future.cookie.decode(), self.loopback,", "h, data[12:].tobytes()) def screenshot(self, cached=True): t0 = time.monotonic() if cached", "not in (x[0] for x in devices): raise RuntimeError('设备 %s", "screen_range[1][0], screen_range[0][1] + screen_range[1][1] ) ) def _detect_loopbacks(self): board =", "zlib import struct import socket import time from PIL import", "num = int(input(\"请输入序号选择设备: \")) if not 0 <= num <", "if always_use_device not in (x[0] for x in devices): raise", "'offline'] if len(devices) == 0: auto_connect = config.get('device/adb_auto_connect', None) if", "4194304) return data def screencap(self): \"\"\"returns (width, height, pixels) pixels", "server\") class ADBConnector: def __init__(self, adb_serial=None): # os.chdir(ADB_ROOT) self.ADB_ROOT =", "return output def get_sub_screen(self, image, screen_range): return image.crop( ( screen_range[0][0],", "adbbin in adb_binaries: try: logger.debug('trying %r', adbbin) subprocess.run([adbbin, 'start-server'], check=True)", "vboxguest /proc/modules') if b'vboxguest' in modules: arp = self.device_session_factory().exec('cat /proc/net/arp')", "num = 0 while True: try: num = int(input(\"请输入序号选择设备: \"))", "ADBClientSession(config.ADB_SERVER) self.rch = None if self.adb_serial is None: self.adb_serial =", "if cached and self.cache_screenshot: if self.last_screenshot is not None and", "screen_range): return image.crop( ( screen_range[0][0], screen_range[0][1], screen_range[0][0] + screen_range[1][0], screen_range[0][1]", "config.get('device/adb_auto_connect', None) if auto_connect is not None: logger.info('没有已连接设备,尝试连接 %s', auto_connect)", "except ValueError: logger.error(\"输入不合法,请重新输入\") device_name = devices[num][0] else: raise RuntimeError('找不到可用设备') logger.info(\"确认设备名称:\"", "x[1] != 'offline'] always_use_device = config.get('device/adb_always_use_device', None) if always_use_device is", "except: pass 
self.host_session_factory().connect(auto_connect) else: raise RuntimeError('找不到可用设备') devices = [x for", "format\"\"\" future = self.rch.register_cookie() with future: control_sock = self.device_session_factory().exec_stream('(echo -n" ]
[ "a \"with\" statement. It takes the directory to change to", "def __enter__(self, *_) -> None: os.chdir(self.new_dir) def __exit__(self, *_) ->", "no directory is given, it takes the directory of the", "os from pathlib import Path class change_directory: \"\"\" A class", "change to as an argument. If no directory is given,", "= None) -> None: self.old_dir = os.getcwd() if directory is", "the working directory using a \"with\" statement. It takes the", "directory def __enter__(self, *_) -> None: os.chdir(self.new_dir) def __exit__(self, *_)", "-> None: self.old_dir = os.getcwd() if directory is None: self.new_dir", "called. \"\"\" def __init__(self, directory: str = None) -> None:", "= directory def __enter__(self, *_) -> None: os.chdir(self.new_dir) def __exit__(self,", "argument. If no directory is given, it takes the directory", "takes the directory of the file from which this function", "= Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore else: self.new_dir = directory def", "for changing the working directory using a \"with\" statement. It", "str = None) -> None: self.old_dir = os.getcwd() if directory", "changing the working directory using a \"with\" statement. It takes", "of the file from which this function was called. \"\"\"", "None: self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore else: self.new_dir =", "else: self.new_dir = directory def __enter__(self, *_) -> None: os.chdir(self.new_dir)", "this function was called. \"\"\" def __init__(self, directory: str =", "type: ignore else: self.new_dir = directory def __enter__(self, *_) ->", "if directory is None: self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore", "is None: self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore else: self.new_dir", "which this function was called. 
\"\"\" def __init__(self, directory: str", "ignore else: self.new_dir = directory def __enter__(self, *_) -> None:", "directory to change to as an argument. If no directory", "\"\"\" def __init__(self, directory: str = None) -> None: self.old_dir", "change_directory: \"\"\" A class for changing the working directory using", "os.getcwd() if directory is None: self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type:", "None: self.old_dir = os.getcwd() if directory is None: self.new_dir =", "directory is given, it takes the directory of the file", "was called. \"\"\" def __init__(self, directory: str = None) ->", "directory is None: self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore else:", "import Path class change_directory: \"\"\" A class for changing the", "statement. It takes the directory to change to as an", "it takes the directory of the file from which this", "# type: ignore else: self.new_dir = directory def __enter__(self, *_)", "self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore else: self.new_dir = directory", "Path class change_directory: \"\"\" A class for changing the working", "the file from which this function was called. \"\"\" def", "file from which this function was called. \"\"\" def __init__(self,", "is given, it takes the directory of the file from", "self.new_dir = directory def __enter__(self, *_) -> None: os.chdir(self.new_dir) def", "import os from pathlib import Path class change_directory: \"\"\" A", "class change_directory: \"\"\" A class for changing the working directory", "inspect import os from pathlib import Path class change_directory: \"\"\"", "takes the directory to change to as an argument. 
If", "If no directory is given, it takes the directory of", "directory: str = None) -> None: self.old_dir = os.getcwd() if", "__init__(self, directory: str = None) -> None: self.old_dir = os.getcwd()", "import inspect import os from pathlib import Path class change_directory:", "the directory of the file from which this function was", "the directory to change to as an argument. If no", "= os.getcwd() if directory is None: self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent #", "It takes the directory to change to as an argument.", "directory using a \"with\" statement. It takes the directory to", "def __init__(self, directory: str = None) -> None: self.old_dir =", "function was called. \"\"\" def __init__(self, directory: str = None)", "*_) -> None: os.chdir(self.new_dir) def __exit__(self, *_) -> None: os.chdir(self.old_dir)", "class for changing the working directory using a \"with\" statement.", "to as an argument. If no directory is given, it", "directory of the file from which this function was called.", "given, it takes the directory of the file from which", "None) -> None: self.old_dir = os.getcwd() if directory is None:", "\"\"\" A class for changing the working directory using a", "as an argument. If no directory is given, it takes", "Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore else: self.new_dir = directory def __enter__(self,", "self.old_dir = os.getcwd() if directory is None: self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent", "to change to as an argument. If no directory is", "A class for changing the working directory using a \"with\"", "an argument. If no directory is given, it takes the", "using a \"with\" statement. It takes the directory to change", "pathlib import Path class change_directory: \"\"\" A class for changing", "__enter__(self, *_) -> None: os.chdir(self.new_dir) def __exit__(self, *_) -> None:", "working directory using a \"with\" statement. 
It takes the directory", "from which this function was called. \"\"\" def __init__(self, directory:", "\"with\" statement. It takes the directory to change to as", "from pathlib import Path class change_directory: \"\"\" A class for" ]
[ "from gui import MgallManager def main(): app = QApplication(sys.argv) ex", "QApplication(sys.argv) ex = MgallManager() app.aboutToQuit.connect(ex.ExitHandler) sys.exit(app.exec_()) if __name__ == \"__main__\":", "import sys from PyQt5.QtWidgets import QApplication from gui import MgallManager", "app = QApplication(sys.argv) ex = MgallManager() app.aboutToQuit.connect(ex.ExitHandler) sys.exit(app.exec_()) if __name__", "MgallManager def main(): app = QApplication(sys.argv) ex = MgallManager() app.aboutToQuit.connect(ex.ExitHandler)", "from PyQt5.QtWidgets import QApplication from gui import MgallManager def main():", "import MgallManager def main(): app = QApplication(sys.argv) ex = MgallManager()", "main(): app = QApplication(sys.argv) ex = MgallManager() app.aboutToQuit.connect(ex.ExitHandler) sys.exit(app.exec_()) if", "PyQt5.QtWidgets import QApplication from gui import MgallManager def main(): app", "def main(): app = QApplication(sys.argv) ex = MgallManager() app.aboutToQuit.connect(ex.ExitHandler) sys.exit(app.exec_())", "QApplication from gui import MgallManager def main(): app = QApplication(sys.argv)", "gui import MgallManager def main(): app = QApplication(sys.argv) ex =", "ex = MgallManager() app.aboutToQuit.connect(ex.ExitHandler) sys.exit(app.exec_()) if __name__ == \"__main__\": main()", "import QApplication from gui import MgallManager def main(): app =", "= QApplication(sys.argv) ex = MgallManager() app.aboutToQuit.connect(ex.ExitHandler) sys.exit(app.exec_()) if __name__ ==", "sys from PyQt5.QtWidgets import QApplication from gui import MgallManager def" ]
[ "hash((self.x,self.y,self.z)) def __repr__(self): return f'({self.x},{self.y},{self.z})' def __add__(self,value): return Point3D(self.x +", "value.z def __hash__(self): return hash((self.x,self.y,self.z)) def __repr__(self): return f'({self.x},{self.y},{self.z})' def", "+ abs(self.y - value.y) + abs(self.z - value.z) def __eq__(self,", "<gh_stars>0 class Point3D: def __init__(self,x,y,z): self.x = x self.y =", "abs(self.z - value.z) def __eq__(self, value): return self.x == value.x", "two 3D points ''' def distance(self, value): return abs(self.x -", "value.x) + abs(self.y - value.y) + abs(self.z - value.z) def", "return Point3D(self.x + value.x, self.y + value.y, self.z + value.z)", "return abs(self.x - value.x) + abs(self.y - value.y) + abs(self.z", "def __eq__(self, value): return self.x == value.x and self.y ==", "value): return abs(self.x - value.x) + abs(self.y - value.y) +", "y self.z = z ''' Returns the distance between two", "self.z == value.z def __hash__(self): return hash((self.x,self.y,self.z)) def __repr__(self): return", "def __hash__(self): return hash((self.x,self.y,self.z)) def __repr__(self): return f'({self.x},{self.y},{self.z})' def __add__(self,value):", "3D points ''' def distance(self, value): return abs(self.x - value.x)", "- value.x) + abs(self.y - value.y) + abs(self.z - value.z)", "== value.x and self.y == value.y and self.z == value.z", "+ abs(self.z - value.z) def __eq__(self, value): return self.x ==", "return f'({self.x},{self.y},{self.z})' def __add__(self,value): return Point3D(self.x + value.x, self.y +", "value.z) def __eq__(self, value): return self.x == value.x and self.y", "abs(self.x - value.x) + abs(self.y - value.y) + abs(self.z -", "def __repr__(self): return f'({self.x},{self.y},{self.z})' def __add__(self,value): return Point3D(self.x + value.x,", "distance(self, value): return abs(self.x - value.x) + abs(self.y - value.y)", "- value.z) def __eq__(self, value): return self.x == value.x and", "class Point3D: def 
__init__(self,x,y,z): self.x = x self.y = y", "value.y and self.z == value.z def __hash__(self): return hash((self.x,self.y,self.z)) def", "''' Returns the distance between two 3D points ''' def", "== value.z def __hash__(self): return hash((self.x,self.y,self.z)) def __repr__(self): return f'({self.x},{self.y},{self.z})'", "return hash((self.x,self.y,self.z)) def __repr__(self): return f'({self.x},{self.y},{self.z})' def __add__(self,value): return Point3D(self.x", "value.y) + abs(self.z - value.z) def __eq__(self, value): return self.x", "and self.z == value.z def __hash__(self): return hash((self.x,self.y,self.z)) def __repr__(self):", "the distance between two 3D points ''' def distance(self, value):", "self.z = z ''' Returns the distance between two 3D", "Point3D: def __init__(self,x,y,z): self.x = x self.y = y self.z", "self.y == value.y and self.z == value.z def __hash__(self): return", "f'({self.x},{self.y},{self.z})' def __add__(self,value): return Point3D(self.x + value.x, self.y + value.y,", "def __init__(self,x,y,z): self.x = x self.y = y self.z =", "value): return self.x == value.x and self.y == value.y and", "= x self.y = y self.z = z ''' Returns", "- value.y) + abs(self.z - value.z) def __eq__(self, value): return", "def distance(self, value): return abs(self.x - value.x) + abs(self.y -", "value.x and self.y == value.y and self.z == value.z def", "z ''' Returns the distance between two 3D points '''", "__eq__(self, value): return self.x == value.x and self.y == value.y", "x self.y = y self.z = z ''' Returns the", "return self.x == value.x and self.y == value.y and self.z", "__add__(self,value): return Point3D(self.x + value.x, self.y + value.y, self.z +", "__repr__(self): return f'({self.x},{self.y},{self.z})' def __add__(self,value): return Point3D(self.x + value.x, self.y", "self.x = x self.y = y self.z = z '''", "self.y = y self.z = z ''' Returns the distance", "abs(self.y - value.y) + abs(self.z - value.z) def __eq__(self, value):", "== value.y 
and self.z == value.z def __hash__(self): return hash((self.x,self.y,self.z))", "= z ''' Returns the distance between two 3D points", "and self.y == value.y and self.z == value.z def __hash__(self):", "self.x == value.x and self.y == value.y and self.z ==", "def __add__(self,value): return Point3D(self.x + value.x, self.y + value.y, self.z", "= y self.z = z ''' Returns the distance between", "points ''' def distance(self, value): return abs(self.x - value.x) +", "between two 3D points ''' def distance(self, value): return abs(self.x", "Returns the distance between two 3D points ''' def distance(self,", "''' def distance(self, value): return abs(self.x - value.x) + abs(self.y", "distance between two 3D points ''' def distance(self, value): return", "__init__(self,x,y,z): self.x = x self.y = y self.z = z", "__hash__(self): return hash((self.x,self.y,self.z)) def __repr__(self): return f'({self.x},{self.y},{self.z})' def __add__(self,value): return" ]
[ "Function.ability(333, \"Patrol_screen\", cmd_screen, 17), Function.ability(334, \"Patrol_minimap\", cmd_minimap, 17), Function.ability(335, \"Rally_Units_screen\",", "ability_id, queued): \"\"\"Do a quick command like 'Stop' or 'Stim'.\"\"\"", "1006), Function.ability(486, \"Train_Queen_quick\", cmd_quick, 1632), Function.ability(487, \"Train_Raven_quick\", cmd_quick, 622), Function.ability(488,", "2.0 (the \"License\"); # you may not use this file", "\"Effect_GhostSnipe_screen\", cmd_screen, 2714), Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen, 173), Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick,", "\"BurrowDown_Hydralisk_quick\", cmd_quick, 1382, 3661), Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick, 1444, 3661), Function.ability(108,", "1315, 3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316, 3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick,", "function: The value to store for the action function. arguments:", "rectangle. This is needed so that no function takes the", "group to do it with. select_point_act: What to do with", "3661), Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick, 1433, 3661), Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick, 2340,", "cmd_quick, 804), Function.ability(371, \"Research_InfernalPreigniter_quick\", cmd_quick, 761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44),", "cmd_quick, 1189, 3704), Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190, 3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\",", "arguments of the action. Can either be an `Arguments` object,", "\"Rally_CommandCenter_screen\", cmd_screen, 203, 3690), Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap, 203, 3690), Function.ability(347,", "Which unit to select by id. 
select_worker: What to do", "\"Effect_Scan_screen\", cmd_screen, 399), Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick, 181), Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen,", "cmd_quick, 2393), Function.ability(245, \"Effect_WidowMineAttack_screen\", cmd_screen, 2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast, 2099),", "\"Land_OrbitalCommand_screen\", cmd_screen, 1524, 3678), Function.ability(280, \"Land_Starport_screen\", cmd_screen, 522, 3678), Function.ability(281,", "to send to the game. \"\"\" __slots__ = () def", "1152), Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen, 1157), Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen, 1160), Function.ability(62,", "\"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855, 3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856, 3701), Function.ability(426,", "1069, 3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070, 3696), Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick,", "minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), # (now vs add to queue)", "\"Cancel_MorphLair_quick\", cmd_quick, 1217, 3659), Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick, 2333, 3659), Function.ability(154,", "2057, 3665), Function.ability(455, \"Stop_Redirect_quick\", cmd_quick, 1691, 3665), Function.ability(456, \"Stop_Stop_quick\", cmd_quick,", "class Function(collections.namedtuple( \"Function\", [\"id\", \"name\", \"ability_id\", \"general_id\", \"function_type\", \"args\", \"avail_fn\"])):", "cmd_quick, 922), Function.ability(458, \"Train_Baneling_quick\", cmd_quick, 80), Function.ability(459, \"Train_Banshee_quick\", cmd_quick, 621),", "vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, 
sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), #", "\"Behavior_CloakOff_quick\", cmd_quick, 3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393, 3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\",", "minimap): \"\"\"Do a command that needs a point on the", "cmd_quick, 216), Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick, 1093), Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick, 1094),", "the current loaded units. ) # Which argument types do", "name of the function. Should be unique. ability_id: The ability", "queued, minimap): \"\"\"Do a command that needs a point on", "cmd_quick, 1763, 3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick, 484, 3659), Function.ability(147, \"Cancel_GravitonBeam_quick\",", "\"Effect_PurificationNova_screen\", cmd_screen, 2346), Function.ability(220, \"Effect_Repair_screen\", cmd_screen, 3685), Function.ability(221, \"Effect_Repair_autocast\", autocast,", "cmd_screen, 1165), Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen, 892), Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen, 893),", "be unique. 
ability_id: The ability id to pass to sc2.", "Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36, 3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550, 3688),", "Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen, 2063), Function.ability(180, \"Effect_Blink_screen\", cmd_screen, 3687), Function.ability(181, \"Effect_Blink_Stalker_screen\",", "action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord)", "78, 3685), Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen, 316, 3685), Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast,", "\"Effect_Blink_screen\", cmd_screen, 3687), Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen, 1442, 3687), Function.ability(182, \"Effect_ShadowStride_screen\",", "the build queue.\"\"\" action.action_ui.production_panel.unit_index = build_queue_id def cmd_quick(action, ability_id, queued):", "1191, 3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186,", "The function to convert the list of integers into something", "a quick command like 'Stop' or 'Stim'.\"\"\" action_cmd = action.action_feature_layer.unit_command", "cmd_quick, 44), Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick, 766), Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick, 1283),", "autocast, 1435, 3662), Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick, 2342, 3662), Function.ability(131, \"BurrowUp_Ravager_autocast\",", "\"%s/%s (%s)\" % (str(self.id).rjust(space and 4), self.name.ljust(space and 50), \";", "cmd_minimap, 19, 3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick,", "cmd_quick, 652, 
3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653, 3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\",", "Function.ability(226, \"Effect_Salvage_quick\", cmd_quick, 32), Function.ability(227, \"Effect_Scan_screen\", cmd_screen, 399), Function.ability(228, \"Effect_SpawnChangeling_quick\",", "the action. Can either be an `Arguments` object, a `dict`,", "point.Point.\"\"\" return cls(-1, \"<none>\", (0, 0), lambda a: point.Point(*a).floor()) @classmethod", "action def move_camera(action, minimap): \"\"\"Move the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action,", "by id. select_unit_id: Which unit to select by id. select_worker:", "1974, 3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116), Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen, 249),", "class Arguments(collections.namedtuple(\"Arguments\", [ \"screen\", \"minimap\", \"screen2\", \"queued\", \"control_group_act\", \"control_group_id\", \"select_point_act\",", "\"\"\" def __init__(self, functions): self._func_list = functions self._func_dict = {f.name:", "Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen, 26, 3684), Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen, 28, 3684),", "second point for a rectangle. This is needed so that", "action should be done now or later. control_group_act: What to", "id to pass to sc2. 
general_id: 0 for normal abilities,", "cmd_quick, 1766), Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick, 1372), Function.ability(298, \"Morph_Gateway_quick\", cmd_quick, 1520),", "avail_fn=always): \"\"\"Define a function representing a ui action.\"\"\" return cls(id_,", "3683), Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen, 488, 3683), Function.ability(79, \"Build_Refinery_screen\", cmd_screen, 320),", "sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]), # (select vs select_add)", "\"Cancel_Queue5_quick\", cmd_quick, 306, 3671), Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick, 312, 3671), Function.ability(173,", "\"Build_Stargate_screen\", cmd_screen, 889), Function.ability(89, \"Build_Starport_screen\", cmd_screen, 329), Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen,", "a single scalar in range(value).\"\"\" return cls(-1, \"<none>\", (value,), lambda", "Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081), Function.ability(23, \"Behavior_CloakOff_quick\",", "Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick, 1522, 3679), Function.ability(286, \"Lift_Starport_quick\", cmd_quick, 518, 3679),", "597), Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick, 920), Function.ability(466, \"Train_Disruptor_quick\", cmd_quick, 994), Function.ability(467,", "f in functions} if len(self._func_dict) != len(self._func_list): raise ValueError(\"Function names", "\"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252), Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253), Function.ability(451, \"Smart_screen\", cmd_screen,", "\"Research_PneumatizedCarapace_quick\", cmd_quick, 1223), Function.ability(381, 
\"Research_ProtossAirArmor_quick\", cmd_quick, 3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick,", "\"Research_GraviticBooster_quick\", cmd_quick, 1093), Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick, 1094), Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick,", "screen.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued", "cmd_quick, 3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 652, 3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick,", "License for the specific language governing permissions and # limitations", "cmd_quick, 3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656, 3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick,", "\"UnloadAll_quick\", cmd_quick, 3664), Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick, 408, 3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\",", "994), Function.ability(467, \"Train_Drone_quick\", cmd_quick, 1342), Function.ability(468, \"Train_Ghost_quick\", cmd_quick, 562), Function.ability(469,", "Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick, 2371, 3664), Function.ability(516, \"UnloadAllAt_screen\", cmd_screen, 3669), Function.ability(517,", "the possible Types.\"\"\" named = {name: type_._replace(id=Arguments._fields.index(name), name=name) for name,", "\"\"\"Represents the full set of functions. 
Can't use namedtuple since", "\"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692), Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick,", "\"select_idle_worker\", select_idle_worker, lambda obs: obs.player_common.idle_worker_count > 0), Function.ui_func(7, \"select_army\", select_army,", "\"Attack_AttackBuilding_minimap\", cmd_minimap, 2048, 3674), Function.ability(18, \"Attack_Redirect_screen\", cmd_screen, 1682, 3674), Function.ability(19,", "cmd_quick, 1312, 3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313, 3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\",", "Reserved. # # Licensed under the Apache License, Version 2.0", "\"Rally_Workers_minimap\", cmd_minimap, 3690), Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen, 203, 3690), Function.ability(346, \"Rally_CommandCenter_minimap\",", "sizes: The max+1 of each of the dimensions this argument", "Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375), Function.ability(39, \"Build_Armory_screen\",", "\"Build_TemplarArchive_screen\", cmd_screen, 890), Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen, 886), Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen,", "\"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653, 3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654, 3698), Function.ability(414,", "cmd_quick, 154), Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick, 156), Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick, 158),", "Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick, 1184, 3667), Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick, 167, 3667),", "by id. 
select_worker: What to do when selecting a worker.", "\"Effect_Heal_screen\", cmd_screen, 386), Function.ability(199, \"Effect_Heal_autocast\", autocast, 386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen,", "cmd_quick, 976), Function.ability(502, \"Train_WidowMine_quick\", cmd_quick, 614), Function.ability(503, \"Train_Zealot_quick\", cmd_quick, 916),", "319), Function.ability(92, \"Build_TechLab_quick\", cmd_quick, 3682), Function.ability(93, \"Build_TechLab_screen\", cmd_screen, 3682), Function.ability(94,", "cmd_quick, 1220), Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick, 1998), Function.ability(301, \"Morph_Hellion_quick\", cmd_quick, 1978),", "1183, 3666), Function.ability(266, \"Harvest_Gather_Mule_screen\", cmd_screen, 166, 3666), Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen,", "\"Cancel_MorphOverseer_quick\", cmd_quick, 1449, 3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451, 3659), Function.ability(160,", "actions for SC2.\"\"\" from __future__ import absolute_import from __future__ import", "\"BurrowDown_SwarmHost_quick\", cmd_quick, 2014, 3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick, 1512, 3661), Function.ability(115,", "of a set of known values.\"\"\" return cls(-1, \"<none>\", (len(options),),", "cmd_screen, 487, 3682), Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen, 890), Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen,", "487, 3682), Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen, 890), Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen, 886),", "cmd_quick, 3671), Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick, 1038, 3671), Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick,", "and 50), \"; \".join(str(a) for a in self.args)) class Functions(object):", "cmd_quick, 1692), Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick, 
3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38,", "\"Morph_Archon_quick\", cmd_quick, 1766), Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick, 1372), Function.ability(298, \"Morph_Gateway_quick\", cmd_quick,", "Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063, 3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064, 3695),", "() @classmethod def types(cls, **kwargs): \"\"\"Create an Arguments of the", "Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733, 3691), Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen, 894), Function.ability(49,", "functions): self._func_list = functions self._func_dict = {f.name: f for f", "Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick, 304, 3671), Function.ability(171, \"Cancel_Queue5_quick\", cmd_quick, 306, 3671),", "655), Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick, 1455), Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick, 1454), Function.ability(378,", "\"functions\"])): \"\"\"The set of types and functions that are valid", "takes. 
fn: The function to convert the list of integers", "{name: type_._replace(id=Arguments._fields.index(name), name=name) for name, type_ in six.iteritems(kwargs)} return cls(**named)", "Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick, 154), Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick, 156), Function.ability(257, \"Hallucination_Stalker_quick\",", "Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314, 3703), Function.ability(437, \"Research_ZergGroundArmor_quick\", cmd_quick, 3704), Function.ability(438,", "cmd_minimap(action, ability_id, queued, minimap): \"\"\"Do a command that needs a", "\"Hallucination_Oracle_quick\", cmd_quick, 2114), Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick, 154), Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick,", "\"Patrol_screen\", cmd_screen, 17), Function.ability(334, \"Patrol_minimap\", cmd_minimap, 17), Function.ability(335, \"Rally_Units_screen\", cmd_screen,", "\"Lift_Barracks_quick\", cmd_quick, 452, 3679), Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick, 417, 3679), Function.ability(284,", "for f in functions} if len(self._func_dict) != len(self._func_list): raise ValueError(\"Function", "\"UnloadAllAt_Medivac_screen\", cmd_screen, 396, 3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396, 3669), Function.ability(520,", "3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308, 3671), Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick, 1831,", "Function.ability(57, \"Build_Gateway_screen\", cmd_screen, 883), Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen, 327), Function.ability(59, \"Build_Hatchery_screen\",", "\"\"\" ___slots__ = () @classmethod def types(cls, **kwargs): \"\"\"Create an", "0 for normal abilities, and the ability_id of another ability", "887), Function.ability(70, \"Build_Pylon_screen\", cmd_screen, 881), Function.ability(71, 
\"Build_Reactor_quick\", cmd_quick, 3683), Function.ability(72,", "554, 3678), Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen, 419, 3678), Function.ability(278, \"Land_Factory_screen\", cmd_screen,", "1763, 3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick, 484, 3659), Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick,", "} # Which ones need an ability? ABILITY_FUNCTIONS = {cmd_quick,", "cmd_quick, 1252), Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253), Function.ability(451, \"Smart_screen\", cmd_screen, 1),", "cmd_screen, 3673), Function.ability(336, \"Rally_Units_minimap\", cmd_minimap, 3673), Function.ability(337, \"Rally_Building_screen\", cmd_screen, 195,", "3659), Function.ability(163, \"Cancel_Nuke_quick\", cmd_quick, 1623, 3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730,", "\".join(str(a) for a in self.args)) class Functions(object): \"\"\"Represents the full", "\"UnloadAllAt_minimap\", cmd_minimap, 3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen, 396, 3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\",", "\"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552, 3689), Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick, 3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\",", "Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen, 455, 3683), Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick, 488, 3683),", "\"Build_Forge_screen\", cmd_screen, 884), Function.ability(56, \"Build_FusionCore_screen\", cmd_screen, 333), Function.ability(57, \"Build_Gateway_screen\", cmd_screen,", "ability_id, queued, minimap): \"\"\"Do a command that needs a point", "Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064, 3695), Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick, 3696), Function.ability(398,", "Function.ability(433, 
\"Research_ZergFlyerAttack_quick\", cmd_quick, 3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312, 3703), Function.ability(435,", "list of argument types. Take a look at TYPES and", "function, each being a list of ints. For select_point this", "\"Hallucination_Archon_quick\", cmd_quick, 146), Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick, 148), Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick,", "322), Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen, 1156), Function.ability(52, \"Build_Extractor_screen\", cmd_screen, 1154), Function.ability(53,", "1732, 3659), Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick, 517, 3659), Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick,", "211, 3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211, 3690), Function.ability(349, \"Rally_Nexus_screen\", cmd_screen,", "2113, 3659), Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick, 1848, 3659), Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick,", "\"Build_GhostAcademy_screen\", cmd_screen, 327), Function.ability(59, \"Build_Hatchery_screen\", cmd_screen, 1152), Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen,", "control_group_id: Which control group to do it with. select_point_act: What", "units. 
) # Which argument types do each function need?", "cmd_screen, 1819), Function.ability(186, \"Effect_Charge_autocast\", autocast, 1819), Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen, 261),", "Set space=True to line them all up nicely.\"\"\" return \"%s/%s", "3683), Function.ability(79, \"Build_Refinery_screen\", cmd_screen, 320), Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen, 1165), Function.ability(81,", "\"\"\" __slots__ = () @classmethod def all_arguments(cls, function, arguments): \"\"\"Helper", "Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 46), Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick, 1223), Function.ability(381, \"Research_ProtossAirArmor_quick\",", "\"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070, 3696), Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick, 1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\",", "3674), Function.ability(19, \"Scan_Move_screen\", cmd_screen, 19, 3674), Function.ability(20, \"Scan_Move_minimap\", cmd_minimap, 19,", "\"Research_WarpGate_quick\", cmd_quick, 1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick, 3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick,", "them all up nicely.\"\"\" return \"%s/%s (%s)\" % (str(self.id).rjust(space and", "= action.action_ui.control_group select.action = control_group_act select.control_group_index = control_group_id def unload(action,", "3661), Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick, 2095, 3661), Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick, 1390,", "1632), Function.ability(487, \"Train_Raven_quick\", cmd_quick, 622), Function.ability(488, \"Train_Reaper_quick\", cmd_quick, 561), Function.ability(489,", "set in the protos to send to the game. 
\"\"\"", "\"Effect_Stim_Marine_quick\", cmd_quick, 380, 3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683, 3675), Function.ability(239,", "Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick, 1382, 3661), Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick, 1444, 3661),", "249), Function.ability(213, \"Effect_NukeCalldown_screen\", cmd_screen, 1622), Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen, 2146), Function.ability(215,", "OF ANY KIND, either express or implied. # See the", "select_point this could be: [[0], [23, 38]]. \"\"\" __slots__ =", "cmd_quick, 561), Function.ability(489, \"Train_Roach_quick\", cmd_quick, 1351), Function.ability(490, \"Train_SCV_quick\", cmd_quick, 524),", "See the License for the specific language governing permissions and", "cmd_quick, 2331, 3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365, 3659), Function.ability(162, \"Cancel_NeuralParasite_quick\",", "1413), Function.ability(511, \"UnloadAll_quick\", cmd_quick, 3664), Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick, 408, 3664),", "Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen, 1408, 3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408, 3669),", "3704), Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190, 3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191,", "1438, 3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick, 2371, 3664), Function.ability(516, \"UnloadAllAt_screen\", cmd_screen,", "to in writing, software # distributed under the License is", "cmd_screen, 882), Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen, 1162), Function.ability(42, \"Build_Barracks_screen\", cmd_screen, 321),", "1831, 3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833, 3671), 
Function.ability(176, \"Effect_Abduct_screen\", cmd_screen,", "ABILITY_IDS = {k: frozenset(v) for k, v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE", "\"Move_minimap\", cmd_minimap, 16), Function.ability(333, \"Patrol_screen\", cmd_screen, 17), Function.ability(334, \"Patrol_minimap\", cmd_minimap,", "3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594), Function.ability(352, \"Research_AdvancedBallistics_quick\", cmd_quick, 805), Function.ability(353,", "\"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863, 3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\",", "cmd_minimap, 212, 3673), Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen, 199, 3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\",", "or agreed to in writing, software # distributed under the", "arguments for that function, each being a list of ints.", "3659), Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick, 174, 3659), Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick, 2354,", "1451, 3659), Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick, 2331, 3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick,", "\"general_id\", \"function_type\", \"args\", \"avail_fn\"])): \"\"\"Represents a function action. Attributes: id:", "Function.ability(171, \"Cancel_Queue5_quick\", cmd_quick, 306, 3671), Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick, 312, 3671),", "Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252), Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253), Function.ability(451, \"Smart_screen\",", "non-abilities, this function returns whether the function is valid. 
\"\"\"", "3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438, 3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick, 2371,", "\"Build_Interceptors_autocast\", autocast, 1042), Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen, 323), Function.ability(65, \"Build_Nexus_screen\", cmd_screen,", "1344), Function.ability(484, \"Train_Phoenix_quick\", cmd_quick, 946), Function.ability(485, \"Train_Probe_quick\", cmd_quick, 1006), Function.ability(486,", "422, 3683), Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick, 455, 3683), Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen,", "cmd_screen, 1731, 3680), Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick, 388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick,", "\"Train_Thor_quick\", cmd_quick, 594), Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick, 1348), Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick,", "f for f in FUNCTIONS if f.avail_fn} class FunctionCall(collections.namedtuple( \"FunctionCall\",", "\"Research_Stimpack_quick\", cmd_quick, 730), Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick, 3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick,", "\"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729, 3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731, 3680), Function.ability(317,", "setting, etc.\"\"\" select = action.action_ui.control_group select.action = control_group_act select.control_group_index =", "Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick, 1372), Function.ability(298, \"Morph_Gateway_quick\", cmd_quick, 1520), Function.ability(299, \"Morph_GreaterSpire_quick\",", "sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]), # (select vs select_add) 
select_unit_act=ArgumentType.enum([", "\"HoldPosition_quick\", cmd_quick, 18), Function.ability(275, \"Land_screen\", cmd_screen, 3678), Function.ability(276, \"Land_Barracks_screen\", cmd_screen,", "Function.ability(458, \"Train_Baneling_quick\", cmd_quick, 80), Function.ability(459, \"Train_Banshee_quick\", cmd_quick, 621), Function.ability(460, \"Train_Battlecruiser_quick\",", "\"\"\"Define the static list of types and actions for SC2.\"\"\"", "\"select_unit_id\", \"select_worker\", \"build_queue_id\", \"unload_id\"])): \"\"\"The full list of argument types.", "needs a point on the screen.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id", "build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap],", "\"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564, 3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick, 3694), Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\",", "\"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864, 3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\",", "1386, 3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick, 2014, 3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick,", "\"Effect_InfestedTerrans_screen\", cmd_screen, 247), Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen, 251), Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen,", "cmd_quick, 2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\", cmd_screen, 2558), Function.ability(306, \"Morph_Lurker_quick\", cmd_quick, 2332),", "cls(function, arguments) class ValidActions(collections.namedtuple( \"ValidActions\", [\"types\", \"functions\"])): \"\"\"The set of", "\"select_larva\", select_larva, lambda obs: 
obs.player_common.larva_count > 0), Function.ui_func(10, \"unload\", unload,", "Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick, 655), Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick, 1455), Function.ability(377, \"Research_PathogenGlands_quick\",", "Which ones need an ability? ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap,", "\"Build_Reactor_screen\", cmd_screen, 3683), Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick, 422, 3683), Function.ability(74, \"Build_Reactor_Barracks_screen\",", "Function.ability(462, \"Train_Colossus_quick\", cmd_quick, 978), Function.ability(463, \"Train_Corruptor_quick\", cmd_quick, 1353), Function.ability(464, \"Train_Cyclone_quick\",", "unload_id def build_queue(action, build_queue_id): \"\"\"Cancel a unit in the build", "Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192, 3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193, 3706),", "ValidActions(collections.namedtuple( \"ValidActions\", [\"types\", \"functions\"])): \"\"\"The set of types and functions", "Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick, 3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068, 3696), Function.ability(399,", "\"\"\"The set of types and functions that are valid for", "compliance with the License. # You may obtain a copy", "__future__ import division from __future__ import print_function import collections import", "All Rights Reserved. # # Licensed under the Apache License,", "__hash__(self): # So it can go in a set(). 
return", "cmd_quick, 3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick, 392, 3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick,", "\"Research_ProtossAirWeapons_quick\", cmd_quick, 3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562, 3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\",", "class ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\", \"name\", \"sizes\", \"fn\"])): \"\"\"Represents a single", "cmd_screen, 74), Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen, 2714), Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen, 173),", "Inc. All Rights Reserved. # # Licensed under the Apache", "obs: obs.ui_data.HasField(\"production\")), # Everything below here is generated with gen_actions.py", "\"BurrowDown_Lurker_quick\", cmd_quick, 2108, 3661), Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick, 1433, 3661), Function.ability(111,", "\"Rally_Building_minimap\", cmd_minimap, 195, 3673), Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen, 212, 3673), Function.ability(340,", "\"control_group_act\", \"control_group_id\", \"select_point_act\", \"select_add\", \"select_unit_act\", \"select_unit_id\", \"select_worker\", \"build_queue_id\", \"unload_id\"])): \"\"\"The", "\"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064, 3695), Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick, 3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\",", "3666), Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen, 295, 3666), Function.ability(269, \"Harvest_Return_quick\", cmd_quick, 3667),", "cmd_screen, 3685), Function.ability(221, \"Effect_Repair_autocast\", autocast, 3685), Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen, 78,", "of known types. 
TYPES = Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False,", "\"Morph_Lurker_quick\", cmd_quick, 2332), Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick, 2112), Function.ability(308, \"Morph_Mothership_quick\", cmd_quick,", "select_unit_id): \"\"\"Select a specific unit from the multi-unit selection.\"\"\" select", "\"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191, 3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\",", "@classmethod def all_arguments(cls, function, arguments): \"\"\"Helper function for creating `FunctionCall`s", "group, selecting, setting, etc.\"\"\" select = action.action_ui.control_group select.action = control_group_act", "cmd_quick, 162), Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick, 164), Function.ability(261, \"Halt_quick\", cmd_quick, 3660),", "() def __str__(self): return \"%s/%s %s\" % (self.id, self.name, list(self.sizes))", "Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312, 3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313, 3703),", "Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen, 396, 3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396, 3669),", "not use this file except in compliance with the License.", "self._func_dict[key] def __iter__(self): return iter(self._func_list) def __len__(self): return len(self._func_list) #", "cmd_screen, 2704), Function.ability(230, \"Effect_Spray_screen\", cmd_screen, 3684), Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen, 30,", "Which ones require a point? 
POINT_REQUIRED_FUNCS = { False: {cmd_quick,", "unit at a point.\"\"\" select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type =", "for normal abilities, and the ability_id of another ability if", "select_rect), Function.ui_func(4, \"select_control_group\", control_group), Function.ui_func(5, \"select_unit\", select_unit, lambda obs: obs.ui_data.HasField(\"multi\")),", "2720), Function.ability(405, \"Research_Stimpack_quick\", cmd_quick, 730), Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick, 3697), Function.ability(407,", "Function.ability(459, \"Train_Banshee_quick\", cmd_quick, 621), Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick, 623), Function.ability(461, \"Train_Carrier_quick\",", "'Stop' or 'Stim'.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command", "def __getattr__(self, name): return self._func_dict[name] def __getitem__(self, key): if isinstance(key,", "cmd_quick, 3664), Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick, 408, 3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick,", "A new `FunctionCall` instance. 
\"\"\" if isinstance(arguments, dict): arguments =", "lambda obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\", build_queue, lambda obs: obs.ui_data.HasField(\"production\")), #", "you may not use this file except in compliance with", "\"Effect_Repair_SCV_autocast\", autocast, 316, 3685), Function.ability(226, \"Effect_Salvage_quick\", cmd_quick, 32), Function.ability(227, \"Effect_Scan_screen\",", "unload(action, unload_id): \"\"\"Unload a unit from a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index =", "build queue.\"\"\" action.action_ui.production_panel.unit_index = build_queue_id def cmd_quick(action, ability_id, queued): \"\"\"Do", "]) # pylint: enable=line-too-long # Some indexes to support features.py", "\"Train_Liberator_quick\", cmd_quick, 626), Function.ability(476, \"Train_Marauder_quick\", cmd_quick, 563), Function.ability(477, \"Train_Marine_quick\", cmd_quick,", "cmd_quick, 1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick, 793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803),", "Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick, 793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803), Function.ability(404, \"Research_ShadowStrike_quick\",", "1392, 3662), Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast, 1392, 3662), Function.ability(140, \"Cancel_quick\", cmd_quick,", "Function.ability(455, \"Stop_Redirect_quick\", cmd_quick, 1691, 3665), Function.ability(456, \"Stop_Stop_quick\", cmd_quick, 4, 3665),", "cls(id_, name, sizes, None) class Arguments(collections.namedtuple(\"Arguments\", [ \"screen\", \"minimap\", \"screen2\",", "and 4), self.name.ljust(space and 50), \"; \".join(str(a) for a in", "sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, 
sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]), # (select vs", "\"Research_TerranInfantryArmor_quick\", cmd_quick, 3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656, 3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\",", "3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562, 3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563,", "obs: obs.player_common.warp_gate_count > 0), Function.ui_func(9, \"select_larva\", select_larva, lambda obs: obs.player_common.larva_count", "Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick, 3694), Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065, 3694), Function.ability(391,", "cmd_quick, 306, 3671), Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick, 312, 3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\",", "\"Research_BansheeCloakingField_quick\", cmd_quick, 790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick,", "isinstance(arguments, Arguments): arguments = Arguments(*arguments) return cls(function, arguments) class ValidActions(collections.namedtuple(", "types and actions for SC2.\"\"\" from __future__ import absolute_import from", "Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen, 1825), Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen, 2338), Function.ability(190, \"Effect_EMP_screen\",", "autocast, 1396, 3662), Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick, 2110, 3662), Function.ability(128, \"BurrowUp_Queen_quick\",", "return cls(**named) # The list of known types. 
TYPES =", "Function.ability(62, \"Build_Interceptors_quick\", cmd_quick, 1042), Function.ability(63, \"Build_Interceptors_autocast\", autocast, 1042), Function.ability(64, \"Build_MissileTurret_screen\",", "cmd_quick, 2110, 3662), Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick, 1435, 3662), Function.ability(129, \"BurrowUp_Queen_autocast\",", "screen): \"\"\"Do a command that needs a point on the", "\"Train_Observer_quick\", cmd_quick, 977), Function.ability(482, \"Train_Oracle_quick\", cmd_quick, 954), Function.ability(483, \"Train_Overlord_quick\", cmd_quick,", "1042), Function.ability(63, \"Build_Interceptors_autocast\", autocast, 1042), Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen, 323), Function.ability(65,", "\"Train_SwarmHost_quick\", cmd_quick, 1356), Function.ability(495, \"Train_Tempest_quick\", cmd_quick, 955), Function.ability(496, \"Train_Thor_quick\", cmd_quick,", "index to target. unload_id: Which unit to target in a", "Function.ability(63, \"Build_Interceptors_autocast\", autocast, 1042), Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen, 323), Function.ability(65, \"Build_Nexus_screen\",", "Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick, 3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656, 3697), Function.ability(408,", "1684, 3675), Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick, 380, 3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick,", "Adds the empty proto field. 
def select_unit(action, select_unit_act, select_unit_id): \"\"\"Select", "Function.ability(483, \"Train_Overlord_quick\", cmd_quick, 1344), Function.ability(484, \"Train_Phoenix_quick\", cmd_quick, 946), Function.ability(485, \"Train_Probe_quick\",", "23, 3674), Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap, 23, 3674), Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen,", "\"BurrowDown_Ultralisk_quick\", cmd_quick, 1512, 3661), Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick, 2095, 3661), Function.ability(116,", "cmd_quick, 1594), Function.ability(352, \"Research_AdvancedBallistics_quick\", cmd_quick, 805), Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick, 790),", "3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick, 2371, 3664), Function.ability(516, \"UnloadAllAt_screen\", cmd_screen, 3669),", "ValidActions.\"\"\" return cls(id_, name, None, None, None, args, None) def", "has a limit of 255 function arguments, so build something", "Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen, 1159), Function.ability(103, \"BurrowDown_quick\", cmd_quick, 3661), Function.ability(104, \"BurrowDown_Baneling_quick\",", "0), Function.ui_func(8, \"select_warp_gates\", select_warp_gates, lambda obs: obs.player_common.warp_gate_count > 0), Function.ui_func(9,", "center. 
\"\"\" ___slots__ = () @classmethod def types(cls, **kwargs): \"\"\"Create", "generated with gen_actions.py Function.ability(12, \"Attack_screen\", cmd_screen, 3674), Function.ability(13, \"Attack_minimap\", cmd_minimap,", "3662), Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast, 1392, 3662), Function.ability(140, \"Cancel_quick\", cmd_quick, 3659),", "\"Research_MuscularAugments_quick\", cmd_quick, 1283), Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick, 655), Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick,", "{funcs}} for func in FUNCTIONS: if func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func)", "= action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord) def", "function takes the same type twice. queued: Whether the action", "\"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532), Function.ability(356, \"Research_Blink_quick\", cmd_quick,", "Function.ability(227, \"Effect_Scan_screen\", cmd_screen, 399), Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick, 181), Function.ability(229, \"Effect_SpawnLocusts_screen\",", "autocast, 1376, 3662), Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick, 1380, 3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\",", "\"Load_screen\", cmd_screen, 3668), Function.ability(288, \"Load_Bunker_screen\", cmd_screen, 407, 3668), Function.ability(289, \"Load_Medivac_screen\",", "select = action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add() screen_rect = point.Rect(screen, screen2)", "\"Morph_BroodLord_quick\", cmd_quick, 1372), Function.ability(298, \"Morph_Gateway_quick\", cmd_quick, 1520), Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick,", "FunctionCall(collections.namedtuple( \"FunctionCall\", 
[\"function\", \"arguments\"])): \"\"\"Represents a function call action. Attributes:", "to do it with. select_point_act: What to do with the", "\"Research_DrillingClaws_quick\", cmd_quick, 764), Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick, 1097), Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick,", "spatial_pb2 as sc_spatial from s2clientprotocol import ui_pb2 as sc_ui def", "856, 3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857, 3701), Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick,", "screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add) def select_idle_worker(action, select_worker): \"\"\"Select", "an ArgumentType to be used in ValidActions.\"\"\" return cls(id_, name,", "select_army, lambda obs: obs.player_common.army_count > 0), Function.ui_func(8, \"select_warp_gates\", select_warp_gates, lambda", "out_rect = select.selection_screen_coord.add() screen_rect = point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add", "3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen, 1408, 3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408,", "3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116), Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen, 249), Function.ability(213,", "\"Research_ZergGroundArmor_quick\", cmd_quick, 3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189, 3704), Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\",", "cmd_quick, 36, 3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550, 3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\",", "820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 
46), Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick, 1223), Function.ability(381,", "Function.ui_func(4, \"select_control_group\", control_group), Function.ui_func(5, \"select_unit\", select_unit, lambda obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6,", "Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick, 314, 3659), Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick, 1763, 3659),", "Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen, 1183, 3666), Function.ability(266, \"Harvest_Gather_Mule_screen\", cmd_screen, 166, 3666),", "this argument takes. fn: The function to convert the list", "represented by a more general action. function_type: One of the", "cmd_screen, 323), Function.ability(65, \"Build_Nexus_screen\", cmd_screen, 880), Function.ability(66, \"Build_Nuke_quick\", cmd_quick, 710),", "\"Cancel_NeuralParasite_quick\", cmd_quick, 250, 3659), Function.ability(163, \"Cancel_Nuke_quick\", cmd_quick, 1623, 3659), Function.ability(164,", "\"<none>\", (value,), lambda a: a[0]) @classmethod def point(cls): # No", "\"Land_CommandCenter_screen\", cmd_screen, 419, 3678), Function.ability(278, \"Land_Factory_screen\", cmd_screen, 520, 3678), Function.ability(279,", "Whether to add the unit to the selection or replace", "\"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068, 3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069, 3696), Function.ability(400,", "unit in the build queue.\"\"\" action.action_ui.production_panel.unit_index = build_queue_id def cmd_quick(action,", "{ability_id: {funcs}} for func in FUNCTIONS: if func.ability_id >= 0:", "str(self, space=False): \"\"\"String version. Set space=True to line them all", "cmd_screen, 522, 3678), Function.ability(281, \"Lift_quick\", cmd_quick, 3679), Function.ability(282, \"Lift_Barracks_quick\", cmd_quick,", "proto field. 
def select_unit(action, select_unit_act, select_unit_id): \"\"\"Select a specific unit", "Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick, 253, 3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684, 3675),", "point on the screen. minimap: A point on the minimap.", "\"Morph_Ravager_quick\", cmd_quick, 2330), Function.ability(314, \"Morph_Root_screen\", cmd_screen, 3680), Function.ability(315, \"Morph_SpineCrawlerRoot_screen\", cmd_screen,", "2112), Function.ability(308, \"Morph_Mothership_quick\", cmd_quick, 1847), Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick, 1516), Function.ability(310,", "in range(value).\"\"\" return cls(-1, \"<none>\", (value,), lambda a: a[0]) @classmethod", "\"Build_HydraliskDen_screen\", cmd_screen, 1157), Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen, 1160), Function.ability(62, \"Build_Interceptors_quick\", cmd_quick,", "TYPES above, this includes the sizes for screen and minimap.", "917), Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick, 1356), Function.ability(495, \"Train_Tempest_quick\", cmd_quick, 955), Function.ability(496,", "Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116), Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen, 249), Function.ability(213, \"Effect_NukeCalldown_screen\",", "Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653, 3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654, 3698),", "166, 3666), Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen, 298, 3666), Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen,", "() @classmethod def all_arguments(cls, function, arguments): \"\"\"Helper function for creating", "cmd_quick, 1566, 3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567, 3692), Function.ability(385, 
\"Research_ProtossAirWeapons_quick\",", "[23, 38]]. \"\"\" __slots__ = () @classmethod def all_arguments(cls, function,", "Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863, 3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651), Function.ability(419,", "\"function_type\", \"args\", \"avail_fn\"])): \"\"\"Represents a function action. Attributes: id: The", "argument types. Take a look at TYPES and FUNCTION_TYPES for", "Function.ability(493, \"Train_Stalker_quick\", cmd_quick, 917), Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick, 1356), Function.ability(495, \"Train_Tempest_quick\",", "cmd_quick, 167, 3667), Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick, 299, 3667), Function.ability(273, \"Harvest_Return_SCV_quick\",", "cmd_screen, 3666), Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen, 1183, 3666), Function.ability(266, \"Harvest_Gather_Mule_screen\", cmd_screen,", "\"control_group_id\", \"select_point_act\", \"select_add\", \"select_unit_act\", \"select_unit_id\", \"select_worker\", \"build_queue_id\", \"unload_id\"])): \"\"\"The full", "sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]), # (select vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect,", "pylint: disable=line-too-long FUNCTIONS = Functions([ Function.ui_func(0, \"no_op\", no_op), Function.ui_func(1, \"move_camera\",", "3666), Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen, 1183, 3666), Function.ability(266, \"Harvest_Gather_Mule_screen\", cmd_screen, 166,", "values.\"\"\" return cls(-1, \"<none>\", (len(options),), lambda a: options[a[0]]) @classmethod def", "cmd_quick, 3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393, 3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick,", "Function.ability(262, \"Halt_Building_quick\", 
cmd_quick, 315, 3660), Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick, 348, 3660),", "Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick, 1219, 3659), Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick, 1217, 3659),", "cmd_quick, 620), Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick, 1853), Function.ability(480, \"Train_Mutalisk_quick\", cmd_quick, 1346),", "799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532), Function.ability(356, \"Research_Blink_quick\", cmd_quick, 1593), Function.ability(357,", "\"Cancel_MorphRavager_quick\", cmd_quick, 2331, 3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365, 3659), Function.ability(162,", "select_add): \"\"\"Select the entire army.\"\"\" action.action_ui.select_army.selection_add = select_add def select_warp_gates(action,", "\"\"\"Create an ArgumentType that is represented by a point.Point.\"\"\" return", "ui_func(cls, id_, name, function_type, avail_fn=always): \"\"\"Define a function representing a", "cmd_quick, 2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast, 2328), Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen, 247),", "cmd_quick, 451, 3659), Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick, 314, 3659), Function.ability(145, \"Cancel_CreepTumor_quick\",", "__len__(self): return len(self._func_list) # pylint: disable=line-too-long FUNCTIONS = Functions([ Function.ui_func(0,", "419, 3678), Function.ability(278, \"Land_Factory_screen\", cmd_screen, 520, 3678), Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen,", "a: a[0]) @classmethod def point(cls): # No range because it's", "\"Train_Cyclone_quick\", cmd_quick, 597), Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick, 920), Function.ability(466, \"Train_Disruptor_quick\", cmd_quick,", "1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417), 
Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen, 1416), Function.ability(508,", "\"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393, 3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383, 3677), Function.ability(26,", "614), Function.ability(503, \"Train_Zealot_quick\", cmd_quick, 916), Function.ability(504, \"Train_Zergling_quick\", cmd_quick, 1343), Function.ability(505,", "885), Function.ability(55, \"Build_Forge_screen\", cmd_screen, 884), Function.ability(56, \"Build_FusionCore_screen\", cmd_screen, 333), Function.ability(57,", "3684), Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen, 30, 3684), Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen, 26,", "cmd_quick, 1348), Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick, 624), Function.ability(499, \"Train_Viper_quick\", cmd_quick, 1354),", "the function is valid. \"\"\" __slots__ = () @classmethod def", "\"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192, 3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193, 3706), Function.ability(448,", "\"\"\"Represents a function call action. Attributes: function: Store the function", "2331, 3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365, 3659), Function.ability(162, \"Cancel_NeuralParasite_quick\", cmd_quick,", "517, 3659), Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick, 2535, 3659), Function.ability(168, \"Cancel_Last_quick\", cmd_quick,", "cmd_screen, cmd_minimap, autocast} # Which ones require a point? 
POINT_REQUIRED_FUNCS", "def __iter__(self): return iter(self._func_list) def __len__(self): return len(self._func_list) # pylint:", "cmd_quick, 856, 3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857, 3701), Function.ability(427, \"Research_TunnelingClaws_quick\",", "arguments = Arguments(*arguments) return cls(function, arguments) class ValidActions(collections.namedtuple( \"ValidActions\", [\"types\",", "to construct the sc2 action proto out of python types.", "2342, 3662), Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast, 2342, 3662), Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick,", "`Arguments` object, a `dict`, or an iterable. If a `dict`", "42), Function.ability(192, \"Effect_Feedback_screen\", cmd_screen, 140), Function.ability(193, \"Effect_ForceField_screen\", cmd_screen, 1526), Function.ability(194,", "\"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866, 3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\",", "Function.ability(296, \"Morph_Archon_quick\", cmd_quick, 1766), Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick, 1372), Function.ability(298, \"Morph_Gateway_quick\",", "Function.ability(19, \"Scan_Move_screen\", cmd_screen, 19, 3674), Function.ability(20, \"Scan_Move_minimap\", cmd_minimap, 19, 3674),", "\"Effect_LockOn_screen\", cmd_screen, 2350), Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen, 2387), Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen,", "Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068, 3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069, 3696),", "\"Morph_LiberatorAAMode_quick\", cmd_quick, 2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\", cmd_screen, 2558), Function.ability(306, \"Morph_Lurker_quick\", cmd_quick,", "cmd_quick, 863, 
3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick,", "control group. control_group_id: Which control group to do it with.", "2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen, 2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen, 144), Function.ability(218,", "\"%s/%s %s\" % (self.id, self.name, list(self.sizes)) @classmethod def enum(cls, options):", "where you choose one of a set of known values.\"\"\"", "build_queue, lambda obs: obs.ui_data.HasField(\"production\")), # Everything below here is generated", "Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick, 160), Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick, 162), Function.ability(260, \"Hallucination_Zealot_quick\",", "collections import numbers import six from pysc2.lib import point from", "3662), Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast, 1376, 3662), Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick, 1380,", "etc.\"\"\" select = action.action_ui.control_group select.action = control_group_act select.control_group_index = control_group_id", "Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113, 3659), Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick, 1848, 3659),", "3663), Function.ability(296, \"Morph_Archon_quick\", cmd_quick, 1766), Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick, 1372), Function.ability(298,", "\"Effect_PhotonOvercharge_screen\", cmd_screen, 2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen, 144), Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen,", "import absolute_import from __future__ import division from __future__ import print_function", "screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add) def 
select_idle_worker(action, select_worker): \"\"\"Select an", "1225), Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick, 1482), Function.ability(359, \"Research_Charge_quick\", cmd_quick, 1592), Function.ability(360,", "unit at the point. select_add: Whether to add the unit", "\"Effect_ChronoBoost_screen\", cmd_screen, 261), Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen, 1825), Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen,", "action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action,", "a point.Point.\"\"\" return cls(-1, \"<none>\", (0, 0), lambda a: point.Point(*a).floor())", "3662), Function.ability(140, \"Cancel_quick\", cmd_quick, 3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594, 3659),", "cmd_minimap, 3673), Function.ability(337, \"Rally_Building_screen\", cmd_screen, 195, 3673), Function.ability(338, \"Rally_Building_minimap\", cmd_minimap,", "cmd_quick, 1067, 3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick,", "304, 3671), Function.ability(171, \"Cancel_Queue5_quick\", cmd_quick, 306, 3671), Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick,", "ArgumentType with a single scalar in range(value).\"\"\" return cls(-1, \"<none>\",", "on a control group, selecting, setting, etc.\"\"\" select = action.action_ui.control_group", "cmd_screen, 1414), Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen, 1413), Function.ability(511, \"UnloadAll_quick\", cmd_quick, 3664),", "Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253), Function.ability(451, \"Smart_screen\", cmd_screen, 1), Function.ability(452, \"Smart_minimap\",", "file except in compliance with the License. 
# You may", "full list of argument types. Take a look at TYPES", "3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974, 3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116),", "950), Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick, 976), Function.ability(502, \"Train_WidowMine_quick\", cmd_quick, 614), Function.ability(503,", "cmd_screen, 173), Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick, 76), Function.ability(198, \"Effect_Heal_screen\", cmd_screen, 386),", "Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick, 2097, 3662), Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick, 1392, 3662),", "3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656, 3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657,", "2016, 3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick, 1514, 3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast,", "\"Hallucination_Disruptor_quick\", cmd_quick, 2389), Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick, 150), Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick,", "\"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833, 3671), Function.ability(176, \"Effect_Abduct_screen\", cmd_screen, 2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\",", "[\"id\", \"name\", \"ability_id\", \"general_id\", \"function_type\", \"args\", \"avail_fn\"])): \"\"\"Represents a function", "\"Research_ZergFlyerArmor_quick\", cmd_quick, 3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315, 3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\",", "name=name) for name, type_ in six.iteritems(kwargs)} return cls(**named) # The", "`dict`, or an iterable. 
If a `dict` or an iterable", "Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick, 174, 3659), Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick, 2354, 3659),", "\"Morph_Root_screen\", cmd_screen, 3680), Function.ability(315, \"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729, 3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\",", "argument types do each function need? FUNCTION_TYPES = { no_op:", "cmd_screen, 396, 3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396, 3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\",", "each being a list of ints. For select_point this could", "Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap, 207, 3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594), Function.ability(352,", "Function.ability(503, \"Train_Zealot_quick\", cmd_quick, 916), Function.ability(504, \"Train_Zergling_quick\", cmd_quick, 1343), Function.ability(505, \"TrainWarp_Adept_screen\",", "\"Load_Overlord_screen\", cmd_screen, 1406, 3668), Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen, 911, 3668), Function.ability(294,", "\"Stop_Building_quick\", cmd_quick, 2057, 3665), Function.ability(455, \"Stop_Redirect_quick\", cmd_quick, 1691, 3665), Function.ability(456,", "autocast.\"\"\" action.action_ui.toggle_autocast.ability_id = ability_id class ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\", \"name\", \"sizes\",", "autocast, 386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen, 169), Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick, 2328),", "pylint: enable=line-too-long # Some indexes to support features.py and action", "Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick, 2342, 3662), Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast, 2342, 3662),", "390), Function.ability(323, \"Morph_Uproot_quick\", cmd_quick, 3681), Function.ability(324, 
\"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725, 3681),", "\"Research_CombatShield_quick\", cmd_quick, 731), Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick, 732), Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick,", "\"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974, 3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116), Function.ability(212, \"Effect_NeuralParasite_screen\",", "Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick, 3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861, 3699), Function.ability(416,", "866, 3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855,", "screen2): \"\"\"Select units within a rectangle.\"\"\" select = action.action_feature_layer.unit_selection_rect out_rect", "for select_point. arguments: The list of arguments for that function,", "cmd_minimap, 211, 3690), Function.ability(349, \"Rally_Nexus_screen\", cmd_screen, 207, 3690), Function.ability(350, \"Rally_Nexus_minimap\",", "Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick, 216), Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick, 1093), Function.ability(367, \"Research_GraviticDrive_quick\",", "six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id: f for f in FUNCTIONS if", "\"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 652, 3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\",", "new `FunctionCall` instance. 
\"\"\" if isinstance(arguments, dict): arguments = Arguments(**arguments)", "quick command like 'Stop' or 'Stim'.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id", "2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast, 2099), Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen, 401), Function.ability(248,", "# limitations under the License. \"\"\"Define the static list of", "obs: obs.player_common.larva_count > 0), Function.ui_func(10, \"unload\", unload, lambda obs: obs.ui_data.HasField(\"cargo\")),", "cmd_screen, 211, 3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211, 3690), Function.ability(349, \"Rally_Nexus_screen\",", "to be used in ValidActions.\"\"\" return cls(id_, name, None, None,", "select_unit_id def control_group(action, control_group_act, control_group_id): \"\"\"Act on a control group,", "from a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index = unload_id def build_queue(action, build_queue_id): \"\"\"Cancel", "Function.ability(500, \"Train_VoidRay_quick\", cmd_quick, 950), Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick, 976), Function.ability(502, \"Train_WidowMine_quick\",", "arguments) class ValidActions(collections.namedtuple( \"ValidActions\", [\"types\", \"functions\"])): \"\"\"The set of types", "def move_camera(action, minimap): \"\"\"Move the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act,", "in self.args)) class Functions(object): \"\"\"Represents the full set of functions.", "f for f in functions} if len(self._func_dict) != len(self._func_list): raise", "Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast, 1376, 3662), Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick, 1380, 3662),", "selection or replace it. 
select_unit_act: What to do when selecting", "cmd_screen, 1154), Function.ability(53, \"Build_Factory_screen\", cmd_screen, 328), Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen, 885),", "3662), Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick, 2110, 3662), Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick, 1435,", "cmd_quick, 558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364), Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362),", "Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564, 3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick, 3694), Function.ability(390,", "cmd_quick, 3665), Function.ability(454, \"Stop_Building_quick\", cmd_quick, 2057, 3665), Function.ability(455, \"Stop_Redirect_quick\", cmd_quick,", "a command that needs a point on the screen.\"\"\" action_cmd", "0), Function.ui_func(10, \"unload\", unload, lambda obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\", build_queue,", "ints. For select_point this could be: [[0], [23, 38]]. \"\"\"", "= () @classmethod def ui_func(cls, id_, name, function_type, avail_fn=always): \"\"\"Define", "3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36, 3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550,", "Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast, 1392, 3662), Function.ability(140, \"Cancel_quick\", cmd_quick, 3659), Function.ability(141,", "1036), Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen, 2346), Function.ability(220, \"Effect_Repair_screen\", cmd_screen, 3685), Function.ability(221,", "TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army:", "the protos to send to the game. \"\"\" __slots__ =", "a point? 
POINT_REQUIRED_FUNCS = { False: {cmd_quick, autocast}, True: {cmd_screen,", "functions require. Unlike TYPES above, this includes the sizes for", "cmd_screen, 1413), Function.ability(511, \"UnloadAll_quick\", cmd_quick, 3664), Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick, 408,", "= build_queue_id def cmd_quick(action, ability_id, queued): \"\"\"Do a quick command", "cmd_screen, 894), Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen, 891), Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen, 322),", "avail_fn) @classmethod def ability(cls, id_, name, function_type, ability_id, general_id=0): \"\"\"Define", "\"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick,", "by a more general action. function_type: One of the functions", "\"Load_Medivac_screen\", cmd_screen, 394, 3668), Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen, 1437, 3668), Function.ability(291,", "Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen, 1437, 3668), Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen, 2370, 3668),", "922), Function.ability(458, \"Train_Baneling_quick\", cmd_quick, 80), Function.ability(459, \"Train_Banshee_quick\", cmd_quick, 621), Function.ability(460,", "cmd_screen, 320), Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen, 1165), Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen, 892),", "3685), Function.ability(226, \"Effect_Salvage_quick\", cmd_quick, 32), Function.ability(227, \"Effect_Scan_screen\", cmd_screen, 399), Function.ability(228,", "731), Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick, 732), Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick, 764), Function.ability(364,", "804), Function.ability(371, \"Research_InfernalPreigniter_quick\", 
cmd_quick, 761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44), Function.ability(373,", "cmd_screen, 1417), Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen, 1416), Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen, 1418),", "KIND, either express or implied. # See the License for", "[\"id\", \"name\", \"sizes\", \"fn\"])): \"\"\"Represents a single argument type. Attributes:", "1396, 3662), Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick, 2110, 3662), Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick,", "\"Train_Zealot_quick\", cmd_quick, 916), Function.ability(504, \"Train_Zergling_quick\", cmd_quick, 1343), Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen,", "\"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865, 3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866, 3700), Function.ability(423,", "3659), Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick, 2354, 3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick, 1373,", "is unique. 
name: The name of the argument, also unique.", "spec(cls, id_, name, args): \"\"\"Create a Function to be used", "2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen, 144), Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen, 1036), Function.ability(219,", "= action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord) def", "= bool(select_add) def select_idle_worker(action, select_worker): \"\"\"Select an idle worker.\"\"\" action.action_ui.select_idle_worker.type", "cmd_quick, 485, 3679), Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick, 1522, 3679), Function.ability(286, \"Lift_Starport_quick\",", "Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick, 790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\",", "require a point? POINT_REQUIRED_FUNCS = { False: {cmd_quick, autocast}, True:", "\"Effect_SpawnChangeling_quick\", cmd_quick, 181), Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen, 2704), Function.ability(230, \"Effect_Spray_screen\", cmd_screen,", "cmd_quick, 1455), Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick, 1454), Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick, 820),", "1343), Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen, 1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417), Function.ability(507,", "Function.ability(12, \"Attack_screen\", cmd_screen, 3674), Function.ability(13, \"Attack_minimap\", cmd_minimap, 3674), Function.ability(14, \"Attack_Attack_screen\",", "and action conversion. 
ABILITY_IDS = collections.defaultdict(set) # {ability_id: {funcs}} for", "\"unload\", unload, lambda obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\", build_queue, lambda obs:", "\"Effect_GravitonBeam_screen\", cmd_screen, 173), Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick, 76), Function.ability(198, \"Effect_Heal_screen\", cmd_screen,", "\"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383, 3677), Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick, 3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\",", "\"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804), Function.ability(371, \"Research_InfernalPreigniter_quick\", cmd_quick, 761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick,", "\"Research_RavenCorvidReactor_quick\", cmd_quick, 793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803), Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick,", "sizes, None) class Arguments(collections.namedtuple(\"Arguments\", [ \"screen\", \"minimap\", \"screen2\", \"queued\", \"control_group_act\",", "game ability.\"\"\" assert function_type in ABILITY_FUNCTIONS return cls(id_, name, ability_id,", "Function.ui_func(10, \"unload\", unload, lambda obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\", build_queue, lambda", "store for the action function. 
arguments: The values to store", "\"Build_Pylon_screen\", cmd_screen, 881), Function.ability(71, \"Build_Reactor_quick\", cmd_quick, 3683), Function.ability(72, \"Build_Reactor_screen\", cmd_screen,", "cmd_quick, 315, 3660), Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick, 348, 3660), Function.ability(264, \"Harvest_Gather_screen\",", "Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857, 3701), Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick, 217), Function.ability(428,", "\"select_point\", select_point), Function.ui_func(3, \"select_rect\", select_rect), Function.ui_func(4, \"select_control_group\", control_group), Function.ui_func(5, \"select_unit\",", "select by id. select_worker: What to do when selecting a", "Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick, 2708), Function.ability(311, \"Morph_Overseer_quick\", cmd_quick, 1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\",", "(the \"License\"); # you may not use this file except", "199, 3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap, 199, 3673), Function.ability(343, \"Rally_Workers_screen\", cmd_screen,", "\"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855, 3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\",", "cmd_screen, 2544), Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen, 1764), Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen, 2063),", "Function.ability(338, \"Rally_Building_minimap\", cmd_minimap, 195, 3673), Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen, 212, 3673),", "_: True class Function(collections.namedtuple( \"Function\", [\"id\", \"name\", \"ability_id\", \"general_id\", \"function_type\",", "Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen, 1156), Function.ability(52, 
\"Build_Extractor_screen\", cmd_screen, 1154), Function.ability(53, \"Build_Factory_screen\",", "1190, 3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191, 3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick,", "general_id: 0 for normal abilities, and the ability_id of another", "cmd_screen, 386), Function.ability(199, \"Effect_Heal_autocast\", autocast, 386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen, 169),", "call action. Attributes: function: Store the function id, eg 2", "to convert the list of integers into something more meaningful", "\"Morph_OrbitalCommand_quick\", cmd_quick, 1516), Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick, 2708), Function.ability(311, \"Morph_Overseer_quick\", cmd_quick,", "\"Cancel_FactoryAddOn_quick\", cmd_quick, 484, 3659), Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick, 174, 3659), Function.ability(148,", "cmd_quick, 917), Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick, 1356), Function.ability(495, \"Train_Tempest_quick\", cmd_quick, 955),", "Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick, 1514, 3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast, 1514, 3662),", "3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188, 3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick, 3706),", "select_worker: What to do when selecting a worker. 
build_queue_id: Which", "3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194, 3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252),", "3685), Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast, 78, 3685), Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen, 316,", "3683), Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick, 455, 3683), Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen, 455,", "\"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316, 3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317, 3702), Function.ability(433,", "\"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190, 3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191, 3704), Function.ability(441,", "\"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732, 3659), Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick, 517, 3659), Function.ability(167,", "Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen, 1414), Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen, 1413), Function.ability(511, \"UnloadAll_quick\",", "ability_id class ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\", \"name\", \"sizes\", \"fn\"])): \"\"\"Represents a", "Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen, 2505), Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen, 319), Function.ability(92, \"Build_TechLab_quick\",", "possible Types.\"\"\" named = {name: type_._replace(id=Arguments._fields.index(name), name=name) for name, type_", "cmd_screen, 319), Function.ability(92, \"Build_TechLab_quick\", cmd_quick, 3682), Function.ability(93, \"Build_TechLab_screen\", cmd_screen, 3682),", "autocast, 2099), Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen, 401), Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick, 2391),", 
"\"Research_ShadowStrike_quick\", cmd_quick, 2720), Function.ability(405, \"Research_Stimpack_quick\", cmd_quick, 730), Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick,", "976), Function.ability(502, \"Train_WidowMine_quick\", cmd_quick, 614), Function.ability(503, \"Train_Zealot_quick\", cmd_quick, 916), Function.ability(504,", "cmd_quick, 1188, 3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick, 3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick,", "# # Unless required by applicable law or agreed to", "3661), Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick, 2108, 3661), Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick, 1433,", "from __future__ import print_function import collections import numbers import six", "Function.ability(467, \"Train_Drone_quick\", cmd_quick, 1342), Function.ability(468, \"Train_Ghost_quick\", cmd_quick, 562), Function.ability(469, \"Train_Hellbat_quick\",", "\"Build_Barracks_screen\", cmd_screen, 321), Function.ability(43, \"Build_Bunker_screen\", cmd_screen, 324), Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen,", "specific unit from the multi-unit selection.\"\"\" select = action.action_ui.multi_panel select.type", "\"BurrowUp_WidowMine_quick\", cmd_quick, 2097, 3662), Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick, 1392, 3662), Function.ability(139,", "cmd_minimap, 199, 3673), Function.ability(343, \"Rally_Workers_screen\", cmd_screen, 3690), Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap,", "how to construct the sc2 action proto out of python", "0), lambda a: point.Point(*a).floor()) @classmethod def spec(cls, id_, name, sizes):", "arguments, so build something similar. 
\"\"\" def __init__(self, functions): self._func_list", "cmd_quick, 591), Function.ability(493, \"Train_Stalker_quick\", cmd_quick, 917), Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick, 1356),", "1166), Function.ability(86, \"Build_Spire_screen\", cmd_screen, 1158), Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen, 1167), Function.ability(88,", "2358), Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen, 2244), Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen, 1664), Function.ability(243,", "cmd_quick, 1344), Function.ability(484, \"Train_Phoenix_quick\", cmd_quick, 946), Function.ability(485, \"Train_Probe_quick\", cmd_quick, 1006),", "3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211, 3690), Function.ability(349, \"Rally_Nexus_screen\", cmd_screen, 207,", "cmd_quick, 380, 3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683, 3675), Function.ability(239, \"Effect_SupplyDrop_screen\",", "= action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued def cmd_screen(action,", "each of the dimensions this argument takes. fn: The function", "What to do with the control group. 
control_group_id: Which control", "\"Effect_SupplyDrop_screen\", cmd_screen, 255), Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen, 2358), Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen,", "use namedtuple since python3 has a limit of 255 function", "self._func_list = functions self._func_dict = {f.name: f for f in", "3694), Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065, 3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066,", "class ValidActions(collections.namedtuple( \"ValidActions\", [\"types\", \"functions\"])): \"\"\"The set of types and", "avail_fn: For non-abilities, this function returns whether the function is", "\"UnloadAll_CommandCenter_quick\", cmd_quick, 413, 3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438, 3664), Function.ability(515,", "cmd_screen, 1664), Function.ability(243, \"Effect_ViperConsume_screen\", cmd_screen, 2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393),", "of 255 function arguments, so build something similar. \"\"\" def", "1160), Function.ability(62, \"Build_Interceptors_quick\", cmd_quick, 1042), Function.ability(63, \"Build_Interceptors_autocast\", autocast, 1042), Function.ability(64,", "implied. # See the License for the specific language governing", "\"\"\" __slots__ = () def __str__(self): return \"%s/%s %s\" %", "under the License. 
\"\"\"Define the static list of types and", "obs: obs.player_common.idle_worker_count > 0), Function.ui_func(7, \"select_army\", select_army, lambda obs: obs.player_common.army_count", "3665), Function.ability(457, \"Train_Adept_quick\", cmd_quick, 922), Function.ability(458, \"Train_Baneling_quick\", cmd_quick, 80), Function.ability(459,", "cmd_quick, 2340, 3661), Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick, 1386, 3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\",", "of arguments for that function, each being a list of", "the functions require. Unlike TYPES above, this includes the sizes", "392, 3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382, 3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick,", "cmd_screen, 2700, 3687), Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen, 171), Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen,", "3668), Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen, 1437, 3668), Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen, 2370,", "queue. unload_id=ArgumentType.scalar(500), # Depends on the current loaded units. 
)", "Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417), Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen, 1416), Function.ability(508, \"TrainWarp_Sentry_screen\",", "cmd_screen, 421, 3682), Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick, 454, 3682), Function.ability(97, \"Build_TechLab_Factory_screen\",", "function_type, ability_id, general_id=0): \"\"\"Define a function represented as a game", "cmd_quick, 390), Function.ability(323, \"Morph_Uproot_quick\", cmd_quick, 3681), Function.ability(324, \"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725,", "# Everything below here is generated with gen_actions.py Function.ability(12, \"Attack_screen\",", "624), Function.ability(499, \"Train_Viper_quick\", cmd_quick, 1354), Function.ability(500, \"Train_VoidRay_quick\", cmd_quick, 950), Function.ability(501,", "cmd_quick, 1186, 3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187, 3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\",", "{k: frozenset(v) for k, v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id:", "1514, 3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast, 1514, 3662), Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick,", "\"Build_Hatchery_screen\", cmd_screen, 1152), Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen, 1157), Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen,", "always = lambda _: True class Function(collections.namedtuple( \"Function\", [\"id\", \"name\",", "= queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id): \"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id =", "cmd_screen, 171), Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen, 2324), Function.ability(185, \"Effect_Charge_screen\", cmd_screen, 1819),", "\"Effect_SpawnLocusts_screen\", cmd_screen, 2704), Function.ability(230, 
\"Effect_Spray_screen\", cmd_screen, 3684), Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen,", "3690), Function.ability(349, \"Rally_Nexus_screen\", cmd_screen, 207, 3690), Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap, 207,", "on the minimap.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command", "3666), Function.ability(266, \"Harvest_Gather_Mule_screen\", cmd_screen, 166, 3666), Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen, 298,", "1563, 3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564, 3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick,", "[TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act,", "893), Function.ability(83, \"Build_SensorTower_screen\", cmd_screen, 326), Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen, 1155), Function.ability(85,", "2328), Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen, 247), Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen, 251), Function.ability(205,", "3659), Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick, 2333, 3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113,", "that needs a point on the screen.\"\"\" action_cmd = action.action_feature_layer.unit_command", "\"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308, 3671), Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick, 1831, 3671), Function.ability(175,", "\"Research_ProtossAirArmor_quick\", cmd_quick, 3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565, 3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\",", "948), Function.ability(462, \"Train_Colossus_quick\", cmd_quick, 978), Function.ability(463, 
\"Train_Corruptor_quick\", cmd_quick, 1353), Function.ability(464,", "[TYPES.queued, TYPES.minimap], autocast: [], } # Which ones need an", "Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen, 419, 3678), Function.ability(278, \"Land_Factory_screen\", cmd_screen, 520, 3678),", "\"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913, 3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913, 3669), ])", "True class Function(collections.namedtuple( \"Function\", [\"id\", \"name\", \"ability_id\", \"general_id\", \"function_type\", \"args\",", "1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick, 2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\", cmd_screen, 2558), Function.ability(306,", "2146), Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen, 2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen, 2162), Function.ability(217,", "on current build queue. unload_id=ArgumentType.scalar(500), # Depends on the current", "Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen, 2704), Function.ability(230, \"Effect_Spray_screen\", cmd_screen, 3684), Function.ability(231, \"Effect_Spray_Protoss_screen\",", "cmd_quick, 454, 3682), Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen, 454, 3682), Function.ability(98, \"Build_TechLab_Starport_quick\",", "selection. 
select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), # Depends", "bool(select_add) def select_idle_worker(action, select_worker): \"\"\"Select an idle worker.\"\"\" action.action_ui.select_idle_worker.type =", "28, 3684), Function.ability(234, \"Effect_Stim_quick\", cmd_quick, 3675), Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick, 253,", "screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add) def select_idle_worker(action, select_worker): \"\"\"Select an idle", "\"Build_Bunker_screen\", cmd_screen, 324), Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen, 318), Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen,", "3691), Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen, 894), Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen, 891), Function.ability(50,", "\"Train_Marauder_quick\", cmd_quick, 563), Function.ability(477, \"Train_Marine_quick\", cmd_quick, 560), Function.ability(478, \"Train_Medivac_quick\", cmd_quick,", "for more details. Attributes: screen: A point on the screen.", "\"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36, 3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550, 3688), Function.ability(37,", "212, 3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap, 212, 3673), Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen,", "sc2. 
general_id: 0 for normal abilities, and the ability_id of", "cmd_quick, 1218), Function.ability(303, \"Morph_Lair_quick\", cmd_quick, 1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick, 2560),", "like 'Stop' or 'Stim'.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id", "\"Cancel_StasisTrap_quick\", cmd_quick, 2535, 3659), Function.ability(168, \"Cancel_Last_quick\", cmd_quick, 3671), Function.ability(169, \"Cancel_HangarQueue5_quick\",", "cmd_quick, 1392, 3662), Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast, 1392, 3662), Function.ability(140, \"Cancel_quick\",", "# Depends on current selection. select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll,", "cmd_quick, 32), Function.ability(227, \"Effect_Scan_screen\", cmd_screen, 399), Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick, 181),", "\"Train_Stalker_quick\", cmd_quick, 917), Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick, 1356), Function.ability(495, \"Train_Tempest_quick\", cmd_quick,", "sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), # Depends on current", "and the ability_id of another ability if it can be", "cmd_quick, 1343), Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen, 1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417),", "\"Rally_CommandCenter_minimap\", cmd_minimap, 203, 3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen, 211, 3690), Function.ability(348,", "set of functions. 
Can't use namedtuple since python3 has a", "857, 3701), Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick, 217), Function.ability(428, \"Research_WarpGate_quick\", cmd_quick, 1568),", "Unless required by applicable law or agreed to in writing,", "3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick, 392, 3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382,", "a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index = unload_id def build_queue(action, build_queue_id): \"\"\"Cancel a", "\"minimap\", \"screen2\", \"queued\", \"control_group_act\", \"control_group_id\", \"select_point_act\", \"select_add\", \"select_unit_act\", \"select_unit_id\", \"select_worker\",", "units within a rectangle.\"\"\" select = action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add()", "Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\",", "general_id=0): \"\"\"Define a function represented as a game ability.\"\"\" assert", "cmd_quick, 979), Function.ability(474, \"Train_Infestor_quick\", cmd_quick, 1352), Function.ability(475, \"Train_Liberator_quick\", cmd_quick, 626),", "the specific language governing permissions and # limitations under the", "name, 0, 0, function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod def ability(cls, id_,", "882), Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen, 1162), Function.ability(42, \"Build_Barracks_screen\", cmd_screen, 321), Function.ability(43,", "Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick, 1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\",", "\"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081), 
Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick, 3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick,", "\"select_add\", \"select_unit_act\", \"select_unit_id\", \"select_worker\", \"build_queue_id\", \"unload_id\"])): \"\"\"The full list of", "146), Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick, 148), Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick, 2389), Function.ability(252,", "\"no_op\", no_op), Function.ui_func(1, \"move_camera\", move_camera), Function.ui_func(2, \"select_point\", select_point), Function.ui_func(3, \"select_rect\",", "an `Arguments` object, a `dict`, or an iterable. If a", "python types. args: A list of the types of args", "298, 3666), Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen, 295, 3666), Function.ability(269, \"Harvest_Return_quick\", cmd_quick,", "1565, 3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566, 3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick,", "Function.ui_func(8, \"select_warp_gates\", select_warp_gates, lambda obs: obs.player_common.warp_gate_count > 0), Function.ui_func(9, \"select_larva\",", "what the agent will use. 
name: The name of the", "Function.ability(286, \"Lift_Starport_quick\", cmd_quick, 518, 3679), Function.ability(287, \"Load_screen\", cmd_screen, 3668), Function.ability(288,", "Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick, 1517, 3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709, 3659),", "cmd_quick, 955), Function.ability(496, \"Train_Thor_quick\", cmd_quick, 594), Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick, 1348),", "\"Train_Immortal_quick\", cmd_quick, 979), Function.ability(474, \"Train_Infestor_quick\", cmd_quick, 1352), Function.ability(475, \"Train_Liberator_quick\", cmd_quick,", "\"Rally_Hatchery_Units_screen\", cmd_screen, 212, 3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap, 212, 3673), Function.ability(341,", "True: {cmd_screen, cmd_minimap, autocast}} always = lambda _: True class", "ability(cls, id_, name, function_type, ability_id, general_id=0): \"\"\"Define a function represented", "865, 3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866, 3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick,", "cmd_quick, 790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532),", "\"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069, 3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070, 3696), Function.ability(401,", "`Arguments` object. Returns: A new `FunctionCall` instance. 
\"\"\" if isinstance(arguments,", "2095, 3661), Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick, 1390, 3661), Function.ability(117, \"BurrowUp_quick\", cmd_quick,", "name, None, None, None, args, None) def __hash__(self): # So", "to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10),", "list of known types. TYPES = Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(),", "= {name: type_._replace(id=Arguments._fields.index(name), name=name) for name, type_ in six.iteritems(kwargs)} return", "1731, 3680), Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick, 388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556),", "Function.ability(261, \"Halt_quick\", cmd_quick, 3660), Function.ability(262, \"Halt_Building_quick\", cmd_quick, 315, 3660), Function.ability(263,", "1768), Function.ability(69, \"Build_PhotonCannon_screen\", cmd_screen, 887), Function.ability(70, \"Build_Pylon_screen\", cmd_screen, 881), Function.ability(71,", "\"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861, 3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862, 3699), Function.ability(417,", "cmd_quick, 805), Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick, 790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799),", "for name, type_ in six.iteritems(kwargs)} return cls(**named) # The list", "This is unique. 
name: The name of the argument, also", "[TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap], autocast:", "3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393, 3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383,", "cmd_screen, 2368, 3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974, 3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\",", "Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen, 28, 3684), Function.ability(234, \"Effect_Stim_quick\", cmd_quick, 3675), Function.ability(235,", "an iterable. If a `dict` or an iterable is provided,", "1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530), Function.ability(331,", "Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap, 199, 3673), Function.ability(343, \"Rally_Workers_screen\", cmd_screen, 3690), Function.ability(344,", "set(). 
return self.id def __str__(self): return self.str() def str(self, space=False):", "= ability_id class ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\", \"name\", \"sizes\", \"fn\"])): \"\"\"Represents", "\"BurrowUp_InfestorTerran_autocast\", autocast, 1396, 3662), Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick, 2110, 3662), Function.ability(128,", "Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375), Function.ability(39, \"Build_Armory_screen\", cmd_screen, 331), Function.ability(40, \"Build_Assimilator_screen\",", "cmd_screen, 887), Function.ability(70, \"Build_Pylon_screen\", cmd_screen, 881), Function.ability(71, \"Build_Reactor_quick\", cmd_quick, 3683),", "cmd_quick, 1684, 3675), Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick, 380, 3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\",", "cmd_quick, 2371, 3664), Function.ability(516, \"UnloadAllAt_screen\", cmd_screen, 3669), Function.ability(517, \"UnloadAllAt_minimap\", cmd_minimap,", "764), Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick, 1097), Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick, 216), Function.ability(366,", "True]), # (now vs add to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set,", "2 for select_point. arguments: The list of arguments for that", "3682), Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen, 421, 3682), Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick, 454,", "minimap. functions: A namedtuple of all the functions. \"\"\" __slots__", "function is valid. 
\"\"\" __slots__ = () @classmethod def ui_func(cls,", "306, 3671), Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick, 312, 3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick,", "cmd_quick, 3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594, 3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick,", "print_function import collections import numbers import six from pysc2.lib import", "3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855, 3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856,", "Function.ability(482, \"Train_Oracle_quick\", cmd_quick, 954), Function.ability(483, \"Train_Overlord_quick\", cmd_quick, 1344), Function.ability(484, \"Train_Phoenix_quick\",", "obs.player_common.warp_gate_count > 0), Function.ui_func(9, \"select_larva\", select_larva, lambda obs: obs.player_common.larva_count >", "function_type, FUNCTION_TYPES[function_type], None) @classmethod def spec(cls, id_, name, args): \"\"\"Create", "556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364), Function.ability(321,", "autocast}, True: {cmd_screen, cmd_minimap, autocast}} always = lambda _: True", "passed to function_type. avail_fn: For non-abilities, this function returns whether", "Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen, 261), Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen, 1825), Function.ability(189, \"Effect_CorrosiveBile_screen\",", "collections.defaultdict(set) # {ability_id: {funcs}} for func in FUNCTIONS: if func.ability_id", "Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen, 144), Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen, 1036), Function.ability(219, \"Effect_PurificationNova_screen\",", "support features.py and action conversion. 
ABILITY_IDS = collections.defaultdict(set) # {ability_id:", "cmd_quick, 1038, 3671), Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick, 304, 3671), Function.ability(171, \"Cancel_Queue5_quick\",", "\"\"\"Do a command that needs a point on the minimap.\"\"\"", "def select_rect(action, select_add, screen, screen2): \"\"\"Select units within a rectangle.\"\"\"", "# (now vs add to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append,", "Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick, 265), Function.ability(361, \"Research_CombatShield_quick\", cmd_quick, 731), Function.ability(362, \"Research_ConcussiveShells_quick\",", "\"Build_TechLab_Starport_screen\", cmd_screen, 487, 3682), Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen, 890), Function.ability(101, \"Build_TwilightCouncil_screen\",", "Attributes: id: The argument id. This is unique. name: The", "sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), # Depends on current selection.", "select.selection_screen_coord.add() screen_rect = point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add)", "212, 3673), Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen, 199, 3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap,", "rectangle.\"\"\" select = action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add() screen_rect = point.Rect(screen,", "[TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker:", "296, 3667), Function.ability(274, \"HoldPosition_quick\", cmd_quick, 18), 
Function.ability(275, \"Land_screen\", cmd_screen, 3678),", "s2clientprotocol import ui_pb2 as sc_ui def no_op(action): del action def", "911, 3668), Function.ability(294, \"LoadAll_quick\", cmd_quick, 3663), Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick, 416,", "cmd_quick, 1978), Function.ability(302, \"Morph_Hive_quick\", cmd_quick, 1218), Function.ability(303, \"Morph_Lair_quick\", cmd_quick, 1216),", "since python3 has a limit of 255 function arguments, so", "= Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), # (now vs", "cmd_quick, 862, 3699), Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863, 3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\",", "\"Harvest_Return_quick\", cmd_quick, 3667), Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick, 1184, 3667), Function.ability(271, \"Harvest_Return_Mule_quick\",", "314, 3659), Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick, 1763, 3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick,", "1223), Function.ability(381, \"Research_ProtossAirArmor_quick\", cmd_quick, 3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565, 3692),", "Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566, 3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567, 3692),", "cmd_quick, 1831, 3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833, 3671), Function.ability(176, \"Effect_Abduct_screen\",", "1567, 3692), Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick, 3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562,", "cmd_minimap, 1), Function.ability(453, \"Stop_quick\", cmd_quick, 3665), Function.ability(454, \"Stop_Building_quick\", cmd_quick, 
2057,", "\"Train_HighTemplar_quick\", cmd_quick, 919), Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick, 1345), Function.ability(473, \"Train_Immortal_quick\", cmd_quick,", "below here is generated with gen_actions.py Function.ability(12, \"Attack_screen\", cmd_screen, 3674),", "\"Effect_Charge_screen\", cmd_screen, 1819), Function.ability(186, \"Effect_Charge_autocast\", autocast, 1819), Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen,", "dimensions this argument takes. fn: The function to convert the", "a[0]) @classmethod def point(cls): # No range because it's unknown", "Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick, 487, 3682), Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen, 487, 3682),", "cmd_quick, 1848, 3659), Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick, 1517, 3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\",", "lambda a: a[0]) @classmethod def point(cls): # No range because", "\"Rally_Workers_screen\", cmd_screen, 3690), Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap, 3690), Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen,", "general action. 
function_type: One of the functions in FUNCTION_TYPES for", "= point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add) def select_idle_worker(action,", "3665), Function.ability(456, \"Stop_Stop_quick\", cmd_quick, 4, 3665), Function.ability(457, \"Train_Adept_quick\", cmd_quick, 922),", "cmd_screen, 3678), Function.ability(276, \"Land_Barracks_screen\", cmd_screen, 554, 3678), Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen,", "\"Train_Carrier_quick\", cmd_quick, 948), Function.ability(462, \"Train_Colossus_quick\", cmd_quick, 978), Function.ability(463, \"Train_Corruptor_quick\", cmd_quick,", "3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856, 3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857,", "the full set of functions. Can't use namedtuple since python3", "1070, 3696), Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick, 1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick, 793),", "cmd_quick, 1316, 3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317, 3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\",", "1522, 3679), Function.ability(286, \"Lift_Starport_quick\", cmd_quick, 518, 3679), Function.ability(287, \"Load_screen\", cmd_screen,", "a worker. build_queue_id: Which build queue index to target. 
unload_id:", "1530), Function.ability(331, \"Move_screen\", cmd_screen, 16), Function.ability(332, \"Move_minimap\", cmd_minimap, 16), Function.ability(333,", "cmd_screen, 1155), Function.ability(85, \"Build_SpineCrawler_screen\", cmd_screen, 1166), Function.ability(86, \"Build_Spire_screen\", cmd_screen, 1158),", "ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None) @classmethod def spec(cls, id_, name,", "select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), # Depends on", "\"Lift_Factory_quick\", cmd_quick, 485, 3679), Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick, 1522, 3679), Function.ability(286,", "Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen, 1694, 3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733, 3691),", "lambda obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\", select_idle_worker, lambda obs: obs.player_common.idle_worker_count >", "Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap, 23, 3674), Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen, 2048, 3674),", "action_cmd.queue_command = queued def cmd_screen(action, ability_id, queued, screen): \"\"\"Do a", "\"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658, 3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\",", "cmd_quick, 1342), Function.ability(468, \"Train_Ghost_quick\", cmd_quick, 562), Function.ability(469, \"Train_Hellbat_quick\", cmd_quick, 596),", "1351), Function.ability(490, \"Train_SCV_quick\", cmd_quick, 524), Function.ability(491, \"Train_Sentry_quick\", cmd_quick, 921), Function.ability(492,", "\"\"\"Cancel a unit in the build queue.\"\"\" 
action.action_ui.production_panel.unit_index = build_queue_id", "max+1 of each of the dimensions this argument takes. fn:", "\"BurrowUp_Lurker_quick\", cmd_quick, 2110, 3662), Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick, 1435, 3662), Function.ability(129,", "cmd_quick, 1094), Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick, 1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick, 650),", "cmd_quick, 730), Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick, 3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656,", "absolute_import from __future__ import division from __future__ import print_function import", "is needed so that no function takes the same type", "880), Function.ability(66, \"Build_Nuke_quick\", cmd_quick, 710), Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen, 1161), Function.ability(68,", "cmd_screen, 488, 3683), Function.ability(79, \"Build_Refinery_screen\", cmd_screen, 320), Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen,", "Function.ability(308, \"Morph_Mothership_quick\", cmd_quick, 1847), Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick, 1516), Function.ability(310, \"Morph_OverlordTransport_quick\",", "979), Function.ability(474, \"Train_Infestor_quick\", cmd_quick, 1352), Function.ability(475, \"Train_Liberator_quick\", cmd_quick, 626), Function.ability(476,", "used in ValidActions.\"\"\" return cls(id_, name, None, None, None, args,", "\"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692), Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick, 3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick,", "\"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544), Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen, 1764), Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen,", "\"Effect_GuardianShield_quick\", cmd_quick, 76), Function.ability(198, 
\"Effect_Heal_screen\", cmd_screen, 386), Function.ability(199, \"Effect_Heal_autocast\", autocast,", "Function.ui_func(0, \"no_op\", no_op), Function.ui_func(1, \"move_camera\", move_camera), Function.ui_func(2, \"select_point\", select_point), Function.ui_func(3,", "is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR", "3659), Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick, 314, 3659), Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick, 1763,", "autocast, 78, 3685), Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen, 316, 3685), Function.ability(225, \"Effect_Repair_SCV_autocast\",", "3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick, 403), Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick, 405), Function.ability(328,", "cmd_quick, 2113, 3659), Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick, 1848, 3659), Function.ability(156, \"Cancel_MorphOrbital_quick\",", "\"Train_Probe_quick\", cmd_quick, 1006), Function.ability(486, \"Train_Queen_quick\", cmd_quick, 1632), Function.ability(487, \"Train_Raven_quick\", cmd_quick,", "select.type = select_unit_act select.unit_index = select_unit_id def control_group(action, control_group_act, control_group_id):", "Function.ability(511, \"UnloadAll_quick\", cmd_quick, 3664), Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick, 408, 3664), Function.ability(513,", "Function.ui_func(9, \"select_larva\", select_larva, lambda obs: obs.player_common.larva_count > 0), Function.ui_func(10, \"unload\",", "Function.ability(191, \"Effect_Explode_quick\", cmd_quick, 42), Function.ability(192, \"Effect_Feedback_screen\", cmd_screen, 140), Function.ability(193, \"Effect_ForceField_screen\",", "`FunctionCall` instance. \"\"\" if isinstance(arguments, dict): arguments = Arguments(**arguments) elif", "Some indexes to support features.py and action conversion. 
ABILITY_IDS =", "len(self._func_dict) != len(self._func_list): raise ValueError(\"Function names must be unique.\") def", "Function.ability(103, \"BurrowDown_quick\", cmd_quick, 3661), Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick, 1374, 3661), Function.ability(105,", "Depends on current build queue. unload_id=ArgumentType.scalar(500), # Depends on the", "\"Train_Mutalisk_quick\", cmd_quick, 1346), Function.ability(481, \"Train_Observer_quick\", cmd_quick, 977), Function.ability(482, \"Train_Oracle_quick\", cmd_quick,", "\"select_control_group\", control_group), Function.ui_func(5, \"select_unit\", select_unit, lambda obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\",", "\"BurrowUp_Baneling_autocast\", autocast, 1376, 3662), Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick, 1380, 3662), Function.ability(122,", "in the protos to send to the game. \"\"\" __slots__", "cmd_quick, 2364), Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362), Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick, 390),", "\"Rally_Nexus_screen\", cmd_screen, 207, 3690), Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap, 207, 3690), Function.ability(351,", "\"BurrowUp_Baneling_quick\", cmd_quick, 1376, 3662), Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast, 1376, 3662), Function.ability(121,", "\"\"\"Select the entire army.\"\"\" action.action_ui.select_army.selection_add = select_add def select_warp_gates(action, select_add):", "0, function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod def ability(cls, id_, name, function_type,", "76), Function.ability(198, \"Effect_Heal_screen\", cmd_screen, 386), Function.ability(199, \"Effect_Heal_autocast\", autocast, 386), Function.ability(200,", "2700, 3687), Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen, 171), Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen, 2324),", "cmd_quick, 560), 
Function.ability(478, \"Train_Medivac_quick\", cmd_quick, 620), Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick, 1853),", "to add the unit to the selection or replace it.", "build queue index to target. unload_id: Which unit to target", "Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen, 1764), Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen, 2063), Function.ability(180, \"Effect_Blink_screen\",", "3684), Function.ability(234, \"Effect_Stim_quick\", cmd_quick, 3675), Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick, 253, 3675),", "Attributes: id: The function id, which is what the agent", "Function.ability(359, \"Research_Charge_quick\", cmd_quick, 1592), Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick, 265), Function.ability(361, \"Research_CombatShield_quick\",", "also unique. sizes: The max+1 of each of the dimensions", "cmd_quick, 408, 3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick, 413, 3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\",", "functions self._func_dict = {f.name: f for f in functions} if", "queued=ArgumentType.enum([False, True]), # (now vs add to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall,", "Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick, 1376, 3662), Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast, 1376, 3662),", "326), Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen, 1155), Function.ability(85, \"Build_SpineCrawler_screen\", cmd_screen, 1166), Function.ability(86,", "You may obtain a copy of the License at #", "\"select_unit\", select_unit, lambda obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\", select_idle_worker, lambda obs:", "# pylint: disable=line-too-long FUNCTIONS = Functions([ Function.ui_func(0, \"no_op\", no_op), Function.ui_func(1,", "\"Train_Corruptor_quick\", cmd_quick, 1353), Function.ability(464, 
\"Train_Cyclone_quick\", cmd_quick, 597), Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick,", "cmd_screen, 1764), Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen, 2063), Function.ability(180, \"Effect_Blink_screen\", cmd_screen, 3687),", "160), Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick, 162), Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick, 164), Function.ability(261,", "cmd_quick, 484, 3659), Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick, 174, 3659), Function.ability(148, \"Cancel_LockOn_quick\",", "def types(cls, **kwargs): \"\"\"Create an Arguments of the possible Types.\"\"\"", "\"BurrowDown_quick\", cmd_quick, 3661), Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick, 1374, 3661), Function.ability(105, \"BurrowDown_Drone_quick\",", "cmd_quick, 314, 3659), Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick, 1763, 3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\",", "select.action = control_group_act select.control_group_index = control_group_id def unload(action, unload_id): \"\"\"Unload", "\"Effect_WidowMineAttack_screen\", cmd_screen, 2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast, 2099), Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen,", "\"select_warp_gates\", select_warp_gates, lambda obs: obs.player_common.warp_gate_count > 0), Function.ui_func(9, \"select_larva\", select_larva,", "Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen, 421, 3682), Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick, 454, 3682),", "3685), Function.ability(221, \"Effect_Repair_autocast\", autocast, 3685), Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen, 78, 3685),", "\"Harvest_Return_Drone_quick\", cmd_quick, 1184, 3667), Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick, 167, 3667), Function.ability(272,", "1155), Function.ability(85, 
\"Build_SpineCrawler_screen\", cmd_screen, 1166), Function.ability(86, \"Build_Spire_screen\", cmd_screen, 1158), Function.ability(87,", "3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067, 3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695),", "ability_id of another ability if it can be represented by", "\"select_worker\", \"build_queue_id\", \"unload_id\"])): \"\"\"The full list of argument types. Take", "autocast, 2342, 3662), Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick, 1388, 3662), Function.ability(133, \"BurrowUp_Roach_autocast\",", "cmd_screen, 1156), Function.ability(52, \"Build_Extractor_screen\", cmd_screen, 1154), Function.ability(53, \"Build_Factory_screen\", cmd_screen, 328),", "\"\"\"Move the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act, screen): \"\"\"Select a", "1514, 3662), Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick, 2097, 3662), Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick,", "\"Research_ProtossGroundArmor_quick\", cmd_quick, 3694), Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065, 3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\",", "self.str() def str(self, space=False): \"\"\"String version. 
Set space=True to line", "Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast, 316, 3685), Function.ability(226, \"Effect_Salvage_quick\", cmd_quick, 32), Function.ability(227,", "cls(-1, \"<none>\", (value,), lambda a: a[0]) @classmethod def point(cls): #", "cmd_quick, 2365, 3659), Function.ability(162, \"Cancel_NeuralParasite_quick\", cmd_quick, 250, 3659), Function.ability(163, \"Cancel_Nuke_quick\",", "needs a point on the minimap.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id", "select.type = select_point_act def select_rect(action, select_add, screen, screen2): \"\"\"Select units", "cmd_screen, 298, 3666), Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen, 295, 3666), Function.ability(269, \"Harvest_Return_quick\",", "def select_unit(action, select_unit_act, select_unit_id): \"\"\"Select a specific unit from the", "args passed to function_type. avail_fn: For non-abilities, this function returns", "cmd_quick, 1386, 3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick, 2014, 3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\",", "\"Morph_VikingAssaultMode_quick\", cmd_quick, 403), Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick, 405), Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick,", "1252), Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253), Function.ability(451, \"Smart_screen\", cmd_screen, 1), Function.ability(452,", "Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick, 591), Function.ability(493, \"Train_Stalker_quick\", cmd_quick, 917), Function.ability(494, \"Train_SwarmHost_quick\",", "of the dimensions this argument takes. 
fn: The function to", "Function.ability(343, \"Rally_Workers_screen\", cmd_screen, 3690), Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap, 3690), Function.ability(345, \"Rally_CommandCenter_screen\",", "cmd_quick, 3682), Function.ability(93, \"Build_TechLab_screen\", cmd_screen, 3682), Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick, 421,", "Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap, 203, 3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen, 211, 3690),", "864, 3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865, 3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick,", "Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick, 1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick, 793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\",", "1165), Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen, 892), Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen, 893), Function.ability(83,", "unload_id=ArgumentType.scalar(500), # Depends on the current loaded units. ) #", "minimap. screen2: The second point for a rectangle. This is", "333), Function.ability(57, \"Build_Gateway_screen\", cmd_screen, 883), Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen, 327), Function.ability(59,", "3662), Function.ability(118, \"BurrowUp_autocast\", autocast, 3662), Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick, 1376, 3662),", "[TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap], autocast: [], }", "queue index to target. 
unload_id: Which unit to target in", "2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081), Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick, 3677), Function.ability(24,", "3682), Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen, 487, 3682), Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen, 890),", "3678), Function.ability(276, \"Land_Barracks_screen\", cmd_screen, 554, 3678), Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen, 419,", "cmd_quick, 1313, 3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314, 3703), Function.ability(437, \"Research_ZergGroundArmor_quick\",", "211, 3690), Function.ability(349, \"Rally_Nexus_screen\", cmd_screen, 207, 3690), Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap,", "\"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113, 3659), Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick, 1848, 3659), Function.ability(156,", "Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick, 1094), Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick, 1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\",", "selection.\"\"\" select = action.action_ui.multi_panel select.type = select_unit_act select.unit_index = select_unit_id", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913, 3669), ]) # pylint: enable=line-too-long #", "object, a `dict`, or an iterable. 
If a `dict` or", "3683), Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen, 455, 3683), Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick, 488,", "cmd_quick, 1352), Function.ability(475, \"Train_Liberator_quick\", cmd_quick, 626), Function.ability(476, \"Train_Marauder_quick\", cmd_quick, 563),", "integers into something more meaningful to be set in the", "cmd_screen, 251), Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen, 2588), Function.ability(206, \"Effect_LockOn_screen\", cmd_screen, 2350),", "% (self.id, self.name, list(self.sizes)) @classmethod def enum(cls, options): \"\"\"Create an", "cmd_quick, 921), Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick, 591), Function.ability(493, \"Train_Stalker_quick\", cmd_quick, 917),", "cmd_screen, 2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544), Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen, 1764),", "cmd_screen, 2558), Function.ability(306, \"Morph_Lurker_quick\", cmd_quick, 2332), Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick, 2112),", "> 0), Function.ui_func(7, \"select_army\", select_army, lambda obs: obs.player_common.army_count > 0),", "\"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194, 3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252), Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\",", "\"BurrowUp_SwarmHost_quick\", cmd_quick, 2016, 3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick, 1514, 3662), Function.ability(136,", "action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued def cmd_screen(action, ability_id,", "2116), Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen, 249), Function.ability(213, \"Effect_NukeCalldown_screen\", cmd_screen, 1622), Function.ability(214,", "Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick, 1220), 
Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick, 1998), Function.ability(301, \"Morph_Hellion_quick\",", "Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick, 164), Function.ability(261, \"Halt_quick\", cmd_quick, 3660), Function.ability(262, \"Halt_Building_quick\",", "build_queue_id def cmd_quick(action, ability_id, queued): \"\"\"Do a quick command like", "it with. select_point_act: What to do with the unit at", "lambda obs: obs.ui_data.HasField(\"production\")), # Everything below here is generated with", "and minimap. functions: A namedtuple of all the functions. \"\"\"", "3682), Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen, 890), Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen, 886), Function.ability(102,", "out of python types. args: A list of the types", "Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692), Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick, 3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\",", "1388, 3662), Function.ability(133, \"BurrowUp_Roach_autocast\", autocast, 1388, 3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick,", "cmd_quick, 1517, 3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709, 3659), Function.ability(158, \"Cancel_MorphOverseer_quick\",", "2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393), Function.ability(245, \"Effect_WidowMineAttack_screen\", cmd_screen, 2099), Function.ability(246,", "3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186, 3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187,", "unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap:", "Function.ability(516, \"UnloadAllAt_screen\", cmd_screen, 3669), Function.ability(517, \"UnloadAllAt_minimap\", 
cmd_minimap, 3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\",", "cmd_quick, 2332), Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick, 2112), Function.ability(308, \"Morph_Mothership_quick\", cmd_quick, 1847),", "Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396, 3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen, 1408, 3669),", "namedtuple of the types that the functions require. Unlike TYPES", "\"Build_RoboticsFacility_screen\", cmd_screen, 893), Function.ability(83, \"Build_SensorTower_screen\", cmd_screen, 326), Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen,", "army.\"\"\" action.action_ui.select_army.selection_add = select_add def select_warp_gates(action, select_add): \"\"\"Select all warp", "def cmd_minimap(action, ability_id, queued, minimap): \"\"\"Do a command that needs", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "1442, 3687), Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen, 2700, 3687), Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen,", "cmd_quick, 1192, 3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193, 3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\",", "Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick, 3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393, 3677), Function.ability(25,", "2108, 3661), Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick, 1433, 3661), Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick,", "License. # You may obtain a copy of the License", "\"Effect_TimeWarp_screen\", cmd_screen, 2244), Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen, 1664), Function.ability(243, \"Effect_ViperConsume_screen\", cmd_screen,", "includes the sizes for screen and minimap. 
functions: A namedtuple", "Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick, 766), Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick, 1283), Function.ability(375, \"Research_NeosteelFrame_quick\",", "self._func_dict[name] def __getitem__(self, key): if isinstance(key, numbers.Number): return self._func_list[key] return", "of integers into something more meaningful to be set in", "in ValidActions.\"\"\" return cls(id_, name, None, None, None, args, None)", "cmd_quick, 1563, 3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564, 3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\",", "gen_actions.py Function.ability(12, \"Attack_screen\", cmd_screen, 3674), Function.ability(13, \"Attack_minimap\", cmd_minimap, 3674), Function.ability(14,", "Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick, 1449, 3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451, 3659),", "queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([", "Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen, 1664), Function.ability(243, \"Effect_ViperConsume_screen\", cmd_screen, 2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\",", "a unit in the build queue.\"\"\" action.action_ui.production_panel.unit_index = build_queue_id def", "3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383, 3677), Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick, 3676),", "autocast(action, ability_id): \"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id = ability_id class ArgumentType(collections.namedtuple( \"ArgumentType\",", "Function.ability(442, 
\"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186, 3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187, 3705),", "cmd_screen, 913, 3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913, 3669), ]) #", "__getattr__(self, name): return self._func_dict[name] def __getitem__(self, key): if isinstance(key, numbers.Number):", "3687), Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen, 1442, 3687), Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen, 2700,", "3659), Function.ability(162, \"Cancel_NeuralParasite_quick\", cmd_quick, 250, 3659), Function.ability(163, \"Cancel_Nuke_quick\", cmd_quick, 1623,", "3668), Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen, 911, 3668), Function.ability(294, \"LoadAll_quick\", cmd_quick, 3663),", "Function.ui_func(7, \"select_army\", select_army, lambda obs: obs.player_common.army_count > 0), Function.ui_func(8, \"select_warp_gates\",", "Function.ability(143, \"Cancel_BarracksAddOn_quick\", cmd_quick, 451, 3659), Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick, 314, 3659),", "Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick, 1217, 3659), Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick, 2333, 3659),", "cmd_screen, 884), Function.ability(56, \"Build_FusionCore_screen\", cmd_screen, 333), Function.ability(57, \"Build_Gateway_screen\", cmd_screen, 883),", "\"Build_SpawningPool_screen\", cmd_screen, 1155), Function.ability(85, \"Build_SpineCrawler_screen\", cmd_screen, 1166), Function.ability(86, \"Build_Spire_screen\", cmd_screen,", "3667), Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick, 167, 3667), Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick, 299,", "Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen, 1036), Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen, 2346), Function.ability(220, \"Effect_Repair_screen\",", "agent to 
use. Attributes: types: A namedtuple of the types", "ability if it can be represented by a more general", "1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick, 3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315, 3702),", "cmd_quick, 3660), Function.ability(262, \"Halt_Building_quick\", cmd_quick, 315, 3660), Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick,", "1729, 3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731, 3680), Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick,", "485, 3679), Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick, 1522, 3679), Function.ability(286, \"Lift_Starport_quick\", cmd_quick,", "Function.ability(454, \"Stop_Building_quick\", cmd_quick, 2057, 3665), Function.ability(455, \"Stop_Redirect_quick\", cmd_quick, 1691, 3665),", "FUNCTIONS if f.avail_fn} class FunctionCall(collections.namedtuple( \"FunctionCall\", [\"function\", \"arguments\"])): \"\"\"Represents a", "cmd_quick, 650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804), Function.ability(371, \"Research_InfernalPreigniter_quick\", cmd_quick, 761),", "set of known values.\"\"\" return cls(-1, \"<none>\", (len(options),), lambda a:", "\"Build_CreepTumor_Queen_screen\", cmd_screen, 1694, 3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733, 3691), Function.ability(48,", "cmd_screen, 883), Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen, 327), Function.ability(59, \"Build_Hatchery_screen\", cmd_screen, 1152),", "action_cmd.ability_id = ability_id action_cmd.queue_command = queued def cmd_screen(action, ability_id, queued,", "327), Function.ability(59, \"Build_Hatchery_screen\", cmd_screen, 1152), Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen, 1157), Function.ability(61,", "the point. 
select_add: Whether to add the unit to the", "261), Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen, 1825), Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen, 2338), Function.ability(190,", "Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick, 146), Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick, 148), Function.ability(251, \"Hallucination_Disruptor_quick\",", "3662), Function.ability(129, \"BurrowUp_Queen_autocast\", autocast, 1435, 3662), Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick, 2342,", "403), Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick, 405), Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick, 1518), Function.ability(329,", "\"Build_UltraliskCavern_screen\", cmd_screen, 1159), Function.ability(103, \"BurrowDown_quick\", cmd_quick, 3661), Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick,", "transport/nydus/command center. \"\"\" ___slots__ = () @classmethod def types(cls, **kwargs):", "cmd_quick, 1376, 3662), Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast, 1376, 3662), Function.ability(121, \"BurrowUp_Drone_quick\",", "3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451, 3659), Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick, 2331,", "of ints. 
For select_point this could be: [[0], [23, 38]].", "functions in FUNCTION_TYPES for how to construct the sc2 action", "3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast, 1384, 3662), Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick, 1446,", "\"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656, 3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657, 3697), Function.ability(409,", "cmd_screen, 520, 3678), Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen, 1524, 3678), Function.ability(280, \"Land_Starport_screen\",", "1064, 3695), Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick, 3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068,", "self.name, list(self.sizes)) @classmethod def enum(cls, options): \"\"\"Create an ArgumentType where", "to do when selecting a unit by id. select_unit_id: Which", "\"Train_SiegeTank_quick\", cmd_quick, 591), Function.ability(493, \"Train_Stalker_quick\", cmd_quick, 917), Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick,", "328), Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen, 885), Function.ability(55, \"Build_Forge_screen\", cmd_screen, 884), Function.ability(56,", "a look at TYPES and FUNCTION_TYPES for more details. 
Attributes:", "Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen, 1416), Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen, 1418), Function.ability(509, \"TrainWarp_Stalker_screen\",", "167, 3667), Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick, 299, 3667), Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick,", "Function.ability(405, \"Research_Stimpack_quick\", cmd_quick, 730), Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick, 3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\",", "\"LoadAll_quick\", cmd_quick, 3663), Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick, 416, 3663), Function.ability(296, \"Morph_Archon_quick\",", "cmd_quick, 3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38, 3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick,", "cmd_screen, 3683), Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick, 422, 3683), Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen,", "cmd_minimap, 17), Function.ability(335, \"Rally_Units_screen\", cmd_screen, 3673), Function.ability(336, \"Rally_Units_minimap\", cmd_minimap, 3673),", "point.Point(*a).floor()) @classmethod def spec(cls, id_, name, sizes): \"\"\"Create an ArgumentType", "return self.id def __str__(self): return self.str() def str(self, space=False): \"\"\"String", "{cmd_quick, autocast}, True: {cmd_screen, cmd_minimap, autocast}} always = lambda _:", "Store the function id, eg 2 for select_point. arguments: The", "3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 652, 3698),", "do when selecting a unit by id. 
select_unit_id: Which unit", "select.control_group_index = control_group_id def unload(action, unload_id): \"\"\"Unload a unit from", "\"Cancel_MorphLurker_quick\", cmd_quick, 2333, 3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113, 3659), Function.ability(155,", "One of the functions in FUNCTION_TYPES for how to construct", "The max+1 of each of the dimensions this argument takes.", "move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit:", "\"Effect_Stim_quick\", cmd_quick, 3675), Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick, 253, 3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\",", "cmd_quick, 920), Function.ability(466, \"Train_Disruptor_quick\", cmd_quick, 994), Function.ability(467, \"Train_Drone_quick\", cmd_quick, 1342),", "203, 3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen, 211, 3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap,", "3671), Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick, 312, 3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308,", "487, 3682), Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen, 487, 3682), Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen,", "list of the types of args passed to function_type. avail_fn:", "Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730, 3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732, 3659),", "Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532), Function.ability(356, \"Research_Blink_quick\", cmd_quick, 1593), Function.ability(357, \"Research_Burrow_quick\",", "are valid for an agent to use. 
Attributes: types: A", "func in FUNCTIONS: if func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS =", "\"Build_SporeCrawler_screen\", cmd_screen, 1167), Function.ability(88, \"Build_Stargate_screen\", cmd_screen, 889), Function.ability(89, \"Build_Starport_screen\", cmd_screen,", "cmd_quick, 80), Function.ability(459, \"Train_Banshee_quick\", cmd_quick, 621), Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick, 623),", "is represented by a point.Point.\"\"\" return cls(-1, \"<none>\", (0, 0),", "\"Morph_LiberatorAGMode_screen\", cmd_screen, 2558), Function.ability(306, \"Morph_Lurker_quick\", cmd_quick, 2332), Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick,", "return cls(id_, name, sizes, None) class Arguments(collections.namedtuple(\"Arguments\", [ \"screen\", \"minimap\",", "an ArgumentType that is represented by a point.Point.\"\"\" return cls(-1,", "fn: The function to convert the list of integers into", "same type twice. queued: Whether the action should be done", "obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\", build_queue, lambda obs: obs.ui_data.HasField(\"production\")), # Everything below", "Function.ability(59, \"Build_Hatchery_screen\", cmd_screen, 1152), Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen, 1157), Function.ability(61, \"Build_InfestationPit_screen\",", "from __future__ import division from __future__ import print_function import collections", "be set in the protos to send to the game.", "1819), Function.ability(186, \"Effect_Charge_autocast\", autocast, 1819), Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen, 261), Function.ability(188,", "function to convert the list of integers into something more", "892), Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen, 893), Function.ability(83, \"Build_SensorTower_screen\", cmd_screen, 326), Function.ability(84,", "920), Function.ability(466, \"Train_Disruptor_quick\", cmd_quick, 994), 
Function.ability(467, \"Train_Drone_quick\", cmd_quick, 1342), Function.ability(468,", "\"Cancel_StarportAddOn_quick\", cmd_quick, 517, 3659), Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick, 2535, 3659), Function.ability(168,", "Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick, 2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast, 2328), Function.ability(203, \"Effect_InfestedTerrans_screen\",", "761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44), Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick, 766), Function.ability(374,", "\"UnloadAllAt_screen\", cmd_screen, 3669), Function.ability(517, \"UnloadAllAt_minimap\", cmd_minimap, 3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375), Function.ability(39, \"Build_Armory_screen\", cmd_screen, 331), Function.ability(40, \"Build_Assimilator_screen\", cmd_screen,", "obs.ui_data.HasField(\"production\")), # Everything below here is generated with gen_actions.py Function.ability(12,", "cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap], autocast: [],", "cmd_quick, 517, 3659), Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick, 2535, 3659), Function.ability(168, \"Cancel_Last_quick\",", "\"Train_Battlecruiser_quick\", cmd_quick, 623), Function.ability(461, \"Train_Carrier_quick\", cmd_quick, 948), Function.ability(462, \"Train_Colossus_quick\", cmd_quick,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "Function.ability(470, \"Train_Hellion_quick\", cmd_quick, 595), Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick, 919), Function.ability(472, \"Train_Hydralisk_quick\",", 
"__getitem__(self, key): if isinstance(key, numbers.Number): return self._func_list[key] return self._func_dict[key] def", "\"Effect_InjectLarva_screen\", cmd_screen, 251), Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen, 2588), Function.ability(206, \"Effect_LockOn_screen\", cmd_screen,", "Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen, 911, 3668), Function.ability(294, \"LoadAll_quick\", cmd_quick, 3663), Function.ability(295,", "\"Hallucination_Zealot_quick\", cmd_quick, 164), Function.ability(261, \"Halt_quick\", cmd_quick, 3660), Function.ability(262, \"Halt_Building_quick\", cmd_quick,", "\"build_queue\", build_queue, lambda obs: obs.ui_data.HasField(\"production\")), # Everything below here is", "python3 has a limit of 255 function arguments, so build", "self._func_dict = {f.name: f for f in functions} if len(self._func_dict)", "the values will be unpacked into an `Arguments` object. Returns:", "2048, 3674), Function.ability(18, \"Attack_Redirect_screen\", cmd_screen, 1682, 3674), Function.ability(19, \"Scan_Move_screen\", cmd_screen,", "3667), Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick, 296, 3667), Function.ability(274, \"HoldPosition_quick\", cmd_quick, 18),", "\"Effect_BlindingCloud_screen\", cmd_screen, 2063), Function.ability(180, \"Effect_Blink_screen\", cmd_screen, 3687), Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen,", "\"Effect_Repair_autocast\", autocast, 3685), Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen, 78, 3685), Function.ability(223, \"Effect_Repair_Mule_autocast\",", "# Depends on the current loaded units. 
) # Which", "329), Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen, 2505), Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen, 319), Function.ability(92,", "Function.ability(504, \"Train_Zergling_quick\", cmd_quick, 1343), Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen, 1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\",", "\"\"\"Do a quick command like 'Stop' or 'Stim'.\"\"\" action_cmd =", "331), Function.ability(40, \"Build_Assimilator_screen\", cmd_screen, 882), Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen, 1162), Function.ability(42,", "Function.ability(323, \"Morph_Uproot_quick\", cmd_quick, 3681), Function.ability(324, \"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725, 3681), Function.ability(325,", "522, 3678), Function.ability(281, \"Lift_quick\", cmd_quick, 3679), Function.ability(282, \"Lift_Barracks_quick\", cmd_quick, 452,", "Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen, 247), Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen, 251), Function.ability(205, \"Effect_KD8Charge_screen\",", "\"Effect_Stim_Marauder_quick\", cmd_quick, 253, 3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684, 3675), Function.ability(237,", "required by applicable law or agreed to in writing, software", "\"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563, 3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564, 3693), Function.ability(389,", "eg 2 for select_point. 
arguments: The list of arguments for", "3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313, 3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick, 148), Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick, 2389), Function.ability(252, \"Hallucination_HighTemplar_quick\",", "\"BurrowUp_Hydralisk_quick\", cmd_quick, 1384, 3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast, 1384, 3662), Function.ability(124,", "cmd_quick, 1315, 3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316, 3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\",", "Depends on current selection. select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]),", "of argument types. Take a look at TYPES and FUNCTION_TYPES", "\"Cancel_Queue1_quick\", cmd_quick, 304, 3671), Function.ability(171, \"Cancel_Queue5_quick\", cmd_quick, 306, 3671), Function.ability(172,", "cmd_quick, 1592), Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick, 265), Function.ability(361, \"Research_CombatShield_quick\", cmd_quick, 731),", "function action. 
Attributes: id: The function id, which is what", "list(self.sizes)) @classmethod def enum(cls, options): \"\"\"Create an ArgumentType where you", "1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530), Function.ability(331, \"Move_screen\", cmd_screen, 16), Function.ability(332,", "cmd_quick, 3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068, 3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick,", "an iterable is provided, the values will be unpacked into", "done now or later. control_group_act: What to do with the", "select_add def select_warp_gates(action, select_add): \"\"\"Select all warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add =", "3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 652, 3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653,", "1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick, 650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804), Function.ability(371,", "name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None) @classmethod def spec(cls, id_,", "1218), Function.ability(303, \"Morph_Lair_quick\", cmd_quick, 1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick, 2560), Function.ability(305,", "name, sizes, None) class Arguments(collections.namedtuple(\"Arguments\", [ \"screen\", \"minimap\", \"screen2\", \"queued\",", "ability_id, queued, screen): \"\"\"Do a command that needs a point", "Function.ability(332, \"Move_minimap\", cmd_minimap, 16), Function.ability(333, \"Patrol_screen\", cmd_screen, 17), Function.ability(334, \"Patrol_minimap\",", "= () @classmethod def all_arguments(cls, function, arguments): \"\"\"Helper function for", "functions that are valid for an agent to use. 
Attributes:", "3699), Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863, 3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651),", "cmd_quick, 76), Function.ability(198, \"Effect_Heal_screen\", cmd_screen, 386), Function.ability(199, \"Effect_Heal_autocast\", autocast, 386),", "args): \"\"\"Create a Function to be used in ValidActions.\"\"\" return", "queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id): \"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id = ability_id", "Function to be used in ValidActions.\"\"\" return cls(id_, name, None,", "cmd_quick, 916), Function.ability(504, \"Train_Zergling_quick\", cmd_quick, 1343), Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen, 1419),", "agreed to in writing, software # distributed under the License", "the License. \"\"\"Define the static list of types and actions", "to do with the control group. control_group_id: Which control group", "current loaded units. ) # Which argument types do each", "3698), Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick, 3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861, 3699),", "def enum(cls, options): \"\"\"Create an ArgumentType where you choose one", "Function.ability(284, \"Lift_Factory_quick\", cmd_quick, 485, 3679), Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick, 1522, 3679),", "def autocast(action, ability_id): \"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id = ability_id class ArgumentType(collections.namedtuple(", "function. 
arguments: The values to store for the arguments of", "2081), Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick, 3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393, 3677),", "args, None) def __hash__(self): # So it can go in", "3662), Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick, 1380, 3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick, 1384,", "171), Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen, 2324), Function.ability(185, \"Effect_Charge_screen\", cmd_screen, 1819), Function.ability(186,", "1628), Function.ability(191, \"Effect_Explode_quick\", cmd_quick, 42), Function.ability(192, \"Effect_Feedback_screen\", cmd_screen, 140), Function.ability(193,", "Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick, 1384, 3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast, 1384, 3662),", "ability.\"\"\" assert function_type in ABILITY_FUNCTIONS return cls(id_, name, ability_id, general_id,", "def __str__(self): return \"%s/%s %s\" % (self.id, self.name, list(self.sizes)) @classmethod", "1564, 3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick, 3694), Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065,", "screen and minimap. 
functions: A namedtuple of all the functions.", "cmd_screen, 2505), Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen, 319), Function.ability(92, \"Build_TechLab_quick\", cmd_quick, 3682),", "return cls(id_, name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None) @classmethod def", "Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick, 181), Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen, 2704), Function.ability(230, \"Effect_Spray_screen\",", "3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast, 1514, 3662), Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick, 2097,", "\"queued\", \"control_group_act\", \"control_group_id\", \"select_point_act\", \"select_add\", \"select_unit_act\", \"select_unit_id\", \"select_worker\", \"build_queue_id\", \"unload_id\"])):", "Function.ability(481, \"Train_Observer_quick\", cmd_quick, 977), Function.ability(482, \"Train_Oracle_quick\", cmd_quick, 954), Function.ability(483, \"Train_Overlord_quick\",", "Function.ability(55, \"Build_Forge_screen\", cmd_screen, 884), Function.ability(56, \"Build_FusionCore_screen\", cmd_screen, 333), Function.ability(57, \"Build_Gateway_screen\",", "2709, 3659), Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick, 1449, 3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick,", "it can be represented by a more general action. function_type:", "value to store for the action function. arguments: The values", "= {f.name: f for f in functions} if len(self._func_dict) !=", "\"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193, 3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194, 3706), Function.ability(449,", "# Some indexes to support features.py and action conversion. 
ABILITY_IDS", "3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565, 3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566,", "\"Cancel_MorphOrbital_quick\", cmd_quick, 1517, 3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709, 3659), Function.ability(158,", "\"Build_TechLab_Barracks_screen\", cmd_screen, 421, 3682), Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick, 454, 3682), Function.ability(97,", "\"Move_screen\", cmd_screen, 16), Function.ability(332, \"Move_minimap\", cmd_minimap, 16), Function.ability(333, \"Patrol_screen\", cmd_screen,", "1097), Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick, 216), Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick, 1093), Function.ability(367,", "881), Function.ability(71, \"Build_Reactor_quick\", cmd_quick, 3683), Function.ability(72, \"Build_Reactor_screen\", cmd_screen, 3683), Function.ability(73,", "cmd_quick, 3679), Function.ability(282, \"Lift_Barracks_quick\", cmd_quick, 452, 3679), Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick,", "3682), Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick, 454, 3682), Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen, 454,", "import numbers import six from pysc2.lib import point from s2clientprotocol", "you choose one of a set of known values.\"\"\" return", "queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id, queued, minimap): \"\"\"Do a command", "3674), Function.ability(13, \"Attack_minimap\", cmd_minimap, 3674), Function.ability(14, \"Attack_Attack_screen\", cmd_screen, 23, 3674),", "= select_worker def select_army(action, select_add): \"\"\"Select the entire army.\"\"\" action.action_ui.select_army.selection_add", "representing a ui action.\"\"\" return cls(id_, name, 0, 0, function_type,", "\"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 
1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530), Function.ability(331, \"Move_screen\", cmd_screen,", "point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add) def select_idle_worker(action, select_worker):", "Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick, 1450), Function.ability(313, \"Morph_Ravager_quick\", cmd_quick, 2330), Function.ability(314, \"Morph_Root_screen\",", "\"Effect_Contaminate_screen\", cmd_screen, 1825), Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen, 2338), Function.ability(190, \"Effect_EMP_screen\", cmd_screen,", "# So it can go in a set(). return self.id", "def __len__(self): return len(self._func_list) # pylint: disable=line-too-long FUNCTIONS = Functions([", "cmd_screen, 1152), Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen, 1157), Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen, 1160),", "do it with. select_point_act: What to do with the unit", "1433, 3661), Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick, 2340, 3661), Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick,", "3659), Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick, 1217, 3659), Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick, 2333,", "Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081), Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick, 3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\",", "action.action_ui.multi_panel select.type = select_unit_act select.unit_index = select_unit_id def control_group(action, control_group_act,", "3663), Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick, 416, 3663), Function.ability(296, \"Morph_Archon_quick\", cmd_quick, 1766),", "None, args, None) def __hash__(self): # So it can go", "be done now or later. 
control_group_act: What to do with", "1450), Function.ability(313, \"Morph_Ravager_quick\", cmd_quick, 2330), Function.ability(314, \"Morph_Root_screen\", cmd_screen, 3680), Function.ability(315,", "3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709, 3659), Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick, 1449,", "3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick, 2016, 3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick, 1514,", "2588), Function.ability(206, \"Effect_LockOn_screen\", cmd_screen, 2350), Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen, 2387), Function.ability(208,", "3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193, 3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194,", "queued): \"\"\"Do a quick command like 'Stop' or 'Stim'.\"\"\" action_cmd", "886), Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen, 1159), Function.ability(103, \"BurrowDown_quick\", cmd_quick, 3661), Function.ability(104,", "\"\"\"Select all larva.\"\"\" action.action_ui.select_larva.SetInParent() # Adds the empty proto field.", "Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick, 1373, 3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221, 3659),", "Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick, 3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192, 3706), Function.ability(447,", "an `Arguments` object. Returns: A new `FunctionCall` instance. \"\"\" if", "\"Scan_Move_screen\", cmd_screen, 19, 3674), Function.ability(20, \"Scan_Move_minimap\", cmd_minimap, 19, 3674), Function.ability(21,", "So it can go in a set(). 
return self.id def", "return \"%s/%s (%s)\" % (str(self.id).rjust(space and 4), self.name.ljust(space and 50),", "\"Build_TwilightCouncil_screen\", cmd_screen, 886), Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen, 1159), Function.ability(103, \"BurrowDown_quick\", cmd_quick,", "from s2clientprotocol import ui_pb2 as sc_ui def no_op(action): del action", "lambda obs: obs.player_common.larva_count > 0), Function.ui_func(10, \"unload\", unload, lambda obs:", "all larva.\"\"\" action.action_ui.select_larva.SetInParent() # Adds the empty proto field. def", "sizes for screen and minimap. functions: A namedtuple of all", "something similar. \"\"\" def __init__(self, functions): self._func_list = functions self._func_dict", "cmd_quick, 1450), Function.ability(313, \"Morph_Ravager_quick\", cmd_quick, 2330), Function.ability(314, \"Morph_Root_screen\", cmd_screen, 3680),", "with `Arguments`. Args: function: The value to store for the", "return cls(id_, name, None, None, None, args, None) def __hash__(self):", "as a game ability.\"\"\" assert function_type in ABILITY_FUNCTIONS return cls(id_,", "cmd_quick, 1378, 3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick, 1382, 3661), Function.ability(107, \"BurrowDown_Infestor_quick\",", "Function.ability(133, \"BurrowUp_Roach_autocast\", autocast, 1388, 3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick, 2016, 3662),", "cmd_quick, 1732, 3659), Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick, 517, 3659), Function.ability(167, \"Cancel_StasisTrap_quick\",", "3684), Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen, 26, 3684), Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen, 28,", "def str(self, space=False): \"\"\"String version. 
Set space=True to line them", "Function.ability(495, \"Train_Tempest_quick\", cmd_quick, 955), Function.ability(496, \"Train_Thor_quick\", cmd_quick, 594), Function.ability(497, \"Train_Ultralisk_quick\",", "\"Train_WarpPrism_quick\", cmd_quick, 976), Function.ability(502, \"Train_WidowMine_quick\", cmd_quick, 614), Function.ability(503, \"Train_Zealot_quick\", cmd_quick,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "point on the minimap.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id", "3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063, 3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064,", "\"Morph_WarpGate_quick\", cmd_quick, 1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick,", "select_rect(action, select_add, screen, screen2): \"\"\"Select units within a rectangle.\"\"\" select", "1408, 3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408, 3669), Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen,", "because it's unknown at this time. \"\"\"Create an ArgumentType that", "\"unload_id\"])): \"\"\"The full list of argument types. Take a look", "3667), Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick, 299, 3667), Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick, 296,", "Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen, 327), Function.ability(59, \"Build_Hatchery_screen\", cmd_screen, 1152), Function.ability(60, \"Build_HydraliskDen_screen\",", "\"Research_NeuralParasite_quick\", cmd_quick, 1455), Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick, 1454), Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick,", "id, eg 2 for select_point. 
arguments: The list of arguments", "def select_idle_worker(action, select_worker): \"\"\"Select an idle worker.\"\"\" action.action_ui.select_idle_worker.type = select_worker", "ability? ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap, autocast} # Which ones", "\"Effect_TacticalJump_screen\", cmd_screen, 2358), Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen, 2244), Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen,", "The list of arguments for that function, each being a", "561), Function.ability(489, \"Train_Roach_quick\", cmd_quick, 1351), Function.ability(490, \"Train_SCV_quick\", cmd_quick, 524), Function.ability(491,", "cmd_quick, 1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530),", "name, args): \"\"\"Create a Function to be used in ValidActions.\"\"\"", "should be done now or later. control_group_act: What to do", "Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563, 3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564, 3693),", "select_add=ArgumentType.enum([False, True]), # (select vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType,", "890), Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen, 886), Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen, 1159), Function.ability(103,", "build_queue(action, build_queue_id): \"\"\"Cancel a unit in the build queue.\"\"\" action.action_ui.production_panel.unit_index", "3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317, 3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\", cmd_quick, 3703),", "Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594), Function.ability(352, 
\"Research_AdvancedBallistics_quick\", cmd_quick, 805), Function.ability(353, \"Research_BansheeCloakingField_quick\",", "is provided, the values will be unpacked into an `Arguments`", "cmd_quick, 383, 3677), Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick, 3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick,", "# {ability_id: {funcs}} for func in FUNCTIONS: if func.ability_id >=", "3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365, 3659), Function.ability(162, \"Cancel_NeuralParasite_quick\", cmd_quick, 250,", "\"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709, 3659), Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick, 1449, 3659), Function.ability(159,", "324), Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen, 318), Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen, 3691), Function.ability(46,", "\"Build_Reactor_Barracks_quick\", cmd_quick, 422, 3683), Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen, 422, 3683), Function.ability(75,", "law or agreed to in writing, software # distributed under", "1592), Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick, 265), Function.ability(361, \"Research_CombatShield_quick\", cmd_quick, 731), Function.ability(362,", "150), Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick, 152), Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick, 2114), Function.ability(255,", "function id, eg 2 for select_point. 
arguments: The list of", "queued: Whether the action should be done now or later.", "Function.ability(334, \"Patrol_minimap\", cmd_minimap, 17), Function.ability(335, \"Rally_Units_screen\", cmd_screen, 3673), Function.ability(336, \"Rally_Units_minimap\",", "from pysc2.lib import point from s2clientprotocol import spatial_pb2 as sc_spatial", "cmd_quick, 764), Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick, 1097), Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick, 216),", "3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653, 3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654,", "cmd_screen, 1167), Function.ability(88, \"Build_Stargate_screen\", cmd_screen, 889), Function.ability(89, \"Build_Starport_screen\", cmd_screen, 329),", "\"Rally_Units_screen\", cmd_screen, 3673), Function.ability(336, \"Rally_Units_minimap\", cmd_minimap, 3673), Function.ability(337, \"Rally_Building_screen\", cmd_screen,", "\"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396, 3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast, 1396, 3662), Function.ability(127,", "3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657, 3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658,", "\"Research_MagFieldLaunchers_quick\", cmd_quick, 766), Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick, 1283), Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick,", "3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315, 3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316,", "an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 
652, 3698), Function.ability(412,", "3660), Function.ability(264, \"Harvest_Gather_screen\", cmd_screen, 3666), Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen, 1183, 3666),", "cmd_screen, 419, 3678), Function.ability(278, \"Land_Factory_screen\", cmd_screen, 520, 3678), Function.ability(279, \"Land_OrbitalCommand_screen\",", "cmd_quick, 556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364),", "# No range because it's unknown at this time. \"\"\"Create", "3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683, 3675), Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen, 255),", "3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189, 3704), Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190,", "Function.ability(357, \"Research_Burrow_quick\", cmd_quick, 1225), Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick, 1482), Function.ability(359, \"Research_Charge_quick\",", "action. Attributes: id: The function id, which is what the", "]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]),", "Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070, 3696), Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick, 1126), Function.ability(402,", "for SC2.\"\"\" from __future__ import absolute_import from __future__ import division", "1524, 3678), Function.ability(280, \"Land_Starport_screen\", cmd_screen, 522, 3678), Function.ability(281, \"Lift_quick\", cmd_quick,", "function for creating `FunctionCall`s with `Arguments`. 
Args: function: The value", "go in a set(). return self.id def __str__(self): return self.str()", "action.action_ui.select_army.selection_add = select_add def select_warp_gates(action, select_add): \"\"\"Select all warp gates.\"\"\"", "3661), Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick, 1378, 3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick, 1382,", "\"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368, 3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974, 3686), Function.ability(211,", "790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532), Function.ability(356,", "164), Function.ability(261, \"Halt_quick\", cmd_quick, 3660), Function.ability(262, \"Halt_Building_quick\", cmd_quick, 315, 3660),", "Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen, 1165), Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen, 892), Function.ability(82, \"Build_RoboticsFacility_screen\",", "cmd_quick, 304, 3671), Function.ability(171, \"Cancel_Queue5_quick\", cmd_quick, 306, 3671), Function.ability(172, \"Cancel_QueueAddOn_quick\",", "cmd_quick, 1454), Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick, 820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 46),", "cmd_screen, 326), Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen, 1155), Function.ability(85, \"Build_SpineCrawler_screen\", cmd_screen, 1166),", "may obtain a copy of the License at # #", "types of args passed to function_type. 
avail_fn: For non-abilities, this", "861, 3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862, 3699), Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick,", "2364), Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362), Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick, 390), Function.ability(323,", "Function.ability(93, \"Build_TechLab_screen\", cmd_screen, 3682), Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick, 421, 3682), Function.ability(95,", "cmd_quick, 793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803), Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick, 2720),", "cmd_quick, 1283), Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick, 655), Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick, 1455),", "point from s2clientprotocol import spatial_pb2 as sc_spatial from s2clientprotocol import", "Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick, 1512, 3661), Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick, 2095, 3661),", "Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick, 820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 46), Function.ability(380, \"Research_PneumatizedCarapace_quick\",", "(select vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500),", "[TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group:", "do each function need? 
FUNCTION_TYPES = { no_op: [], move_camera:", "None) class Arguments(collections.namedtuple(\"Arguments\", [ \"screen\", \"minimap\", \"screen2\", \"queued\", \"control_group_act\", \"control_group_id\",", "3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731, 3680), Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick, 388),", "may not use this file except in compliance with the", "import division from __future__ import print_function import collections import numbers", "a point on the screen.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id =", "something more meaningful to be set in the protos to", "cmd_minimap, 195, 3673), Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen, 212, 3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\",", "cmd_quick, 1593), Function.ability(357, \"Research_Burrow_quick\", cmd_quick, 1225), Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick, 1482),", "cmd_quick, 1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick, 650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804),", "# (select vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]),", "2387), Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen, 3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368, 3686),", "3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866, 3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701),", "\"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382, 3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\",", "this file except in compliance with the 
License. # You", "\"\"\"Create a Function to be used in ValidActions.\"\"\" return cls(id_,", "cmd_quick, 3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062, 3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick,", "Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen, 1167), Function.ability(88, \"Build_Stargate_screen\", cmd_screen, 889), Function.ability(89, \"Build_Starport_screen\",", "and FUNCTION_TYPES for more details. Attributes: screen: A point on", "range because it's unknown at this time. \"\"\"Create an ArgumentType", "Function.ability(461, \"Train_Carrier_quick\", cmd_quick, 948), Function.ability(462, \"Train_Colossus_quick\", cmd_quick, 978), Function.ability(463, \"Train_Corruptor_quick\",", "cmd_quick, 4, 3665), Function.ability(457, \"Train_Adept_quick\", cmd_quick, 922), Function.ability(458, \"Train_Baneling_quick\", cmd_quick,", "cmd_quick, 2389), Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick, 150), Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick, 152),", "\"TrainWarp_HighTemplar_screen\", cmd_screen, 1416), Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen, 1418), Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen,", "cmd_quick, 710), Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen, 1161), Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen, 1768),", "six from pysc2.lib import point from s2clientprotocol import spatial_pb2 as", "\"Rally_Morphing_Unit_minimap\", cmd_minimap, 199, 3673), Function.ability(343, \"Rally_Workers_screen\", cmd_screen, 3690), Function.ability(344, \"Rally_Workers_minimap\",", "# # Licensed under the Apache License, Version 2.0 (the", "\"Effect_Repair_SCV_screen\", cmd_screen, 316, 3685), Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast, 316, 3685), Function.ability(226,", "valid for an agent to use. 
Attributes: types: A namedtuple", "Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393, 3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383, 3677),", "need an ability? ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap, autocast} #", "1345), Function.ability(473, \"Train_Immortal_quick\", cmd_quick, 979), Function.ability(474, \"Train_Infestor_quick\", cmd_quick, 1352), Function.ability(475,", "\"\"\"Create an ArgumentType where you choose one of a set", "Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast, 2099), Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen, 401), Function.ability(248, \"Hallucination_Adept_quick\",", "cmd_quick, 1065, 3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066, 3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\",", "cmd_screen, 1768), Function.ability(69, \"Build_PhotonCannon_screen\", cmd_screen, 887), Function.ability(70, \"Build_Pylon_screen\", cmd_screen, 881),", "at this time. \"\"\"Create an ArgumentType that is represented by", "Function.ability(457, \"Train_Adept_quick\", cmd_quick, 922), Function.ability(458, \"Train_Baneling_quick\", cmd_quick, 80), Function.ability(459, \"Train_Banshee_quick\",", "3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730, 3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732,", "3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833, 3671), Function.ability(176, \"Effect_Abduct_screen\", cmd_screen, 2067),", "cmd_quick, 656, 3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657, 3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\",", "this function returns whether the function is valid. 
\"\"\" __slots__", "\"Effect_Explode_quick\", cmd_quick, 42), Function.ability(192, \"Effect_Feedback_screen\", cmd_screen, 140), Function.ability(193, \"Effect_ForceField_screen\", cmd_screen,", "cmd_quick, 562), Function.ability(469, \"Train_Hellbat_quick\", cmd_quick, 596), Function.ability(470, \"Train_Hellion_quick\", cmd_quick, 595),", "Function.ability(517, \"UnloadAllAt_minimap\", cmd_minimap, 3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen, 396, 3669), Function.ability(519,", "cmd_quick, 413, 3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438, 3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\",", "251), Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen, 2588), Function.ability(206, \"Effect_LockOn_screen\", cmd_screen, 2350), Function.ability(207,", "3679), Function.ability(287, \"Load_screen\", cmd_screen, 3668), Function.ability(288, \"Load_Bunker_screen\", cmd_screen, 407, 3668),", "values to store for the arguments of the action. 
Can", "3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368, 3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974,", "1593), Function.ability(357, \"Research_Burrow_quick\", cmd_quick, 1225), Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick, 1482), Function.ability(359,", "\"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id = ability_id class ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\", \"name\",", "meaningful to be set in the protos to send to", "Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen, 1413), Function.ability(511, \"UnloadAll_quick\", cmd_quick, 3664), Function.ability(512, \"UnloadAll_Bunker_quick\",", "ArgumentType that is represented by a point.Point.\"\"\" return cls(-1, \"<none>\",", "Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727, 3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick, 403), Function.ability(327,", "1156), Function.ability(52, \"Build_Extractor_screen\", cmd_screen, 1154), Function.ability(53, \"Build_Factory_screen\", cmd_screen, 328), Function.ability(54,", "1283), Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick, 655), Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick, 1455), Function.ability(377,", "ValidActions.\"\"\" return cls(id_, name, sizes, None) class Arguments(collections.namedtuple(\"Arguments\", [ \"screen\",", "select_add, screen, screen2): \"\"\"Select units within a rectangle.\"\"\" select =", "\"Build_MissileTurret_screen\", cmd_screen, 323), Function.ability(65, \"Build_Nexus_screen\", cmd_screen, 880), Function.ability(66, \"Build_Nuke_quick\", cmd_quick,", "id_, name, function_type, ability_id, general_id=0): \"\"\"Define a function represented as", "and functions that are valid for an agent to use.", "323), Function.ability(65, \"Build_Nexus_screen\", cmd_screen, 880), 
Function.ability(66, \"Build_Nuke_quick\", cmd_quick, 710), Function.ability(67,", "\"Research_HiSecAutoTracking_quick\", cmd_quick, 650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804), Function.ability(371, \"Research_InfernalPreigniter_quick\", cmd_quick,", "Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast, 1384, 3662), Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick, 1446, 3662),", "\"Research_ZergMissileWeapons_quick\", cmd_quick, 3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192, 3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\",", "name, type_ in six.iteritems(kwargs)} return cls(**named) # The list of", "1066, 3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067, 3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick,", "cmd_quick, 1006), Function.ability(486, \"Train_Queen_quick\", cmd_quick, 1632), Function.ability(487, \"Train_Raven_quick\", cmd_quick, 622),", "Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen, 319), Function.ability(92, \"Build_TechLab_quick\", cmd_quick, 3682), Function.ability(93, \"Build_TechLab_screen\",", "Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393), Function.ability(245, \"Effect_WidowMineAttack_screen\", cmd_screen, 2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\",", "\"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438, 3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick, 2371, 3664), Function.ability(516,", "\"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451, 3659), Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick, 2331, 3659), Function.ability(161,", "TYPES.select_unit_id], control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates: 
[TYPES.select_add],", "autocast, 316, 3685), Function.ability(226, \"Effect_Salvage_quick\", cmd_quick, 32), Function.ability(227, \"Effect_Scan_screen\", cmd_screen,", "cmd_screen, 891), Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen, 322), Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen, 1156),", "cmd_quick, 1193, 3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194, 3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\",", "this includes the sizes for screen and minimap. functions: A", "\"Train_Raven_quick\", cmd_quick, 622), Function.ability(488, \"Train_Reaper_quick\", cmd_quick, 561), Function.ability(489, \"Train_Roach_quick\", cmd_quick,", "each function need? FUNCTION_TYPES = { no_op: [], move_camera: [TYPES.minimap],", "621), Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick, 623), Function.ability(461, \"Train_Carrier_quick\", cmd_quick, 948), Function.ability(462,", "add the unit to the selection or replace it. 
select_unit_act:", "function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod def ability(cls, id_, name, function_type, ability_id,", "cmd_quick, 421, 3682), Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen, 421, 3682), Function.ability(96, \"Build_TechLab_Factory_quick\",", "3682), Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen, 454, 3682), Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick, 487,", "654, 3698), Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick, 3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861,", "\"Smart_screen\", cmd_screen, 1), Function.ability(452, \"Smart_minimap\", cmd_minimap, 1), Function.ability(453, \"Stop_quick\", cmd_quick,", "Function.ability(490, \"Train_SCV_quick\", cmd_quick, 524), Function.ability(491, \"Train_Sentry_quick\", cmd_quick, 921), Function.ability(492, \"Train_SiegeTank_quick\",", "with a single scalar in range(value).\"\"\" return cls(-1, \"<none>\", (value,),", "on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "3659), Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick, 517, 3659), Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick, 2535,", "function call action. Attributes: function: Store the function id, eg", "or implied. # See the License for the specific language", "\"Build_Spire_screen\", cmd_screen, 1158), Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen, 1167), Function.ability(88, \"Build_Stargate_screen\", cmd_screen,", "= action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add() screen_rect = point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0)", "cmd_screen, 1158), Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen, 1167), Function.ability(88, \"Build_Stargate_screen\", cmd_screen, 889),", "cls(**named) # The list of known types. 
TYPES = Arguments.types(", "1449, 3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451, 3659), Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick,", "1444, 3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394, 3661), Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick,", "the unit to the selection or replace it. select_unit_act: What", "similar. \"\"\" def __init__(self, functions): self._func_list = functions self._func_dict =", "cmd_screen, 333), Function.ability(57, \"Build_Gateway_screen\", cmd_screen, 883), Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen, 327),", "3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick, 2014, 3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick, 1512,", "takes the same type twice. queued: Whether the action should", "\"BurrowDown_Ravager_quick\", cmd_quick, 2340, 3661), Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick, 1386, 3661), Function.ability(113,", "cmd_screen, 19, 3674), Function.ability(20, \"Scan_Move_minimap\", cmd_minimap, 19, 3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\",", "1437, 3668), Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen, 2370, 3668), Function.ability(292, \"Load_Overlord_screen\", cmd_screen,", "screen_rect = point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add = bool(select_add) def", "\"build_queue_id\", \"unload_id\"])): \"\"\"The full list of argument types. 
Take a", "891), Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen, 322), Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen, 1156), Function.ability(52,", "Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317, 3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\", cmd_quick, 3703), Function.ability(434,", "1221, 3659), Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick, 1219, 3659), Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick,", "cmd_quick, 3694), Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065, 3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick,", "cmd_screen, 316, 3685), Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast, 316, 3685), Function.ability(226, \"Effect_Salvage_quick\",", "return iter(self._func_list) def __len__(self): return len(self._func_list) # pylint: disable=line-too-long FUNCTIONS", "sizes): \"\"\"Create an ArgumentType to be used in ValidActions.\"\"\" return", "{ no_op: [], move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add,", "Function.ability(361, \"Research_CombatShield_quick\", cmd_quick, 731), Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick, 732), Function.ability(363, \"Research_DrillingClaws_quick\",", "3684), Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen, 28, 3684), Function.ability(234, \"Effect_Stim_quick\", cmd_quick, 3675),", "cmd_screen, 28, 3684), Function.ability(234, \"Effect_Stim_quick\", cmd_quick, 3675), Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick,", "arguments = Arguments(**arguments) elif not isinstance(arguments, Arguments): arguments = Arguments(*arguments)", "Function.ability(499, \"Train_Viper_quick\", cmd_quick, 1354), Function.ability(500, \"Train_VoidRay_quick\", cmd_quick, 950), Function.ability(501, 
\"Train_WarpPrism_quick\",", "of another ability if it can be represented by a", "cmd_screen, 880), Function.ability(66, \"Build_Nuke_quick\", cmd_quick, 710), Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen, 1161),", "Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick, 380, 3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683, 3675),", "Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\",", "\"Research_PersonalCloaking_quick\", cmd_quick, 820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 46), Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick,", "cmd_quick, 518, 3679), Function.ability(287, \"Load_screen\", cmd_screen, 3668), Function.ability(288, \"Load_Bunker_screen\", cmd_screen,", "\"\"\"Represents a function action. Attributes: id: The function id, which", "\"Morph_Unsiege_quick\", cmd_quick, 390), Function.ability(323, \"Morph_Uproot_quick\", cmd_quick, 3681), Function.ability(324, \"Morph_SpineCrawlerUproot_quick\", cmd_quick,", "Function.ability(89, \"Build_Starport_screen\", cmd_screen, 329), Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen, 2505), Function.ability(91, \"Build_SupplyDepot_screen\",", "so that no function takes the same type twice. queued:", "Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick, 1388, 3662), Function.ability(133, \"BurrowUp_Roach_autocast\", autocast, 1388, 3662),", "is what the agent will use. name: The name of", "Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick, 1345), Function.ability(473, \"Train_Immortal_quick\", cmd_quick, 979), Function.ability(474, \"Train_Infestor_quick\",", "1406, 3668), Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen, 911, 3668), Function.ability(294, \"LoadAll_quick\", cmd_quick,", "later. 
control_group_act: What to do with the control group. control_group_id:", "of the types that the functions require. Unlike TYPES above,", "Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen, 2358), Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen, 2244), Function.ability(242, \"Effect_Transfusion_screen\",", "details. Attributes: screen: A point on the screen. minimap: A", "cls(-1, \"<none>\", (0, 0), lambda a: point.Point(*a).floor()) @classmethod def spec(cls,", "\"Morph_Hellion_quick\", cmd_quick, 1978), Function.ability(302, \"Morph_Hive_quick\", cmd_quick, 1218), Function.ability(303, \"Morph_Lair_quick\", cmd_quick,", "\"Cancel_LockOn_quick\", cmd_quick, 2354, 3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick, 1373, 3659), Function.ability(150,", "cmd_quick, 1062, 3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063, 3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\",", "Function.ui_func(1, \"move_camera\", move_camera), Function.ui_func(2, \"select_point\", select_point), Function.ui_func(3, \"select_rect\", select_rect), Function.ui_func(4,", "622), Function.ability(488, \"Train_Reaper_quick\", cmd_quick, 561), Function.ability(489, \"Train_Roach_quick\", cmd_quick, 1351), Function.ability(490,", "type_._replace(id=Arguments._fields.index(name), name=name) for name, type_ in six.iteritems(kwargs)} return cls(**named) #", "or an iterable. 
If a `dict` or an iterable is", "Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick, 1038, 3671), Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick, 304, 3671),", "cmd_quick, 1623, 3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730, 3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\",", "cmd_quick, 416, 3663), Function.ability(296, \"Morph_Archon_quick\", cmd_quick, 1766), Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick,", "\"Cancel_GravitonBeam_quick\", cmd_quick, 174, 3659), Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick, 2354, 3659), Function.ability(149,", "cmd_quick, 3662), Function.ability(118, \"BurrowUp_autocast\", autocast, 3662), Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick, 1376,", "Function.ability(466, \"Train_Disruptor_quick\", cmd_quick, 994), Function.ability(467, \"Train_Drone_quick\", cmd_quick, 1342), Function.ability(468, \"Train_Ghost_quick\",", "405), Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick, 1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 1528), Function.ability(330,", "= () def __str__(self): return \"%s/%s %s\" % (self.id, self.name,", "Function.ability(230, \"Effect_Spray_screen\", cmd_screen, 3684), Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen, 30, 3684), Function.ability(232,", "3668), Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen, 2370, 3668), Function.ability(292, \"Load_Overlord_screen\", cmd_screen, 1406,", "Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen, 322), Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen, 1156), Function.ability(52, \"Build_Extractor_screen\",", "screen): \"\"\"Select a unit at a point.\"\"\" select = action.action_feature_layer.unit_selection_point", "cmd_quick, 3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189, 3704), Function.ability(439, 
\"Research_ZergGroundArmorLevel2_quick\", cmd_quick,", "ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k: frozenset(v) for k, v in six.iteritems(ABILITY_IDS)}", "Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen, 74), Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen, 2714), Function.ability(196, \"Effect_GravitonBeam_screen\",", "\"Build_Starport_screen\", cmd_screen, 329), Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen, 2505), Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen,", "sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False,", "855, 3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856, 3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick,", "3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen, 396, 3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396,", "1), Function.ability(453, \"Stop_quick\", cmd_quick, 3665), Function.ability(454, \"Stop_Building_quick\", cmd_quick, 2057, 3665),", "1394, 3661), Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick, 2108, 3661), Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick,", "\"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317, 3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\", cmd_quick, 3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\",", "Function.ability(337, \"Rally_Building_screen\", cmd_screen, 195, 3673), Function.ability(338, \"Rally_Building_minimap\", cmd_minimap, 195, 3673),", "Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick, 167, 3667), 
Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick, 299, 3667),", "no_op(action): del action def move_camera(action, minimap): \"\"\"Move the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap)", "cmd_quick, 1390, 3661), Function.ability(117, \"BurrowUp_quick\", cmd_quick, 3662), Function.ability(118, \"BurrowUp_autocast\", autocast,", "1532), Function.ability(356, \"Research_Blink_quick\", cmd_quick, 1593), Function.ability(357, \"Research_Burrow_quick\", cmd_quick, 1225), Function.ability(358,", "\"Morph_PlanetaryFortress_quick\", cmd_quick, 1450), Function.ability(313, \"Morph_Ravager_quick\", cmd_quick, 2330), Function.ability(314, \"Morph_Root_screen\", cmd_screen,", "Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick, 312, 3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308, 3671),", "\"Train_Phoenix_quick\", cmd_quick, 946), Function.ability(485, \"Train_Probe_quick\", cmd_quick, 1006), Function.ability(486, \"Train_Queen_quick\", cmd_quick,", "field. def select_unit(action, select_unit_act, select_unit_id): \"\"\"Select a specific unit from", "id: The function id, which is what the agent will", "action.action_ui.cargo_panel.unit_index = unload_id def build_queue(action, build_queue_id): \"\"\"Cancel a unit in", "build_queue_id: Which build queue index to target. 
unload_id: Which unit", "[TYPES.select_add], select_larva: [], unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen:", "sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType,", "399), Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick, 181), Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen, 2704), Function.ability(230,", "\"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530), Function.ability(331, \"Move_screen\", cmd_screen, 16), Function.ability(332, \"Move_minimap\", cmd_minimap,", "cmd_screen, 3690), Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap, 3690), Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen, 203,", "cmd_quick, 1564, 3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick, 3694), Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick,", "\"Build_StasisTrap_screen\", cmd_screen, 2505), Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen, 319), Function.ability(92, \"Build_TechLab_quick\", cmd_quick,", "620), Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick, 1853), Function.ability(480, \"Train_Mutalisk_quick\", cmd_quick, 1346), Function.ability(481,", "cmd_quick, 866, 3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick,", "function, arguments): \"\"\"Helper function for creating `FunctionCall`s with `Arguments`. 
Args:", "3661), Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick, 1374, 3661), Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick, 1378,", "2048, 3674), Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap, 2048, 3674), Function.ability(18, \"Attack_Redirect_screen\", cmd_screen,", "cmd_quick, 2112), Function.ability(308, \"Morph_Mothership_quick\", cmd_quick, 1847), Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick, 1516),", "Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick, 1998), Function.ability(301, \"Morph_Hellion_quick\", cmd_quick, 1978), Function.ability(302, \"Morph_Hive_quick\",", "Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803), Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick, 2720), Function.ability(405, \"Research_Stimpack_quick\",", "Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick, 3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36, 3688), Function.ability(36,", "Function.ability(502, \"Train_WidowMine_quick\", cmd_quick, 614), Function.ability(503, \"Train_Zealot_quick\", cmd_quick, 916), Function.ability(504, \"Train_Zergling_quick\",", "name, sizes): \"\"\"Create an ArgumentType to be used in ValidActions.\"\"\"", "3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick, 484, 3659), Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick, 174,", "1312, 3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313, 3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick,", "cmd_quick, 1190, 3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191, 3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\",", "1691, 3665), Function.ability(456, \"Stop_Stop_quick\", cmd_quick, 4, 3665), Function.ability(457, \"Train_Adept_quick\", cmd_quick,", "the multi-unit selection.\"\"\" select = 
action.action_ui.multi_panel select.type = select_unit_act select.unit_index", "\"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733, 3691), Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen, 894), Function.ability(49, \"Build_DarkShrine_screen\",", "\"Lift_CommandCenter_quick\", cmd_quick, 417, 3679), Function.ability(284, \"Lift_Factory_quick\", cmd_quick, 485, 3679), Function.ability(285,", "\"\"\"String version. Set space=True to line them all up nicely.\"\"\"", "or 'Stim'.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command =", "types that the functions require. Unlike TYPES above, this includes", "3703), Function.ability(437, \"Research_ZergGroundArmor_quick\", cmd_quick, 3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189, 3704),", "Function.ability(468, \"Train_Ghost_quick\", cmd_quick, 562), Function.ability(469, \"Train_Hellbat_quick\", cmd_quick, 596), Function.ability(470, \"Train_Hellion_quick\",", "1354), Function.ability(500, \"Train_VoidRay_quick\", cmd_quick, 950), Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick, 976), Function.ability(502,", "Function.ability(306, \"Morph_Lurker_quick\", cmd_quick, 2332), Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick, 2112), Function.ability(308, \"Morph_Mothership_quick\",", "into an `Arguments` object. Returns: A new `FunctionCall` instance. 
\"\"\"", "3660), Function.ability(262, \"Halt_Building_quick\", cmd_quick, 315, 3660), Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick, 348,", "1159), Function.ability(103, \"BurrowDown_quick\", cmd_quick, 3661), Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick, 1374, 3661),", "Function.ability(311, \"Morph_Overseer_quick\", cmd_quick, 1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick, 1450), Function.ability(313, \"Morph_Ravager_quick\",", "enum(cls, options): \"\"\"Create an ArgumentType where you choose one of", "1342), Function.ability(468, \"Train_Ghost_quick\", cmd_quick, 562), Function.ability(469, \"Train_Hellbat_quick\", cmd_quick, 596), Function.ability(470,", "action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add() screen_rect = point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1)", "2063), Function.ability(180, \"Effect_Blink_screen\", cmd_screen, 3687), Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen, 1442, 3687),", "(0, 0), lambda a: point.Point(*a).floor()) @classmethod def spec(cls, id_, name,", "\"Harvest_Return_Probe_quick\", cmd_quick, 299, 3667), Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick, 296, 3667), Function.ability(274,", "= action.action_ui.multi_panel select.type = select_unit_act select.unit_index = select_unit_id def control_group(action,", "TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap], autocast: [], } # Which ones", "raise ValueError(\"Function names must be unique.\") def __getattr__(self, name): return", "Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596, 3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\", cmd_quick, 451, 3659),", "select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army: 
[TYPES.select_add],", "Function.ability(487, \"Train_Raven_quick\", cmd_quick, 622), Function.ability(488, \"Train_Reaper_quick\", cmd_quick, 561), Function.ability(489, \"Train_Roach_quick\",", "[TYPES.select_unit_act, TYPES.select_unit_id], control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates:", "of the function. Should be unique. ability_id: The ability id", "\"Build_CyberneticsCore_screen\", cmd_screen, 894), Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen, 891), Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen,", "\"BurrowUp_Infestor_quick\", cmd_quick, 1446, 3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396, 3662), Function.ability(126,", "cmd_quick, 296, 3667), Function.ability(274, \"HoldPosition_quick\", cmd_quick, 18), Function.ability(275, \"Land_screen\", cmd_screen,", "functions} if len(self._func_dict) != len(self._func_list): raise ValueError(\"Function names must be", "Function.ability(496, \"Train_Thor_quick\", cmd_quick, 594), Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick, 1348), Function.ability(498, \"Train_VikingFighter_quick\",", "250, 3659), Function.ability(163, \"Cancel_Nuke_quick\", cmd_quick, 1623, 3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick,", "cmd_quick, 3667), Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick, 1184, 3667), Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick,", "return cls(-1, \"<none>\", (len(options),), lambda a: options[a[0]]) @classmethod def scalar(cls,", "select_point_act: What to do with the unit at the point.", "def __getitem__(self, key): if isinstance(key, numbers.Number): return self._func_list[key] return self._func_dict[key]", "Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick, 1093), Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick, 1094), 
Function.ability(368, \"Research_GroovedSpines_quick\",", "1566, 3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567, 3692), Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick,", "26, 3684), Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen, 28, 3684), Function.ability(234, \"Effect_Stim_quick\", cmd_quick,", "\"name\", \"ability_id\", \"general_id\", \"function_type\", \"args\", \"avail_fn\"])): \"\"\"Represents a function action.", "2365, 3659), Function.ability(162, \"Cancel_NeuralParasite_quick\", cmd_quick, 250, 3659), Function.ability(163, \"Cancel_Nuke_quick\", cmd_quick,", "needed so that no function takes the same type twice.", "it. select_unit_act: What to do when selecting a unit by", "TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group: [TYPES.control_group_act,", "315, 3660), Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick, 348, 3660), Function.ability(264, \"Harvest_Gather_screen\", cmd_screen,", "cmd_quick, 1567, 3692), Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick, 3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick,", "a limit of 255 function arguments, so build something similar.", "(len(options),), lambda a: options[a[0]]) @classmethod def scalar(cls, value): \"\"\"Create an", "\"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532), Function.ability(356, \"Research_Blink_quick\", cmd_quick, 1593), Function.ability(357, \"Research_Burrow_quick\", cmd_quick,", "cmd_screen, 212, 3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap, 212, 3673), Function.ability(341, \"Rally_Morphing_Unit_screen\",", "Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick, 2112), Function.ability(308, \"Morph_Mothership_quick\", cmd_quick, 1847), Function.ability(309, \"Morph_OrbitalCommand_quick\",", "The 
values to store for the arguments of the action.", "iterable is provided, the values will be unpacked into an", "295, 3666), Function.ability(269, \"Harvest_Return_quick\", cmd_quick, 3667), Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick, 1184,", "591), Function.ability(493, \"Train_Stalker_quick\", cmd_quick, 917), Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick, 1356), Function.ability(495,", "\"Build_Reactor_Barracks_screen\", cmd_screen, 422, 3683), Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick, 455, 3683), Function.ability(76,", "Function.ability(66, \"Build_Nuke_quick\", cmd_quick, 710), Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen, 1161), Function.ability(68, \"Build_NydusWorm_screen\",", "\"FunctionCall\", [\"function\", \"arguments\"])): \"\"\"Represents a function call action. Attributes: function:", "id. select_unit_id: Which unit to select by id. select_worker: What", "the game. \"\"\" __slots__ = () def __str__(self): return \"%s/%s", "no_op: [], move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen,", "@classmethod def spec(cls, id_, name, args): \"\"\"Create a Function to", "cmd_screen, 2387), Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen, 3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368,", "\"Morph_SiegeMode_quick\", cmd_quick, 388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick,", "\"Build_SensorTower_screen\", cmd_screen, 326), Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen, 1155), Function.ability(85, \"Build_SpineCrawler_screen\", cmd_screen,", "add to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, 
sc_ui.ActionControlGroup.AppendAndSteal, ]),", "numbers.Number): return self._func_list[key] return self._func_dict[key] def __iter__(self): return iter(self._func_list) def", "select_add def select_larva(action): \"\"\"Select all larva.\"\"\" action.action_ui.select_larva.SetInParent() # Adds the", "cmd_screen, 3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368, 3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen,", "3678), Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen, 419, 3678), Function.ability(278, \"Land_Factory_screen\", cmd_screen, 520,", "proto out of python types. args: A list of the", "\"\"\"Create an ArgumentType to be used in ValidActions.\"\"\" return cls(id_,", "Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick, 3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick, 392, 3676), Function.ability(28,", "Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364), Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362), Function.ability(322, \"Morph_Unsiege_quick\",", "at the point. 
select_add: Whether to add the unit to", "to line them all up nicely.\"\"\" return \"%s/%s (%s)\" %", "with gen_actions.py Function.ability(12, \"Attack_screen\", cmd_screen, 3674), Function.ability(13, \"Attack_minimap\", cmd_minimap, 3674),", "3674), Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen, 2048, 3674), Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap, 2048,", "in ValidActions.\"\"\" return cls(id_, name, sizes, None) class Arguments(collections.namedtuple(\"Arguments\", [", "452, 3679), Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick, 417, 3679), Function.ability(284, \"Lift_Factory_quick\", cmd_quick,", "\"TrainWarp_Adept_screen\", cmd_screen, 1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417), Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen,", "3682), Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick, 487, 3682), Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen, 487,", "a specific unit from the multi-unit selection.\"\"\" select = action.action_ui.multi_panel", "1388, 3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick, 2016, 3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick,", "4, 3665), Function.ability(457, \"Train_Adept_quick\", cmd_quick, 922), Function.ability(458, \"Train_Baneling_quick\", cmd_quick, 80),", "{cmd_screen, cmd_minimap, autocast}} always = lambda _: True class Function(collections.namedtuple(", "cmd_quick, 3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315, 3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick,", "names must be unique.\") def __getattr__(self, name): return self._func_dict[name] def", "Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick, 403), Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick, 405), Function.ability(328, \"Morph_WarpGate_quick\",", "3683), 
Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick, 488, 3683), Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen, 488,", "Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188, 3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick, 3706), Function.ability(446,", "select_unit_act select.unit_index = select_unit_id def control_group(action, control_group_act, control_group_id): \"\"\"Act on", "cmd_screen, 422, 3683), Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick, 455, 3683), Function.ability(76, \"Build_Reactor_Factory_screen\",", "Function.ability(168, \"Cancel_Last_quick\", cmd_quick, 3671), Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick, 1038, 3671), Function.ability(170,", "cmd_minimap, autocast}} always = lambda _: True class Function(collections.namedtuple( \"Function\",", "represented by a point.Point.\"\"\" return cls(-1, \"<none>\", (0, 0), lambda", "524), Function.ability(491, \"Train_Sentry_quick\", cmd_quick, 921), Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick, 591), Function.ability(493,", "\"Scan_Move_minimap\", cmd_minimap, 19, 3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\",", "3701), Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick, 217), Function.ability(428, \"Research_WarpGate_quick\", cmd_quick, 1568), Function.ability(429,", "a unit from a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index = unload_id def build_queue(action,", "sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, 
sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType,", "732), Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick, 764), Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick, 1097), Function.ability(365,", "to support features.py and action conversion. ABILITY_IDS = collections.defaultdict(set) #", "Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen, 893), Function.ability(83, \"Build_SensorTower_screen\", cmd_screen, 326), Function.ability(84, \"Build_SpawningPool_screen\",", "select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), # Depends on", "\"Train_Zergling_quick\", cmd_quick, 1343), Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen, 1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen,", "2354, 3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick, 1373, 3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick,", "be unpacked into an `Arguments` object. Returns: A new `FunctionCall`", "If a `dict` or an iterable is provided, the values", "store for the arguments of the action. 
Can either be", "cmd_quick, 382, 3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick,", "\"Effect_Repair_screen\", cmd_screen, 3685), Function.ability(221, \"Effect_Repair_autocast\", autocast, 3685), Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen,", "assert function_type in ABILITY_FUNCTIONS return cls(id_, name, ability_id, general_id, function_type,", "in writing, software # distributed under the License is distributed", "autocast, 1388, 3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick, 2016, 3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\",", "return self._func_dict[key] def __iter__(self): return iter(self._func_list) def __len__(self): return len(self._func_list)", "cmd_quick, 803), Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick, 2720), Function.ability(405, \"Research_Stimpack_quick\", cmd_quick, 730),", "cmd_quick, 1530), Function.ability(331, \"Move_screen\", cmd_screen, 16), Function.ability(332, \"Move_minimap\", cmd_minimap, 16),", "\"Morph_Lair_quick\", cmd_quick, 1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick, 2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\", cmd_screen,", "\"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393), Function.ability(245, \"Effect_WidowMineAttack_screen\", cmd_screen, 2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast,", "cmd_screen, 3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen, 1694, 3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen,", "cmd_quick, 2081), Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick, 3677), Function.ability(24, \"Behavior_CloakOff_Banshee_quick\", cmd_quick, 393,", "__str__(self): return self.str() def str(self, space=False): \"\"\"String version. 
Set space=True", "\"Effect_Salvage_quick\", cmd_quick, 32), Function.ability(227, \"Effect_Scan_screen\", cmd_screen, 399), Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick,", "cmd_quick, 622), Function.ability(488, \"Train_Reaper_quick\", cmd_quick, 561), Function.ability(489, \"Train_Roach_quick\", cmd_quick, 1351),", "self.id def __str__(self): return self.str() def str(self, space=False): \"\"\"String version.", "Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick, 2340, 3661), Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick, 1386, 3661),", "cmd_screen, 2358), Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen, 2244), Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen, 1664),", "cmd_screen, 140), Function.ability(193, \"Effect_ForceField_screen\", cmd_screen, 1526), Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen, 74),", "3680), Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick, 388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556), Function.ability(319,", "3681), Function.ability(324, \"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725, 3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727,", "17), Function.ability(335, \"Rally_Units_screen\", cmd_screen, 3673), Function.ability(336, \"Rally_Units_minimap\", cmd_minimap, 3673), Function.ability(337,", "cmd_minimap, 207, 3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594), Function.ability(352, \"Research_AdvancedBallistics_quick\", cmd_quick,", "Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396, 3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast, 1396, 3662),", "in functions} if len(self._func_dict) != len(self._func_list): raise ValueError(\"Function names must", "key): if isinstance(key, numbers.Number): return self._func_list[key] return self._func_dict[key] def __iter__(self):", "an 
idle worker.\"\"\" action.action_ui.select_idle_worker.type = select_worker def select_army(action, select_add): \"\"\"Select", "cls(-1, \"<none>\", (len(options),), lambda a: options[a[0]]) @classmethod def scalar(cls, value):", "'Stim'.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued", "Function.ability(489, \"Train_Roach_quick\", cmd_quick, 1351), Function.ability(490, \"Train_SCV_quick\", cmd_quick, 524), Function.ability(491, \"Train_Sentry_quick\",", "Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick, 1378, 3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick, 1382, 3661),", "2342, 3662), Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick, 1388, 3662), Function.ability(133, \"BurrowUp_Roach_autocast\", autocast,", "Function.ability(298, \"Morph_Gateway_quick\", cmd_quick, 1520), Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick, 1220), Function.ability(300, \"Morph_Hellbat_quick\",", "set of types and functions that are valid for an", "def spec(cls, id_, name, sizes): \"\"\"Create an ArgumentType to be", "# Which argument types do each function need? FUNCTION_TYPES =", "cmd_quick, 3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312, 3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick,", "Function.ability(199, \"Effect_Heal_autocast\", autocast, 386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen, 169), Function.ability(201, \"Effect_ImmortalBarrier_quick\",", "select_unit(action, select_unit_act, select_unit_id): \"\"\"Select a specific unit from the multi-unit", "The list of known types. TYPES = Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(),", "a `dict`, or an iterable. 
If a `dict` or an", "License, Version 2.0 (the \"License\"); # you may not use", "sc_ui def no_op(action): del action def move_camera(action, minimap): \"\"\"Move the", "isinstance(arguments, dict): arguments = Arguments(**arguments) elif not isinstance(arguments, Arguments): arguments", "id_, name, sizes): \"\"\"Create an ArgumentType to be used in", "The function id, which is what the agent will use.", "select_unit_act: What to do when selecting a unit by id.", "\"Effect_Heal_autocast\", autocast, 386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen, 169), Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick,", "group. control_group_id: Which control group to do it with. select_point_act:", "cmd_quick, 950), Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick, 976), Function.ability(502, \"Train_WidowMine_quick\", cmd_quick, 614),", "\"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312, 3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313, 3703), Function.ability(436,", "Function.ability(452, \"Smart_minimap\", cmd_minimap, 1), Function.ability(453, \"Stop_quick\", cmd_quick, 3665), Function.ability(454, \"Stop_Building_quick\",", "cmd_quick, 148), Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick, 2389), Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick, 150),", "\"Effect_CalldownMULE_screen\", cmd_screen, 171), Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen, 2324), Function.ability(185, \"Effect_Charge_screen\", cmd_screen,", "421, 3682), Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick, 454, 3682), Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen,", "%s\" % (self.id, self.name, list(self.sizes)) @classmethod def enum(cls, options): \"\"\"Create", "169), Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick, 2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast, 
2328), Function.ability(203,", "Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen, 171), Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen, 2324), Function.ability(185, \"Effect_Charge_screen\",", "1384, 3662), Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick, 1446, 3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick,", "Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194, 3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252), Function.ability(450,", "560), Function.ability(478, \"Train_Medivac_quick\", cmd_quick, 620), Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick, 1853), Function.ability(480,", "of the action. Can either be an `Arguments` object, a", "cmd_screen, 26, 3684), Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen, 28, 3684), Function.ability(234, \"Effect_Stim_quick\",", "\"Build_Nuke_quick\", cmd_quick, 710), Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen, 1161), Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen,", "argument id. This is unique. name: The name of the", "cmd_quick, 1382, 3661), Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick, 1444, 3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\",", "\"\"\"Helper function for creating `FunctionCall`s with `Arguments`. Args: function: The", "\"arguments\"])): \"\"\"Represents a function call action. 
Attributes: function: Store the", "return len(self._func_list) # pylint: disable=line-too-long FUNCTIONS = Functions([ Function.ui_func(0, \"no_op\",", "\"Effect_NeuralParasite_screen\", cmd_screen, 249), Function.ability(213, \"Effect_NukeCalldown_screen\", cmd_screen, 1622), Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen,", "\"Build_Gateway_screen\", cmd_screen, 883), Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen, 327), Function.ability(59, \"Build_Hatchery_screen\", cmd_screen,", "cmd_quick, 2708), Function.ability(311, \"Morph_Overseer_quick\", cmd_quick, 1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick, 1450),", "the License for the specific language governing permissions and #", "to select by id. select_worker: What to do when selecting", "ability_id action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id): \"\"\"Toggle autocast.\"\"\"", "For select_point this could be: [[0], [23, 38]]. \"\"\" __slots__", "a function represented as a game ability.\"\"\" assert function_type in", "]), select_add=ArgumentType.enum([False, True]), # (select vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit,", "for the arguments of the action. Can either be an", "cmd_screen, 2063), Function.ability(180, \"Effect_Blink_screen\", cmd_screen, 3687), Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen, 1442,", "queued, screen): \"\"\"Do a command that needs a point on", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "\"fn\"])): \"\"\"Represents a single argument type. Attributes: id: The argument", "look at TYPES and FUNCTION_TYPES for more details. 
Attributes: screen:", "\"Effect_ShadowStride_screen\", cmd_screen, 2700, 3687), Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen, 171), Function.ability(184, \"Effect_CausticSpray_screen\",", "cmd_quick, 2535, 3659), Function.ability(168, \"Cancel_Last_quick\", cmd_quick, 3671), Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick,", "3680), Function.ability(315, \"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729, 3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731,", "Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick, 405), Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick, 1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\",", "Arguments(collections.namedtuple(\"Arguments\", [ \"screen\", \"minimap\", \"screen2\", \"queued\", \"control_group_act\", \"control_group_id\", \"select_point_act\", \"select_add\",", "What to do when selecting a worker. build_queue_id: Which build", "1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick, 793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803), Function.ability(404,", "list of types and actions for SC2.\"\"\" from __future__ import", "no function takes the same type twice. 
queued: Whether the", "\"Effect_ForceField_screen\", cmd_screen, 1526), Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen, 74), Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen,", "1418), Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen, 1414), Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen, 1413), Function.ability(511,", "955), Function.ability(496, \"Train_Thor_quick\", cmd_quick, 594), Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick, 1348), Function.ability(498,", "1390, 3661), Function.ability(117, \"BurrowUp_quick\", cmd_quick, 3662), Function.ability(118, \"BurrowUp_autocast\", autocast, 3662),", "Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick, 976), Function.ability(502, \"Train_WidowMine_quick\", cmd_quick, 614), Function.ability(503, \"Train_Zealot_quick\",", "30, 3684), Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen, 26, 3684), Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen,", "the ability_id of another ability if it can be represented", "\"ArgumentType\", [\"id\", \"name\", \"sizes\", \"fn\"])): \"\"\"Represents a single argument type.", "\"BurrowUp_Roach_quick\", cmd_quick, 1388, 3662), Function.ability(133, \"BurrowUp_Roach_autocast\", autocast, 1388, 3662), Function.ability(134,", "Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067, 3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695), Function.ability(394,", "Arguments(**arguments) elif not isinstance(arguments, Arguments): arguments = Arguments(*arguments) return cls(function,", "Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick, 217), Function.ability(428, \"Research_WarpGate_quick\", cmd_quick, 1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\",", "Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen, 892), Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen, 893), 
Function.ability(83, \"Build_SensorTower_screen\",", "cmd_quick, 2594, 3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596, 3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\",", "154), Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick, 156), Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick, 158), Function.ability(258,", "\"Build_InfestationPit_screen\", cmd_screen, 1160), Function.ability(62, \"Build_Interceptors_quick\", cmd_quick, 1042), Function.ability(63, \"Build_Interceptors_autocast\", autocast,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "\"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313, 3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314, 3703), Function.ability(437,", "Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen, 454, 3682), Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick, 487, 3682),", "Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), # (now vs add", "Function.ability(480, \"Train_Mutalisk_quick\", cmd_quick, 1346), Function.ability(481, \"Train_Observer_quick\", cmd_quick, 977), Function.ability(482, \"Train_Oracle_quick\",", "version. 
Set space=True to line them all up nicely.\"\"\" return", "queued def cmd_screen(action, ability_id, queued, screen): \"\"\"Do a command that", "Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065, 3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066, 3694),", "\"BurrowUp_quick\", cmd_quick, 3662), Function.ability(118, \"BurrowUp_autocast\", autocast, 3662), Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick,", "2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544), Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen, 1764), Function.ability(179,", "\"Rally_Building_screen\", cmd_screen, 195, 3673), Function.ability(338, \"Rally_Building_minimap\", cmd_minimap, 195, 3673), Function.ability(339,", "3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596, 3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\", cmd_quick, 451,", "action.action_ui.toggle_autocast.ability_id = ability_id class ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\", \"name\", \"sizes\", \"fn\"])):", "select_point_act def select_rect(action, select_add, screen, screen2): \"\"\"Select units within a", "Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick, 422, 3683), Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen, 422, 3683),", "3659), Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick, 1763, 3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick, 484,", "cmd_quick(action, ability_id, queued): \"\"\"Do a quick command like 'Stop' or", "401), Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick, 2391), Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick, 146), Function.ability(250,", "50), \"; \".join(str(a) for a in self.args)) class Functions(object): \"\"\"Represents", "\"Train_MothershipCore_quick\", cmd_quick, 1853), Function.ability(480, 
\"Train_Mutalisk_quick\", cmd_quick, 1346), Function.ability(481, \"Train_Observer_quick\", cmd_quick,", "Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick, 1763, 3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick, 484, 3659),", "conversion. ABILITY_IDS = collections.defaultdict(set) # {ability_id: {funcs}} for func in", "return self._func_dict[name] def __getitem__(self, key): if isinstance(key, numbers.Number): return self._func_list[key]", "cmd_screen, 2146), Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen, 2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen, 2162),", "766), Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick, 1283), Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick, 655), Function.ability(376,", "the types of args passed to function_type. avail_fn: For non-abilities,", "1373, 3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221, 3659), Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick,", "__iter__(self): return iter(self._func_list) def __len__(self): return len(self._func_list) # pylint: disable=line-too-long", "454, 3682), Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen, 454, 3682), Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick,", "\"Effect_Spray_screen\", cmd_screen, 3684), Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen, 30, 3684), Function.ability(232, \"Effect_Spray_Terran_screen\",", "19, 3674), Function.ability(20, \"Scan_Move_minimap\", cmd_minimap, 19, 3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick,", "function represented as a game ability.\"\"\" assert function_type in ABILITY_FUNCTIONS", "in FUNCTION_TYPES for how to construct the sc2 action proto", "Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick, 417, 3679), Function.ability(284, \"Lift_Factory_quick\", cmd_quick, 485, 3679),", "3669), 
Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913, 3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913,", "use. Attributes: types: A namedtuple of the types that the", "sc2 action proto out of python types. args: A list", "at TYPES and FUNCTION_TYPES for more details. Attributes: screen: A", "cmd_minimap, 3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen, 396, 3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap,", "Function.ability(129, \"BurrowUp_Queen_autocast\", autocast, 1435, 3662), Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick, 2342, 3662),", "3662), Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick, 2097, 3662), Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick, 1392,", "cmd_quick, 597), Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick, 920), Function.ability(466, \"Train_Disruptor_quick\", cmd_quick, 994),", "44), Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick, 766), Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick, 1283), Function.ability(375,", "a function call action. 
Attributes: function: Store the function id,", "cls(id_, name, None, None, None, args, None) def __hash__(self): #", "cmd_quick, 455, 3683), Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen, 455, 3683), Function.ability(77, \"Build_Reactor_Starport_quick\",", "value): \"\"\"Create an ArgumentType with a single scalar in range(value).\"\"\"", "1356), Function.ability(495, \"Train_Tempest_quick\", cmd_quick, 955), Function.ability(496, \"Train_Thor_quick\", cmd_quick, 594), Function.ability(497,", "# Unless required by applicable law or agreed to in", "Function.ability(186, \"Effect_Charge_autocast\", autocast, 1819), Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen, 261), Function.ability(188, \"Effect_Contaminate_screen\",", "cmd_minimap, 16), Function.ability(333, \"Patrol_screen\", cmd_screen, 17), Function.ability(334, \"Patrol_minimap\", cmd_minimap, 17),", "minimap): \"\"\"Move the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act, screen): \"\"\"Select", "Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855, 3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856, 3701),", "1189, 3704), Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190, 3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick,", "to do with the unit at the point. 
select_add: Whether", "3662), Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick, 1435, 3662), Function.ability(129, \"BurrowUp_Queen_autocast\", autocast, 1435,", "\"Halt_quick\", cmd_quick, 3660), Function.ability(262, \"Halt_Building_quick\", cmd_quick, 315, 3660), Function.ability(263, \"Halt_TerranBuild_quick\",", "f in FUNCTIONS if f.avail_fn} class FunctionCall(collections.namedtuple( \"FunctionCall\", [\"function\", \"arguments\"])):", "Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313, 3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314, 3703),", "1161), Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen, 1768), Function.ability(69, \"Build_PhotonCannon_screen\", cmd_screen, 887), Function.ability(70,", "1516), Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick, 2708), Function.ability(311, \"Morph_Overseer_quick\", cmd_quick, 1448), Function.ability(312,", "name): return self._func_dict[name] def __getitem__(self, key): if isinstance(key, numbers.Number): return", "the entire army.\"\"\" action.action_ui.select_army.selection_add = select_add def select_warp_gates(action, select_add): \"\"\"Select", "\"Build_Reactor_Factory_screen\", cmd_screen, 455, 3683), Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick, 488, 3683), Function.ability(78,", "173), Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick, 76), Function.ability(198, \"Effect_Heal_screen\", cmd_screen, 386), Function.ability(199,", "action proto out of python types. 
args: A list of", "Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick, 1283), Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick, 655), Function.ability(376, \"Research_NeuralParasite_quick\",", "Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen, 401), Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick, 2391), Function.ability(249, \"Hallucination_Archon_quick\",", "= select_unit_id def control_group(action, control_group_act, control_group_id): \"\"\"Act on a control", "Function.ability(474, \"Train_Infestor_quick\", cmd_quick, 1352), Function.ability(475, \"Train_Liberator_quick\", cmd_quick, 626), Function.ability(476, \"Train_Marauder_quick\",", "3659), Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick, 2331, 3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365,", "Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen, 169), Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick, 2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\",", "the Apache License, Version 2.0 (the \"License\"); # you may", "unload, lambda obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\", build_queue, lambda obs: obs.ui_data.HasField(\"production\")),", "3673), Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen, 212, 3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap, 212,", "\"Build_TechLab_quick\", cmd_quick, 3682), Function.ability(93, \"Build_TechLab_screen\", cmd_screen, 3682), Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick,", "limitations under the License. 
\"\"\"Define the static list of types", "cmd_screen, 1166), Function.ability(86, \"Build_Spire_screen\", cmd_screen, 1158), Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen, 1167),", "3678), Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen, 1524, 3678), Function.ability(280, \"Land_Starport_screen\", cmd_screen, 522,", "\"Patrol_minimap\", cmd_minimap, 17), Function.ability(335, \"Rally_Units_screen\", cmd_screen, 3673), Function.ability(336, \"Rally_Units_minimap\", cmd_minimap,", "3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick, 3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192, 3706),", "Function.ability(477, \"Train_Marine_quick\", cmd_quick, 560), Function.ability(478, \"Train_Medivac_quick\", cmd_quick, 620), Function.ability(479, \"Train_MothershipCore_quick\",", "more details. Attributes: screen: A point on the screen. minimap:", "select_unit_id: Which unit to select by id. select_worker: What to", "\"Hallucination_WarpPrism_quick\", cmd_quick, 162), Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick, 164), Function.ability(261, \"Halt_quick\", cmd_quick,", "Function.ability(349, \"Rally_Nexus_screen\", cmd_screen, 207, 3690), Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap, 207, 3690),", "[\"function\", \"arguments\"])): \"\"\"Represents a function call action. Attributes: function: Store", "__str__(self): return \"%s/%s %s\" % (self.id, self.name, list(self.sizes)) @classmethod def", "the License is distributed on an \"AS-IS\" BASIS, # WITHOUT", "1435, 3662), Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick, 2342, 3662), Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast,", "require. 
Unlike TYPES above, this includes the sizes for screen", "no_op), Function.ui_func(1, \"move_camera\", move_camera), Function.ui_func(2, \"select_point\", select_point), Function.ui_func(3, \"select_rect\", select_rect),", "ones require a point? POINT_REQUIRED_FUNCS = { False: {cmd_quick, autocast},", "Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick, 1454), Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick, 820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\",", "Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732, 3659), Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick, 517, 3659),", "for the action function. arguments: The values to store for", "Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick, 2535, 3659), Function.ability(168, \"Cancel_Last_quick\", cmd_quick, 3671), Function.ability(169,", "= control_group_act select.control_group_index = control_group_id def unload(action, unload_id): \"\"\"Unload a", "2330), Function.ability(314, \"Morph_Root_screen\", cmd_screen, 3680), Function.ability(315, \"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729, 3680),", "Function.ability(464, \"Train_Cyclone_quick\", cmd_quick, 597), Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick, 920), Function.ability(466, \"Train_Disruptor_quick\",", "Attributes: types: A namedtuple of the types that the functions", "3662), Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick, 1376, 3662), Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast, 1376,", "cmd_screen, 1416), Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen, 1418), Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen, 1414),", "function: Store the function id, eg 2 for select_point. 
arguments:", "cmd_screen, 3668), Function.ability(288, \"Load_Bunker_screen\", cmd_screen, 407, 3668), Function.ability(289, \"Load_Medivac_screen\", cmd_screen,", "3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312, 3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\", cmd_quick, 1313,", "Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick, 1390, 3661), Function.ability(117, \"BurrowUp_quick\", cmd_quick, 3662), Function.ability(118,", "= queued def cmd_screen(action, ability_id, queued, screen): \"\"\"Do a command", "\"Build_Factory_screen\", cmd_screen, 328), Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen, 885), Function.ability(55, \"Build_Forge_screen\", cmd_screen,", "Function.ability(71, \"Build_Reactor_quick\", cmd_quick, 3683), Function.ability(72, \"Build_Reactor_screen\", cmd_screen, 3683), Function.ability(73, \"Build_Reactor_Barracks_quick\",", "\"Harvest_Gather_Drone_screen\", cmd_screen, 1183, 3666), Function.ability(266, \"Harvest_Gather_Mule_screen\", cmd_screen, 166, 3666), Function.ability(267,", "= queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id, queued, minimap): \"\"\"Do a", "cmd_quick, 1451, 3659), Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick, 2331, 3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\",", "id: The argument id. This is unique. 
name: The name", "dict): arguments = Arguments(**arguments) elif not isinstance(arguments, Arguments): arguments =", "3668), Function.ability(288, \"Load_Bunker_screen\", cmd_screen, 407, 3668), Function.ability(289, \"Load_Medivac_screen\", cmd_screen, 394,", "3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567, 3692), Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick, 3693),", "{f.id: f for f in FUNCTIONS if f.avail_fn} class FunctionCall(collections.namedtuple(", "cmd_screen, 3674), Function.ability(13, \"Attack_minimap\", cmd_minimap, 3674), Function.ability(14, \"Attack_Attack_screen\", cmd_screen, 23,", "Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866, 3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701), Function.ability(424,", "Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913, 3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913, 3669),", "Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick, 484, 3659), Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick, 174, 3659),", "elif not isinstance(arguments, Arguments): arguments = Arguments(*arguments) return cls(function, arguments)", "cmd_quick, 393, 3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383, 3677), Function.ability(26, \"Behavior_CloakOn_quick\",", "unique. 
sizes: The max+1 of each of the dimensions this", "cmd_quick, 820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 46), Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick, 1223),", "3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick, 1384, 3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast, 1384,", "\"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38, 3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552, 3689), Function.ability(34,", "choose one of a set of known values.\"\"\" return cls(-1,", "\"Attack_Attack_screen\", cmd_screen, 23, 3674), Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap, 23, 3674), Function.ability(16,", "A point on the screen. minimap: A point on the", "393, 3677), Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383, 3677), Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick,", "308, 3671), Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick, 1831, 3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick,", "Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862, 3699), Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863, 3699),", "3674), Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap, 23, 3674), Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen, 2048,", "or replace it. select_unit_act: What to do when selecting a", "on the screen. minimap: A point on the minimap. 
screen2:", "cmd_quick, 1314, 3703), Function.ability(437, \"Research_ZergGroundArmor_quick\", cmd_quick, 3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick,", "\"Research_InfernalPreigniter_quick\", cmd_quick, 761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44), Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick,", "\"Load_NydusWorm_screen\", cmd_screen, 2370, 3668), Function.ability(292, \"Load_Overlord_screen\", cmd_screen, 1406, 3668), Function.ability(293,", "to store for the arguments of the action. Can either", "cmd_screen, 331), Function.ability(40, \"Build_Assimilator_screen\", cmd_screen, 882), Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen, 1162),", "1317, 3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\", cmd_quick, 3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312,", "nicely.\"\"\" return \"%s/%s (%s)\" % (str(self.id).rjust(space and 4), self.name.ljust(space and", "cmd_screen, 2350), Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen, 2387), Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen, 3686),", "1353), Function.ability(464, \"Train_Cyclone_quick\", cmd_quick, 597), Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick, 920), Function.ability(466,", "\"Train_Disruptor_quick\", cmd_quick, 994), Function.ability(467, \"Train_Drone_quick\", cmd_quick, 1342), Function.ability(468, \"Train_Ghost_quick\", cmd_quick,", "not isinstance(arguments, Arguments): arguments = Arguments(*arguments) return cls(function, arguments) class", "build something similar. 
\"\"\" def __init__(self, functions): self._func_list = functions", "cmd_quick, 265), Function.ability(361, \"Research_CombatShield_quick\", cmd_quick, 731), Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick, 732),", "len(self._func_list): raise ValueError(\"Function names must be unique.\") def __getattr__(self, name):", "Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick, 299, 3667), Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick, 296, 3667),", "v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id: f for f in", "cmd_quick, 857, 3701), Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick, 217), Function.ability(428, \"Research_WarpGate_quick\", cmd_quick,", "\"Rally_Hatchery_Units_minimap\", cmd_minimap, 212, 3673), Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen, 199, 3673), Function.ability(342,", "3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732, 3659), Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick, 517,", "twice. 
queued: Whether the action should be done now or", "\"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396, 3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen, 1408, 3669), Function.ability(521,", "point.\"\"\" select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type = select_point_act def select_rect(action,", "a point on the minimap.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id =", "control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select,", "cmd_quick, 623), Function.ability(461, \"Train_Carrier_quick\", cmd_quick, 948), Function.ability(462, \"Train_Colossus_quick\", cmd_quick, 978),", "a more general action. 
function_type: One of the functions in", "For non-abilities, this function returns whether the function is valid.", "Function.ability(313, \"Morph_Ravager_quick\", cmd_quick, 2330), Function.ability(314, \"Morph_Root_screen\", cmd_screen, 3680), Function.ability(315, \"Morph_SpineCrawlerRoot_screen\",", "Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick, 162), Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick, 164), Function.ability(261, \"Halt_quick\",", "cmd_screen, 295, 3666), Function.ability(269, \"Harvest_Return_quick\", cmd_quick, 3667), Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick,", "3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566, 3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567,", "control group, selecting, setting, etc.\"\"\" select = action.action_ui.control_group select.action =", "cmd_quick, 2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081), Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick, 3677),", "\"Build_PhotonCannon_screen\", cmd_screen, 887), Function.ability(70, \"Build_Pylon_screen\", cmd_screen, 881), Function.ability(71, \"Build_Reactor_quick\", cmd_quick,", "\"sizes\", \"fn\"])): \"\"\"Represents a single argument type. 
Attributes: id: The", "Function.ui_func(2, \"select_point\", select_point), Function.ui_func(3, \"select_rect\", select_rect), Function.ui_func(4, \"select_control_group\", control_group), Function.ui_func(5,", "1435, 3662), Function.ability(129, \"BurrowUp_Queen_autocast\", autocast, 1435, 3662), Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick,", "\"Effect_Spray_Protoss_screen\", cmd_screen, 30, 3684), Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen, 26, 3684), Function.ability(233,", "cmd_quick, 731), Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick, 732), Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick, 764),", "an ability? ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap, autocast} # Which", "to be used in ValidActions.\"\"\" return cls(id_, name, sizes, None)", "3666), Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen, 298, 3666), Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen, 295,", "\"Stop_Redirect_quick\", cmd_quick, 1691, 3665), Function.ability(456, \"Stop_Stop_quick\", cmd_quick, 4, 3665), Function.ability(457,", "3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191, 3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705),", "type. Attributes: id: The argument id. This is unique. name:", "indexes to support features.py and action conversion. ABILITY_IDS = collections.defaultdict(set)", "argument, also unique. 
sizes: The max+1 of each of the", "Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick, 2391), Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick, 146), Function.ability(250, \"Hallucination_Colossus_quick\",", "\"Hallucination_Adept_quick\", cmd_quick, 2391), Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick, 146), Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick,", "Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen, 891), Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen, 322), Function.ability(51, \"Build_EvolutionChamber_screen\",", "\"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 46), Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick, 1223), Function.ability(381, \"Research_ProtossAirArmor_quick\", cmd_quick,", "2332), Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick, 2112), Function.ability(308, \"Morph_Mothership_quick\", cmd_quick, 1847), Function.ability(309,", "\"\"\"Create an ArgumentType with a single scalar in range(value).\"\"\" return", "that are valid for an agent to use. 
Attributes: types:", "def select_larva(action): \"\"\"Select all larva.\"\"\" action.action_ui.select_larva.SetInParent() # Adds the empty", "cls(id_, name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None) @classmethod def spec(cls,", "ValueError(\"Function names must be unique.\") def __getattr__(self, name): return self._func_dict[name]", "cmd_screen, 893), Function.ability(83, \"Build_SensorTower_screen\", cmd_screen, 326), Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen, 1155),", "cmd_quick, 1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick, 3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315,", "\"Build_TechLab_Factory_screen\", cmd_screen, 454, 3682), Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick, 487, 3682), Function.ability(99,", "3659), Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick, 1517, 3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709,", "1664), Function.ability(243, \"Effect_ViperConsume_screen\", cmd_screen, 2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393), Function.ability(245,", "2370, 3668), Function.ability(292, \"Load_Overlord_screen\", cmd_screen, 1406, 3668), Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen,", "1167), Function.ability(88, \"Build_Stargate_screen\", cmd_screen, 889), Function.ability(89, \"Build_Starport_screen\", cmd_screen, 329), Function.ability(90,", "import spatial_pb2 as sc_spatial from s2clientprotocol import ui_pb2 as sc_ui", "cmd_quick, 1438, 3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick, 2371, 3664), Function.ability(516, \"UnloadAllAt_screen\",", "id. This is unique. 
name: The name of the argument,", "distributed under the License is distributed on an \"AS-IS\" BASIS,", "Function.ability(162, \"Cancel_NeuralParasite_quick\", cmd_quick, 250, 3659), Function.ability(163, \"Cancel_Nuke_quick\", cmd_quick, 1623, 3659),", "control_group_act select.control_group_index = control_group_id def unload(action, unload_id): \"\"\"Unload a unit", "3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070, 3696), Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick, 1126),", "disable=line-too-long FUNCTIONS = Functions([ Function.ui_func(0, \"no_op\", no_op), Function.ui_func(1, \"move_camera\", move_camera),", "Function.ability(117, \"BurrowUp_quick\", cmd_quick, 3662), Function.ability(118, \"BurrowUp_autocast\", autocast, 3662), Function.ability(119, \"BurrowUp_Baneling_quick\",", "cmd_quick, 1354), Function.ability(500, \"Train_VoidRay_quick\", cmd_quick, 950), Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick, 976),", "Take a look at TYPES and FUNCTION_TYPES for more details.", "cmd_screen, 1442, 3687), Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen, 2700, 3687), Function.ability(183, \"Effect_CalldownMULE_screen\",", "from __future__ import absolute_import from __future__ import division from __future__", "loaded units. 
) # Which argument types do each function", "18), Function.ability(275, \"Land_screen\", cmd_screen, 3678), Function.ability(276, \"Land_Barracks_screen\", cmd_screen, 554, 3678),", "Function.ability(276, \"Land_Barracks_screen\", cmd_screen, 554, 3678), Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen, 419, 3678),", "\"Train_Baneling_quick\", cmd_quick, 80), Function.ability(459, \"Train_Banshee_quick\", cmd_quick, 621), Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick,", "Function.ability(287, \"Load_screen\", cmd_screen, 3668), Function.ability(288, \"Load_Bunker_screen\", cmd_screen, 407, 3668), Function.ability(289,", "cmd_quick, 1373, 3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221, 3659), Function.ability(151, \"Cancel_MorphHive_quick\",", "730), Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick, 3697), Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656, 3697),", "Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast, 78, 3685), Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen, 316, 3685),", "\"Hallucination_HighTemplar_quick\", cmd_quick, 150), Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick, 152), Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick,", "\"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062, 3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\",", "a ui action.\"\"\" return cls(id_, name, 0, 0, function_type, FUNCTION_TYPES[function_type],", "autocast, 3662), Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick, 1376, 3662), Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast,", "Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick, 1348), Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick, 624), Function.ability(499, 
\"Train_Viper_quick\",", "3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064, 3695), Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick, 3696),", "2544), Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen, 1764), Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen, 2063), Function.ability(180,", "select_army(action, select_add): \"\"\"Select the entire army.\"\"\" action.action_ui.select_army.selection_add = select_add def", "ArgumentType to be used in ValidActions.\"\"\" return cls(id_, name, sizes,", "199, 3673), Function.ability(343, \"Rally_Workers_screen\", cmd_screen, 3690), Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap, 3690),", "def cmd_quick(action, ability_id, queued): \"\"\"Do a quick command like 'Stop'", "functions: A namedtuple of all the functions. \"\"\" __slots__ =", "# The list of known types. TYPES = Arguments.types( screen=ArgumentType.point(),", "ability_id): \"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id = ability_id class ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\",", "build_queue_id=ArgumentType.scalar(10), # Depends on current build queue. unload_id=ArgumentType.scalar(500), # Depends", "action.action_ui.select_larva.SetInParent() # Adds the empty proto field. def select_unit(action, select_unit_act,", "of each of the dimensions this argument takes. fn: The", "cmd_quick, 1042), Function.ability(63, \"Build_Interceptors_autocast\", autocast, 1042), Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen, 323),", "known types. TYPES = Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]),", "1848, 3659), Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick, 1517, 3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick,", "the types that the functions require. 
Unlike TYPES above, this", "cmd_screen, 1682, 3674), Function.ability(19, \"Scan_Move_screen\", cmd_screen, 19, 3674), Function.ability(20, \"Scan_Move_minimap\",", "action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action,", "cmd_quick, 3675), Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick, 253, 3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick,", "# distributed under the License is distributed on an \"AS-IS\"", "\"\"\"Select all warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add = select_add def select_larva(action): \"\"\"Select", "select_unit, lambda obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\", select_idle_worker, lambda obs: obs.player_common.idle_worker_count", "current build queue. unload_id=ArgumentType.scalar(500), # Depends on the current loaded", "worker.\"\"\" action.action_ui.select_idle_worker.type = select_worker def select_army(action, select_add): \"\"\"Select the entire", "def __init__(self, functions): self._func_list = functions self._func_dict = {f.name: f", "\"Train_Drone_quick\", cmd_quick, 1342), Function.ability(468, \"Train_Ghost_quick\", cmd_quick, 562), Function.ability(469, \"Train_Hellbat_quick\", cmd_quick,", "to the game. 
\"\"\" __slots__ = () def __str__(self): return", "sc_spatial from s2clientprotocol import ui_pb2 as sc_ui def no_op(action): del", "\"Land_Starport_screen\", cmd_screen, 522, 3678), Function.ability(281, \"Lift_quick\", cmd_quick, 3679), Function.ability(282, \"Lift_Barracks_quick\",", "\"Train_VikingFighter_quick\", cmd_quick, 624), Function.ability(499, \"Train_Viper_quick\", cmd_quick, 1354), Function.ability(500, \"Train_VoidRay_quick\", cmd_quick,", "3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855, 3701),", "3682), Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick, 421, 3682), Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen, 421,", "name: The name of the function. Should be unique. ability_id:", "Function.ability(451, \"Smart_screen\", cmd_screen, 1), Function.ability(452, \"Smart_minimap\", cmd_minimap, 1), Function.ability(453, \"Stop_quick\",", "Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen, 2700, 3687), Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen, 171), Function.ability(184,", "3674), Function.ability(14, \"Attack_Attack_screen\", cmd_screen, 23, 3674), Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap, 23,", "cmd_quick, 1730, 3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732, 3659), Function.ability(166, \"Cancel_StarportAddOn_quick\",", "\"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567, 3692), Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick, 3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\",", "action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type = select_point_act def select_rect(action, select_add, screen, screen2):", "3685), Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen, 316, 3685), 
Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast, 316,", "2333, 3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113, 3659), Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick,", "in six.iteritems(kwargs)} return cls(**named) # The list of known types.", "select.selection_add = bool(select_add) def select_idle_worker(action, select_worker): \"\"\"Select an idle worker.\"\"\"", "ANY KIND, either express or implied. # See the License", "cmd_quick, 2391), Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick, 146), Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick, 148),", "1526), Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen, 74), Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen, 2714), Function.ability(196,", "3687), Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen, 2700, 3687), Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen, 171),", "the License. 
# You may obtain a copy of the", "\"\"\"Select a specific unit from the multi-unit selection.\"\"\" select =", "Function.ability(140, \"Cancel_quick\", cmd_quick, 3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594, 3659), Function.ability(142,", "k, v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id: f for f", "916), Function.ability(504, \"Train_Zergling_quick\", cmd_quick, 1343), Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen, 1419), Function.ability(506,", "Function.ability(39, \"Build_Armory_screen\", cmd_screen, 331), Function.ability(40, \"Build_Assimilator_screen\", cmd_screen, 882), Function.ability(41, \"Build_BanelingNest_screen\",", "cmd_quick, 658, 3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick,", "# See the License for the specific language governing permissions", "1623, 3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730, 3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick,", "lambda a: point.Point(*a).floor()) @classmethod def spec(cls, id_, name, sizes): \"\"\"Create", "action.action_ui.control_group select.action = control_group_act select.control_group_index = control_group_id def unload(action, unload_id):", "\"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394, 3661), Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick, 2108, 3661), Function.ability(110,", "select = action.action_ui.control_group select.action = control_group_act select.control_group_index = control_group_id def", "idle worker.\"\"\" action.action_ui.select_idle_worker.type = select_worker def select_army(action, select_add): \"\"\"Select the", "cmd_quick, 417, 3679), Function.ability(284, \"Lift_Factory_quick\", cmd_quick, 485, 3679), Function.ability(285, \"Lift_OrbitalCommand_quick\",", "if it can be represented by a more general action.", 
"Function.ability(469, \"Train_Hellbat_quick\", cmd_quick, 596), Function.ability(470, \"Train_Hellion_quick\", cmd_quick, 595), Function.ability(471, \"Train_HighTemplar_quick\",", "= {cmd_quick, cmd_screen, cmd_minimap, autocast} # Which ones require a", "3661), Function.ability(117, \"BurrowUp_quick\", cmd_quick, 3662), Function.ability(118, \"BurrowUp_autocast\", autocast, 3662), Function.ability(119,", "Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594, 3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596, 3659),", "__slots__ = () @classmethod def all_arguments(cls, function, arguments): \"\"\"Helper function", "[TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva: [], unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick:", "\"Build_Interceptors_quick\", cmd_quick, 1042), Function.ability(63, \"Build_Interceptors_autocast\", autocast, 1042), Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen,", "cmd_quick, 146), Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick, 148), Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick, 2389),", "Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552, 3689), Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick, 3688), Function.ability(35,", "626), Function.ability(476, \"Train_Marauder_quick\", cmd_quick, 563), Function.ability(477, \"Train_Marine_quick\", cmd_quick, 560), Function.ability(478,", "\"Morph_VikingFighterMode_quick\", cmd_quick, 405), Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick, 1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick,", "in a transport/nydus/command center. 
\"\"\" ___slots__ = () @classmethod def", "653, 3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654, 3698), Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick,", "Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864, 3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865, 3700),", "self._func_list[key] return self._func_dict[key] def __iter__(self): return iter(self._func_list) def __len__(self): return", "License is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES", "Should be unique. ability_id: The ability id to pass to", "up nicely.\"\"\" return \"%s/%s (%s)\" % (str(self.id).rjust(space and 4), self.name.ljust(space", "Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen, 2588), Function.ability(206, \"Effect_LockOn_screen\", cmd_screen, 2350), Function.ability(207, \"Effect_LocustSwoop_screen\",", "3673), Function.ability(337, \"Rally_Building_screen\", cmd_screen, 195, 3673), Function.ability(338, \"Rally_Building_minimap\", cmd_minimap, 195,", "TYPES.minimap], autocast: [], } # Which ones need an ability?", "minimap: A point on the minimap. 
screen2: The second point", "a in self.args)) class Functions(object): \"\"\"Represents the full set of", "Function.ability(243, \"Effect_ViperConsume_screen\", cmd_screen, 2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393), Function.ability(245, \"Effect_WidowMineAttack_screen\",", "move_camera), Function.ui_func(2, \"select_point\", select_point), Function.ui_func(3, \"select_rect\", select_rect), Function.ui_func(4, \"select_control_group\", control_group),", "\"Morph_OverlordTransport_quick\", cmd_quick, 2708), Function.ability(311, \"Morph_Overseer_quick\", cmd_quick, 1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick,", "\"BurrowUp_Queen_autocast\", autocast, 1435, 3662), Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick, 2342, 3662), Function.ability(131,", "454, 3682), Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick, 487, 3682), Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen,", "Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick, 421, 3682), Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen, 421, 3682),", "1727, 3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick, 403), Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick, 405),", "Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen, 203, 3690), Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap, 203, 3690),", "= {k: frozenset(v) for k, v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE =", "cmd_quick, 2552, 3689), Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick, 3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick,", "POINT_REQUIRED_FUNCS = { False: {cmd_quick, autocast}, True: {cmd_screen, cmd_minimap, autocast}}", "cmd_quick, 1194, 3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252), Function.ability(450, 
\"Research_ZerglingMetabolicBoost_quick\", cmd_quick,", "cmd_quick, 2014, 3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick, 1512, 3661), Function.ability(115, \"BurrowDown_WidowMine_quick\",", "1068, 3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069, 3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick,", "the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act, screen): \"\"\"Select a unit", "1042), Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen, 323), Function.ability(65, \"Build_Nexus_screen\", cmd_screen, 880), Function.ability(66,", "cls(id_, name, 0, 0, function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod def ability(cls,", "Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen, 1157), Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen, 1160), Function.ability(62, \"Build_Interceptors_quick\",", "ability_id: The ability id to pass to sc2. 
general_id: 0", "\"Research_ZergFlyerAttack_quick\", cmd_quick, 3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312, 3703), Function.ability(435, \"Research_ZergFlyerAttackLevel2_quick\",", "Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick, 76), Function.ability(198, \"Effect_Heal_screen\", cmd_screen, 386), Function.ability(199, \"Effect_Heal_autocast\",", "cmd_quick, 2375), Function.ability(39, \"Build_Armory_screen\", cmd_screen, 331), Function.ability(40, \"Build_Assimilator_screen\", cmd_screen, 882),", "386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen, 169), Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick, 2328), Function.ability(202,", "Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick, 1516), Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick, 2708), Function.ability(311, \"Morph_Overseer_quick\",", "action_cmd.ability_id = ability_id action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id):", "= select_point_act def select_rect(action, select_add, screen, screen2): \"\"\"Select units within", "[TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued,", "Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick, 158), Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick, 160), Function.ability(259, \"Hallucination_WarpPrism_quick\",", "screen, screen2): \"\"\"Select units within a rectangle.\"\"\" select = action.action_feature_layer.unit_selection_rect", "\"Morph_Mothership_quick\", cmd_quick, 1847), Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick, 1516), Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick,", "\"select_army\", select_army, lambda obs: obs.player_common.army_count > 0), Function.ui_func(8, \"select_warp_gates\", 
select_warp_gates,", "1482), Function.ability(359, \"Research_Charge_quick\", cmd_quick, 1592), Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick, 265), Function.ability(361,", "select_add: Whether to add the unit to the selection or", "3683), Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen, 422, 3683), Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick, 455,", "\"Stop_quick\", cmd_quick, 3665), Function.ability(454, \"Stop_Building_quick\", cmd_quick, 2057, 3665), Function.ability(455, \"Stop_Redirect_quick\",", "\"Train_Hellion_quick\", cmd_quick, 595), Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick, 919), Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick,", "\"TrainWarp_Sentry_screen\", cmd_screen, 1418), Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen, 1414), Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen,", "cmd_minimap, 396, 3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen, 1408, 3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\",", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "return cls(-1, \"<none>\", (0, 0), lambda a: point.Point(*a).floor()) @classmethod def", "\"Research_GlialRegeneration_quick\", cmd_quick, 216), Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick, 1093), Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick,", "the arguments of the action. 
Can either be an `Arguments`", "\"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick,", "if isinstance(arguments, dict): arguments = Arguments(**arguments) elif not isinstance(arguments, Arguments):", "\"Land_Factory_screen\", cmd_screen, 520, 3678), Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen, 1524, 3678), Function.ability(280,", "\"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189, 3704), Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190, 3704), Function.ability(440,", "Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316, 3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317, 3702),", "\"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314, 3703), Function.ability(437, \"Research_ZergGroundArmor_quick\", cmd_quick, 3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\",", "writing, software # distributed under the License is distributed on", "\"BurrowUp_Ultralisk_quick\", cmd_quick, 1514, 3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast, 1514, 3662), Function.ability(137,", "1217, 3659), Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick, 2333, 3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick,", "an Arguments of the possible Types.\"\"\" named = {name: type_._replace(id=Arguments._fields.index(name),", "0), Function.ui_func(9, \"select_larva\", select_larva, lambda obs: obs.player_common.larva_count > 0), Function.ui_func(10,", "@classmethod def ui_func(cls, id_, name, function_type, avail_fn=always): \"\"\"Define a function", "cmd_quick, 1221, 3659), Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick, 1219, 3659), Function.ability(152, \"Cancel_MorphLair_quick\",", "Function.ability(494, \"Train_SwarmHost_quick\", cmd_quick, 
1356), Function.ability(495, \"Train_Tempest_quick\", cmd_quick, 955), Function.ability(496, \"Train_Thor_quick\",", "1188, 3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick, 3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192,", "\"Research_ConcussiveShells_quick\", cmd_quick, 732), Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick, 764), Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick,", "ability id to pass to sc2. general_id: 0 for normal", "\"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862, 3699), Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863, 3699), Function.ability(418,", "name, function_type, avail_fn=always): \"\"\"Define a function representing a ui action.\"\"\"", "1417), Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen, 1416), Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen, 1418), Function.ability(509,", "Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen, 2338), Function.ability(190, \"Effect_EMP_screen\", cmd_screen, 1628), Function.ability(191, \"Effect_Explode_quick\",", "@classmethod def types(cls, **kwargs): \"\"\"Create an Arguments of the possible", "!= len(self._func_list): raise ValueError(\"Function names must be unique.\") def __getattr__(self,", "cmd_quick, 1727, 3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick, 403), Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick,", "action function. 
arguments: The values to store for the arguments", "transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index = unload_id def build_queue(action, build_queue_id): \"\"\"Cancel a unit", "cmd_screen, 321), Function.ability(43, \"Build_Bunker_screen\", cmd_screen, 324), Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen, 318),", "\"Build_FusionCore_screen\", cmd_screen, 333), Function.ability(57, \"Build_Gateway_screen\", cmd_screen, 883), Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen,", "select_warp_gates: [TYPES.select_add], select_larva: [], unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued],", "\"Lift_quick\", cmd_quick, 3679), Function.ability(282, \"Lift_Barracks_quick\", cmd_quick, 452, 3679), Function.ability(283, \"Lift_CommandCenter_quick\",", "148), Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick, 2389), Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick, 150), Function.ability(253,", "Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick, 2095, 3661), Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick, 1390, 3661),", "2375), Function.ability(39, \"Build_Armory_screen\", cmd_screen, 331), Function.ability(40, \"Build_Assimilator_screen\", cmd_screen, 882), Function.ability(41,", "1833, 3671), Function.ability(176, \"Effect_Abduct_screen\", cmd_screen, 2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544),", "Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick, 150), Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick, 152), Function.ability(254, \"Hallucination_Oracle_quick\",", "list of integers into something more meaningful to be set", "Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick, 1435, 3662), Function.ability(129, \"BurrowUp_Queen_autocast\", autocast, 1435, 3662),", "1314, 3703), Function.ability(437, \"Research_ZergGroundArmor_quick\", 
cmd_quick, 3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189,", "\"\"\"Create an Arguments of the possible Types.\"\"\" named = {name:", "3659), Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick, 1848, 3659), Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick, 1517,", "a set of known values.\"\"\" return cls(-1, \"<none>\", (len(options),), lambda", "\"Research_Blink_quick\", cmd_quick, 1593), Function.ability(357, \"Research_Burrow_quick\", cmd_quick, 1225), Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick,", "cmd_quick, 1064, 3695), Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick, 3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick,", "cmd_quick, 1356), Function.ability(495, \"Train_Tempest_quick\", cmd_quick, 955), Function.ability(496, \"Train_Thor_quick\", cmd_quick, 594),", "\"Build_CreepTumor_screen\", cmd_screen, 3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen, 1694, 3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\",", "it can go in a set(). 
return self.id def __str__(self):", "\"\"\"Act on a control group, selecting, setting, etc.\"\"\" select =", "3673), Function.ability(336, \"Rally_Units_minimap\", cmd_minimap, 3673), Function.ability(337, \"Rally_Building_screen\", cmd_screen, 195, 3673),", "\"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315, 3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316, 3702), Function.ability(432,", "Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen, 1418), Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen, 1414), Function.ability(510, \"TrainWarp_Zealot_screen\",", "Function.ability(83, \"Build_SensorTower_screen\", cmd_screen, 326), Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen, 1155), Function.ability(85, \"Build_SpineCrawler_screen\",", "Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315, 3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316, 3702),", "the unit at the point. select_add: Whether to add the", "cmd_quick, 1396, 3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast, 1396, 3662), Function.ability(127, \"BurrowUp_Lurker_quick\",", "Function.ability(84, \"Build_SpawningPool_screen\", cmd_screen, 1155), Function.ability(85, \"Build_SpineCrawler_screen\", cmd_screen, 1166), Function.ability(86, \"Build_Spire_screen\",", "\"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856, 3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857, 3701), Function.ability(427,", "3675), Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick, 380, 3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683,", "this could be: [[0], [23, 38]]. 
\"\"\" __slots__ = ()", "cmd_quick, 919), Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick, 1345), Function.ability(473, \"Train_Immortal_quick\", cmd_quick, 979),", "\"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375), Function.ability(39, \"Build_Armory_screen\", cmd_screen,", "Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen, 211, 3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211, 3690),", "() @classmethod def ui_func(cls, id_, name, function_type, avail_fn=always): \"\"\"Define a", "\"Build_Assimilator_screen\", cmd_screen, 882), Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen, 1162), Function.ability(42, \"Build_Barracks_screen\", cmd_screen,", "action.action_ui.select_idle_worker.type = select_worker def select_army(action, select_add): \"\"\"Select the entire army.\"\"\"", "3683), Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick, 422, 3683), Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen, 422,", "3659), Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick, 2535, 3659), Function.ability(168, \"Cancel_Last_quick\", cmd_quick, 3671),", "Function.ability(486, \"Train_Queen_quick\", cmd_quick, 1632), Function.ability(487, \"Train_Raven_quick\", cmd_quick, 622), Function.ability(488, \"Train_Reaper_quick\",", "Unlike TYPES above, this includes the sizes for screen and", "\"Build_Reactor_Starport_screen\", cmd_screen, 488, 3683), Function.ability(79, \"Build_Refinery_screen\", cmd_screen, 320), Function.ability(80, \"Build_RoachWarren_screen\",", "for that function, each being a list of ints. 
For", "obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\", select_idle_worker, lambda obs: obs.player_common.idle_worker_count > 0),", "651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864, 3700),", "the control group. control_group_id: Which control group to do it", "Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap, 2048, 3674), Function.ability(18, \"Attack_Redirect_screen\", cmd_screen, 1682, 3674),", "= unload_id def build_queue(action, build_queue_id): \"\"\"Cancel a unit in the", "Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick, 2331, 3659), Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365, 3659),", "select_point_act, screen): \"\"\"Select a unit at a point.\"\"\" select =", "Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186, 3705), Function.ability(443,", "3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394, 3661), Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick, 2108,", "cmd_quick, 1512, 3661), Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick, 2095, 3661), Function.ability(116, \"BurrowDown_Zergling_quick\",", "2708), Function.ability(311, \"Morph_Overseer_quick\", cmd_quick, 1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick, 1450), Function.ability(313,", "Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick, 1444, 3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394, 3661),", "1682, 3674), Function.ability(19, \"Scan_Move_screen\", cmd_screen, 19, 3674), Function.ability(20, \"Scan_Move_minimap\", cmd_minimap,", "3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862, 3699), 
Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863,", "(value,), lambda a: a[0]) @classmethod def point(cls): # No range", "1067, 3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062,", "3660), Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick, 348, 3660), Function.ability(264, \"Harvest_Gather_screen\", cmd_screen, 3666),", "for screen and minimap. functions: A namedtuple of all the", "__slots__ = () @classmethod def ui_func(cls, id_, name, function_type, avail_fn=always):", "Function.ability(476, \"Train_Marauder_quick\", cmd_quick, 563), Function.ability(477, \"Train_Marine_quick\", cmd_quick, 560), Function.ability(478, \"Train_Medivac_quick\",", "name: The name of the argument, also unique. sizes: The", "Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen, 2370, 3668), Function.ability(292, \"Load_Overlord_screen\", cmd_screen, 1406, 3668),", "\"TrainWarp_Stalker_screen\", cmd_screen, 1414), Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen, 1413), Function.ability(511, \"UnloadAll_quick\", cmd_quick,", "\"screen2\", \"queued\", \"control_group_act\", \"control_group_id\", \"select_point_act\", \"select_add\", \"select_unit_act\", \"select_unit_id\", \"select_worker\", \"build_queue_id\",", "\"Hallucination_Probe_quick\", cmd_quick, 156), Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick, 158), Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick,", "argument takes. 
fn: The function to convert the list of", "144), Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen, 1036), Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen, 2346), Function.ability(220,", "3674), Function.ability(18, \"Attack_Redirect_screen\", cmd_screen, 1682, 3674), Function.ability(19, \"Scan_Move_screen\", cmd_screen, 19,", "\"Attack_Redirect_screen\", cmd_screen, 1682, 3674), Function.ability(19, \"Scan_Move_screen\", cmd_screen, 19, 3674), Function.ability(20,", "Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick, 1392, 3662), Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast, 1392, 3662),", "Function.ability(456, \"Stop_Stop_quick\", cmd_quick, 4, 3665), Function.ability(457, \"Train_Adept_quick\", cmd_quick, 922), Function.ability(458,", "cmd_screen, 889), Function.ability(89, \"Build_Starport_screen\", cmd_screen, 329), Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen, 2505),", "permissions and # limitations under the License. 
\"\"\"Define the static", "This is needed so that no function takes the same", "arguments: The list of arguments for that function, each being", "point(cls): # No range because it's unknown at this time.", "3666), Function.ability(269, \"Harvest_Return_quick\", cmd_quick, 3667), Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick, 1184, 3667),", "Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44), Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick, 766), Function.ability(374, \"Research_MuscularAugments_quick\",", "657, 3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658, 3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick,", "1384, 3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast, 1384, 3662), Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick,", "Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362), Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick, 390), Function.ability(323, \"Morph_Uproot_quick\",", "a: point.Point(*a).floor()) @classmethod def spec(cls, id_, name, sizes): \"\"\"Create an", "space=False): \"\"\"String version. 
Set space=True to line them all up", "screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), # (now vs add to", "\"Effect_HunterSeekerMissile_screen\", cmd_screen, 169), Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick, 2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast,", "cmd_screen, 1161), Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen, 1768), Function.ability(69, \"Build_PhotonCannon_screen\", cmd_screen, 887),", "an ArgumentType where you choose one of a set of", "Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick, 413, 3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438, 3664),", "3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564, 3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick, 3694),", "\"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913, 3669), ]) # pylint: enable=line-too-long # Some", "\"screen\", \"minimap\", \"screen2\", \"queued\", \"control_group_act\", \"control_group_id\", \"select_point_act\", \"select_add\", \"select_unit_act\", \"select_unit_id\",", "\"BurrowUp_Drone_quick\", cmd_quick, 1380, 3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick, 1384, 3662), Function.ability(123,", "convert the list of integers into something more meaningful to", "def __hash__(self): # So it can go in a set().", "1162), Function.ability(42, \"Build_Barracks_screen\", cmd_screen, 321), Function.ability(43, \"Build_Bunker_screen\", cmd_screen, 324), Function.ability(44,", "select_unit_act, select_unit_id): \"\"\"Select a specific unit from the multi-unit selection.\"\"\"", "316, 3685), Function.ability(226, \"Effect_Salvage_quick\", cmd_quick, 32), Function.ability(227, \"Effect_Scan_screen\", cmd_screen, 399),", "all up nicely.\"\"\" return \"%s/%s (%s)\" % (str(self.id).rjust(space and 4),", 
"Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick, 1380, 3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick, 1384, 3662),", "TYPES and FUNCTION_TYPES for more details. Attributes: screen: A point", "195, 3673), Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen, 212, 3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap,", "= { no_op: [], move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen], select_rect:", "cmd_quick, 855, 3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856, 3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\",", "Function.ability(505, \"TrainWarp_Adept_screen\", cmd_screen, 1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417), Function.ability(507, \"TrainWarp_HighTemplar_screen\",", "cmd_screen, 261), Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen, 1825), Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen, 2338),", "\"UnloadAllAt_Overlord_screen\", cmd_screen, 1408, 3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408, 3669), Function.ability(522,", "being a list of ints. For select_point this could be:", "cmd_screen, 2048, 3674), Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap, 2048, 3674), Function.ability(18, \"Attack_Redirect_screen\",", "\"\"\"The full list of argument types. 
Take a look at", "158), Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick, 160), Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick, 162), Function.ability(260,", "{cmd_quick, cmd_screen, cmd_minimap, autocast} # Which ones require a point?", "3662), Function.ability(130, \"BurrowUp_Ravager_quick\", cmd_quick, 2342, 3662), Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast, 2342,", "\"Train_Adept_quick\", cmd_quick, 922), Function.ability(458, \"Train_Baneling_quick\", cmd_quick, 80), Function.ability(459, \"Train_Banshee_quick\", cmd_quick,", "cmd_quick, 946), Function.ability(485, \"Train_Probe_quick\", cmd_quick, 1006), Function.ability(486, \"Train_Queen_quick\", cmd_quick, 1632),", "\"move_camera\", move_camera), Function.ui_func(2, \"select_point\", select_point), Function.ui_func(3, \"select_rect\", select_rect), Function.ui_func(4, \"select_control_group\",", "3664), Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick, 408, 3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick, 413,", "\"Cancel_MorphMothership_quick\", cmd_quick, 1848, 3659), Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick, 1517, 3659), Function.ability(157,", "\"UnloadAll_Bunker_quick\", cmd_quick, 408, 3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick, 413, 3664), Function.ability(514,", "# pylint: enable=line-too-long # Some indexes to support features.py and", "\"BurrowDown_Baneling_quick\", cmd_quick, 1374, 3661), Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick, 1378, 3661), Function.ability(106,", "type twice. 
queued: Whether the action should be done now", "\"BurrowUp_Roach_autocast\", autocast, 1388, 3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick, 2016, 3662), Function.ability(135,", "\"Train_Viper_quick\", cmd_quick, 1354), Function.ability(500, \"Train_VoidRay_quick\", cmd_quick, 950), Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick,", "\"Rally_Hatchery_Workers_screen\", cmd_screen, 211, 3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211, 3690), Function.ability(349,", "cmd_screen, 407, 3668), Function.ability(289, \"Load_Medivac_screen\", cmd_screen, 394, 3668), Function.ability(290, \"Load_NydusNetwork_screen\",", "{f.name: f for f in functions} if len(self._func_dict) != len(self._func_list):", "Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick, 1386, 3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick, 2014, 3661),", "\"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727, 3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick, 403), Function.ability(327, \"Morph_VikingFighterMode_quick\",", "single scalar in range(value).\"\"\" return cls(-1, \"<none>\", (value,), lambda a:", "38]]. 
\"\"\" __slots__ = () @classmethod def all_arguments(cls, function, arguments):", "Function(collections.namedtuple( \"Function\", [\"id\", \"name\", \"ability_id\", \"general_id\", \"function_type\", \"args\", \"avail_fn\"])): \"\"\"Represents", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord)", "19, 3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081),", "3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186, 3705),", "select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type = select_point_act def select_rect(action, select_add,", "cmd_quick, 1184, 3667), Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick, 167, 3667), Function.ability(272, \"Harvest_Return_Probe_quick\",", "of the types of args passed to function_type. avail_fn: For", "913, 3669), ]) # pylint: enable=line-too-long # Some indexes to", "def build_queue(action, build_queue_id): \"\"\"Cancel a unit in the build queue.\"\"\"", "@classmethod def ability(cls, id_, name, function_type, ability_id, general_id=0): \"\"\"Define a", "cmd_quick, 1351), Function.ability(490, \"Train_SCV_quick\", cmd_quick, 524), Function.ability(491, \"Train_Sentry_quick\", cmd_quick, 921),", "The value to store for the action function. arguments: The", "s2clientprotocol import spatial_pb2 as sc_spatial from s2clientprotocol import ui_pb2 as", "cmd_quick, 1394, 3661), Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick, 2108, 3661), Function.ability(110, \"BurrowDown_Queen_quick\",", "control_group_act: What to do with the control group. 
control_group_id: Which", "cmd_screen, 1628), Function.ability(191, \"Effect_Explode_quick\", cmd_quick, 42), Function.ability(192, \"Effect_Feedback_screen\", cmd_screen, 140),", "\"Halt_Building_quick\", cmd_quick, 315, 3660), Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick, 348, 3660), Function.ability(264,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "control_group(action, control_group_act, control_group_id): \"\"\"Act on a control group, selecting, setting,", "an agent to use. Attributes: types: A namedtuple of the", "= select.selection_screen_coord.add() screen_rect = point.Rect(screen, screen2) screen_rect.tl.assign_to(out_rect.p0) screen_rect.br.assign_to(out_rect.p1) select.selection_add =", "cmd_quick, 1384, 3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast, 1384, 3662), Function.ability(124, \"BurrowUp_Infestor_quick\",", "Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen, 2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen, 144), Function.ability(218, \"Effect_PsiStorm_screen\",", "\"Cancel_quick\", cmd_quick, 3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594, 3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\",", "= select_unit_act select.unit_index = select_unit_id def control_group(action, control_group_act, control_group_id): \"\"\"Act", "2389), Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick, 150), Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick, 152), Function.ability(254,", "\"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654, 3698), Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick, 3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\",", "Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308, 3671), Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick, 1831, 
3671),", "the screen.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command =", "point? POINT_REQUIRED_FUNCS = { False: {cmd_quick, autocast}, True: {cmd_screen, cmd_minimap,", "= {f.id: f for f in FUNCTIONS if f.avail_fn} class", "\"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362), Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick, 390), Function.ability(323, \"Morph_Uproot_quick\", cmd_quick,", "\"Research_PathogenGlands_quick\", cmd_quick, 1454), Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick, 820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick,", "f.avail_fn} class FunctionCall(collections.namedtuple( \"FunctionCall\", [\"function\", \"arguments\"])): \"\"\"Represents a function call", "Attributes: function: Store the function id, eg 2 for select_point.", "The second point for a rectangle. This is needed so", "cmd_quick, 1853), Function.ability(480, \"Train_Mutalisk_quick\", cmd_quick, 1346), Function.ability(481, \"Train_Observer_quick\", cmd_quick, 977),", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "will use. name: The name of the function. Should be", "Whether the action should be done now or later. control_group_act:", "Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen, 890), Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen, 886), Function.ability(102, \"Build_UltraliskCavern_screen\",", "range(value).\"\"\" return cls(-1, \"<none>\", (value,), lambda a: a[0]) @classmethod def", "The name of the argument, also unique. 
sizes: The max+1", "cmd_minimap: [TYPES.queued, TYPES.minimap], autocast: [], } # Which ones need", "def ability(cls, id_, name, function_type, ability_id, general_id=0): \"\"\"Define a function", "ui_pb2 as sc_ui def no_op(action): del action def move_camera(action, minimap):", "394, 3668), Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen, 1437, 3668), Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen,", "[TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva: [],", "\"BurrowUp_Zergling_autocast\", autocast, 1392, 3662), Function.ability(140, \"Cancel_quick\", cmd_quick, 3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\",", "\"Effect_ViperConsume_screen\", cmd_screen, 2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393), Function.ability(245, \"Effect_WidowMineAttack_screen\", cmd_screen,", "\"Harvest_Gather_Probe_screen\", cmd_screen, 298, 3666), Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen, 295, 3666), Function.ability(269,", "1380, 3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick, 1384, 3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\", autocast,", "3665), Function.ability(454, \"Stop_Building_quick\", cmd_quick, 2057, 3665), Function.ability(455, \"Stop_Redirect_quick\", cmd_quick, 1691,", "cmd_quick, 594), Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick, 1348), Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick, 624),", "abilities, and the ability_id of another ability if it can", "unload_id): \"\"\"Unload a unit from a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index = unload_id", "\"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550, 3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\",", 
"Function.ability(163, \"Cancel_Nuke_quick\", cmd_quick, 1623, 3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730, 3659),", "\"Attack_Attack_minimap\", cmd_minimap, 23, 3674), Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen, 2048, 3674), Function.ability(17,", "\"\"\"Represents a single argument type. Attributes: id: The argument id.", "3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast, 1396, 3662), Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick, 2110,", "195, 3673), Function.ability(338, \"Rally_Building_minimap\", cmd_minimap, 195, 3673), Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen,", "select_idle_worker, lambda obs: obs.player_common.idle_worker_count > 0), Function.ui_func(7, \"select_army\", select_army, lambda", "cmd_quick, 766), Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick, 1283), Function.ability(375, \"Research_NeosteelFrame_quick\", cmd_quick, 655),", "Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654, 3698), Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick, 3699), Function.ability(415,", "select_idle_worker(action, select_worker): \"\"\"Select an idle worker.\"\"\" action.action_ui.select_idle_worker.type = select_worker def", "{ False: {cmd_quick, autocast}, True: {cmd_screen, cmd_minimap, autocast}} always =", "cmd_quick, 38, 3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552, 3689), Function.ability(34, \"Behavior_HoldFireOn_quick\",", "unit to target in a transport/nydus/command center. \"\"\" ___slots__ =", "camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act, screen): \"\"\"Select a unit at", "def control_group(action, control_group_act, control_group_id): \"\"\"Act on a control group, selecting,", "worker. build_queue_id: Which build queue index to target. 
unload_id: Which", "import ui_pb2 as sc_ui def no_op(action): del action def move_camera(action,", "lambda obs: obs.player_common.warp_gate_count > 0), Function.ui_func(9, \"select_larva\", select_larva, lambda obs:", "\"Effect_AutoTurret_screen\", cmd_screen, 1764), Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen, 2063), Function.ability(180, \"Effect_Blink_screen\", cmd_screen,", "1194, 3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252), Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253),", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "38, 3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552, 3689), Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick,", ">= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k: frozenset(v) for k, v", "247), Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen, 251), Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen, 2588), Function.ability(206,", "so build something similar. 
\"\"\" def __init__(self, functions): self._func_list =", "cmd_quick, 1093), Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick, 1094), Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick, 1282),", "Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick, 1223), Function.ability(381, \"Research_ProtossAirArmor_quick\", cmd_quick, 3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\",", "return cls(id_, name, 0, 0, function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod def", "2714), Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen, 173), Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick, 76), Function.ability(198,", "3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684, 3675), Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick, 380,", "cmd_quick, 1520), Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick, 1220), Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick, 1998),", "iter(self._func_list) def __len__(self): return len(self._func_list) # pylint: disable=line-too-long FUNCTIONS =", "\"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408, 3669), Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913, 3669), Function.ability(523,", "ABILITY_IDS = collections.defaultdict(set) # {ability_id: {funcs}} for func in FUNCTIONS:", "Function.ability(294, \"LoadAll_quick\", cmd_quick, 3663), Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick, 416, 3663), Function.ability(296,", "isinstance(key, numbers.Number): return self._func_list[key] return self._func_dict[key] def __iter__(self): return iter(self._func_list)", "cmd_quick, 299, 3667), Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick, 296, 3667), Function.ability(274, \"HoldPosition_quick\",", "cmd_minimap, autocast} # Which ones require a point? POINT_REQUIRED_FUNCS =", "Rights Reserved. 
# # Licensed under the Apache License, Version", "the functions in FUNCTION_TYPES for how to construct the sc2", "408, 3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick, 413, 3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick,", "cmd_screen, 885), Function.ability(55, \"Build_Forge_screen\", cmd_screen, 884), Function.ability(56, \"Build_FusionCore_screen\", cmd_screen, 333),", "3678), Function.ability(280, \"Land_Starport_screen\", cmd_screen, 522, 3678), Function.ability(281, \"Lift_quick\", cmd_quick, 3679),", "Function.ability(180, \"Effect_Blink_screen\", cmd_screen, 3687), Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen, 1442, 3687), Function.ability(182,", "for func in FUNCTIONS: if func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS", "specific language governing permissions and # limitations under the License.", "216), Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick, 1093), Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick, 1094), Function.ability(368,", "cmd_quick, 651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864,", "do with the control group. control_group_id: Which control group to", "a function representing a ui action.\"\"\" return cls(id_, name, 0,", "Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick, 856, 3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857, 3701),", "of python types. 
args: A list of the types of", "one of a set of known values.\"\"\" return cls(-1, \"<none>\",", "command like 'Stop' or 'Stim'.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id =", "\"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596, 3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\", cmd_quick, 451, 3659), Function.ability(144,", "\"Behavior_CloakOn_Banshee_quick\", cmd_quick, 392, 3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382, 3676), Function.ability(29,", "# Which ones require a point? POINT_REQUIRED_FUNCS = { False:", "spec(cls, id_, name, sizes): \"\"\"Create an ArgumentType to be used", "from the multi-unit selection.\"\"\" select = action.action_ui.multi_panel select.type = select_unit_act", "sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), # Depends on current selection. select_worker=ArgumentType.enum([", "Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544), Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen, 1764), Function.ability(179, \"Effect_BlindingCloud_screen\",", "\"Effect_KD8Charge_screen\", cmd_screen, 2588), Function.ability(206, \"Effect_LockOn_screen\", cmd_screen, 2350), Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen,", "\"Effect_LocustSwoop_screen\", cmd_screen, 2387), Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen, 3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen,", "\"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063, 3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064, 3695), Function.ability(397,", "Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804), Function.ability(371, \"Research_InfernalPreigniter_quick\", cmd_quick, 761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\",", 
"cmd_quick, 160), Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick, 162), Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick, 164),", "Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567, 3692), Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick, 3693), Function.ability(386,", "Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick, 3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38, 3689), Function.ability(33,", "\"Effect_NukeCalldown_screen\", cmd_screen, 1622), Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen, 2146), Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen,", "action.action_ui.select_warp_gates.selection_add = select_add def select_larva(action): \"\"\"Select all larva.\"\"\" action.action_ui.select_larva.SetInParent() #", "\"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 652, 3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653, 3698), Function.ability(413,", "Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364), Function.ability(321, \"Morph_ThorHighImpactMode_quick\",", "3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563, 3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick, 1564,", "1), Function.ability(452, \"Smart_minimap\", cmd_minimap, 1), Function.ability(453, \"Stop_quick\", cmd_quick, 3665), Function.ability(454,", "target in a transport/nydus/command center. 
\"\"\" ___slots__ = () @classmethod", "\"Research_CentrifugalHooks_quick\", cmd_quick, 1482), Function.ability(359, \"Research_Charge_quick\", cmd_quick, 1592), Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick,", "that needs a point on the minimap.\"\"\" action_cmd = action.action_feature_layer.unit_command", "# you may not use this file except in compliance", "point. select_add: Whether to add the unit to the selection", "a set(). return self.id def __str__(self): return self.str() def str(self,", "382, 3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692),", "as sc_ui def no_op(action): del action def move_camera(action, minimap): \"\"\"Move", "3692), Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick, 3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562, 3693),", "= Functions([ Function.ui_func(0, \"no_op\", no_op), Function.ui_func(1, \"move_camera\", move_camera), Function.ui_func(2, \"select_point\",", "warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add = select_add def select_larva(action): \"\"\"Select all larva.\"\"\"", "3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick, 1252), Function.ability(450, \"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253), Function.ability(451,", "\"Morph_Overseer_quick\", cmd_quick, 1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick, 1450), Function.ability(313, \"Morph_Ravager_quick\", cmd_quick,", "3667), Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick, 1184, 3667), Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick, 167,", "1192, 3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193, 3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick,", "Function.ability(69, 
\"Build_PhotonCannon_screen\", cmd_screen, 887), Function.ability(70, \"Build_Pylon_screen\", cmd_screen, 881), Function.ability(71, \"Build_Reactor_quick\",", "provided, the values will be unpacked into an `Arguments` object.", "on the minimap. screen2: The second point for a rectangle.", "\"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857, 3701), Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick, 217), Function.ability(428, \"Research_WarpGate_quick\",", "= control_group_id def unload(action, unload_id): \"\"\"Unload a unit from a", "Function.ability(70, \"Build_Pylon_screen\", cmd_screen, 881), Function.ability(71, \"Build_Reactor_quick\", cmd_quick, 3683), Function.ability(72, \"Build_Reactor_screen\",", "3659), Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick, 1219, 3659), Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick, 1217,", "\"Smart_minimap\", cmd_minimap, 1), Function.ability(453, \"Stop_quick\", cmd_quick, 3665), Function.ability(454, \"Stop_Building_quick\", cmd_quick,", "cmd_screen, 322), Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen, 1156), Function.ability(52, \"Build_Extractor_screen\", cmd_screen, 1154),", "\"\"\"Select a unit at a point.\"\"\" select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord)", "autocast, 1042), Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen, 323), Function.ability(65, \"Build_Nexus_screen\", cmd_screen, 880),", "421, 3682), Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen, 421, 3682), Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick,", "control_group_act, control_group_id): \"\"\"Act on a control group, selecting, setting, etc.\"\"\"", "SC2.\"\"\" from __future__ import absolute_import from __future__ import division from", "Function.ability(220, \"Effect_Repair_screen\", cmd_screen, 3685), Function.ability(221, \"Effect_Repair_autocast\", autocast, 
3685), Function.ability(222, \"Effect_Repair_Mule_screen\",", "\"Build_SpineCrawler_screen\", cmd_screen, 1166), Function.ability(86, \"Build_Spire_screen\", cmd_screen, 1158), Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen,", "\"Effect_MassRecall_screen\", cmd_screen, 3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368, 3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\",", "sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), # Depends on current build queue.", "cmd_screen, 1437, 3668), Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen, 2370, 3668), Function.ability(292, \"Load_Overlord_screen\",", "3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick, 413, 3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438,", "140), Function.ability(193, \"Effect_ForceField_screen\", cmd_screen, 1526), Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen, 74), Function.ability(195,", "3702), Function.ability(431, \"Research_ZergFlyerArmorLevel2_quick\", cmd_quick, 1316, 3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "be: [[0], [23, 38]]. \"\"\" __slots__ = () @classmethod def", "Function.ability(64, \"Build_MissileTurret_screen\", cmd_screen, 323), Function.ability(65, \"Build_Nexus_screen\", cmd_screen, 880), Function.ability(66, \"Build_Nuke_quick\",", "be represented by a more general action. 
function_type: One of", "Function.ability(14, \"Attack_Attack_screen\", cmd_screen, 23, 3674), Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap, 23, 3674),", "Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38, 3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552, 3689),", "action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id, queued, minimap): \"\"\"Do", "3659), Function.ability(168, \"Cancel_Last_quick\", cmd_quick, 3671), Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick, 1038, 3671),", "either be an `Arguments` object, a `dict`, or an iterable.", "Copyright 2017 Google Inc. All Rights Reserved. # # Licensed", "Function.ability(206, \"Effect_LockOn_screen\", cmd_screen, 2350), Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen, 2387), Function.ability(208, \"Effect_MassRecall_screen\",", "1187, 3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188, 3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick,", "unique. ability_id: The ability id to pass to sc2. general_id:", "that function, each being a list of ints. For select_point", "\"avail_fn\"])): \"\"\"Represents a function action. Attributes: id: The function id,", "cmd_screen, 2714), Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen, 173), Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick, 76),", "is valid. 
\"\"\" __slots__ = () @classmethod def ui_func(cls, id_,", "\"BurrowUp_autocast\", autocast, 3662), Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick, 1376, 3662), Function.ability(120, \"BurrowUp_Baneling_autocast\",", "Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen, 2324), Function.ability(185, \"Effect_Charge_screen\", cmd_screen, 1819), Function.ability(186, \"Effect_Charge_autocast\",", "152), Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick, 2114), Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick, 154), Function.ability(256,", "1184, 3667), Function.ability(271, \"Harvest_Return_Mule_quick\", cmd_quick, 167, 3667), Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick,", "\"Effect_Blink_Stalker_screen\", cmd_screen, 1442, 3687), Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen, 2700, 3687), Function.ability(183,", "484, 3659), Function.ability(147, \"Cancel_GravitonBeam_quick\", cmd_quick, 174, 3659), Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick,", "the same type twice. 
queued: Whether the action should be", "2596, 3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\", cmd_quick, 451, 3659), Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick,", "under the Apache License, Version 2.0 (the \"License\"); # you", "control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva:", "cmd_quick, 156), Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick, 158), Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick, 160),", "3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick, 1514, 3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast, 1514,", "def cmd_screen(action, ability_id, queued, screen): \"\"\"Do a command that needs", "autocast}} always = lambda _: True class Function(collections.namedtuple( \"Function\", [\"id\",", "3667), Function.ability(274, \"HoldPosition_quick\", cmd_quick, 18), Function.ability(275, \"Land_screen\", cmd_screen, 3678), Function.ability(276,", "Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick, 388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\",", "of the functions in FUNCTION_TYPES for how to construct the", "Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick, 2354, 3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick, 1373, 3659),", "cmd_screen, 16), Function.ability(332, \"Move_minimap\", cmd_minimap, 16), Function.ability(333, \"Patrol_screen\", cmd_screen, 17),", "import six from pysc2.lib import point from s2clientprotocol import spatial_pb2", "Function.ability(18, \"Attack_Redirect_screen\", cmd_screen, 1682, 3674), Function.ability(19, \"Scan_Move_screen\", cmd_screen, 19, 3674),", "3671), Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick, 304, 3671), Function.ability(171, 
\"Cancel_Queue5_quick\", cmd_quick, 306,", "2346), Function.ability(220, \"Effect_Repair_screen\", cmd_screen, 3685), Function.ability(221, \"Effect_Repair_autocast\", autocast, 3685), Function.ability(222,", "Function.ability(280, \"Land_Starport_screen\", cmd_screen, 522, 3678), Function.ability(281, \"Lift_quick\", cmd_quick, 3679), Function.ability(282,", "Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick, 624), Function.ability(499, \"Train_Viper_quick\", cmd_quick, 1354), Function.ability(500, \"Train_VoidRay_quick\",", "`Arguments`. Args: function: The value to store for the action", "\"Train_Oracle_quick\", cmd_quick, 954), Function.ability(483, \"Train_Overlord_quick\", cmd_quick, 1344), Function.ability(484, \"Train_Phoenix_quick\", cmd_quick,", "2097, 3662), Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick, 1392, 3662), Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast,", "args: A list of the types of args passed to", "cmd_screen, 2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast, 2099), Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen, 401),", "be unique.\") def __getattr__(self, name): return self._func_dict[name] def __getitem__(self, key):", "348, 3660), Function.ability(264, \"Harvest_Gather_screen\", cmd_screen, 3666), Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen, 1183,", "a list of ints. 
For select_point this could be: [[0],", "3679), Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick, 1522, 3679), Function.ability(286, \"Lift_Starport_quick\", cmd_quick, 518,", "`dict` or an iterable is provided, the values will be", "3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861, 3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862,", "frozenset(v) for k, v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id: f", "Can't use namedtuple since python3 has a limit of 255", "558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364), Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362), Function.ability(322,", "3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069, 3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070,", "455, 3683), Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen, 455, 3683), Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick,", "cmd_screen, 1733, 3691), Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen, 894), Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen,", "cmd_quick, 2057, 3665), Function.ability(455, \"Stop_Redirect_quick\", cmd_quick, 1691, 3665), Function.ability(456, \"Stop_Stop_quick\",", "function arguments, so build something similar. 
\"\"\" def __init__(self, functions):", "a command that needs a point on the minimap.\"\"\" action_cmd", "\"Attack_minimap\", cmd_minimap, 3674), Function.ability(14, \"Attack_Attack_screen\", cmd_screen, 23, 3674), Function.ability(15, \"Attack_Attack_minimap\",", "\"BurrowDown_Roach_quick\", cmd_quick, 1386, 3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick, 2014, 3661), Function.ability(114,", "\"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657, 3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658, 3697), Function.ability(410,", "obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\", select_idle_worker, lambda obs: obs.player_common.idle_worker_count > 0), Function.ui_func(7,", "cmd_screen, 1526), Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen, 74), Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen, 2714),", "\"Train_Overlord_quick\", cmd_quick, 1344), Function.ability(484, \"Train_Phoenix_quick\", cmd_quick, 946), Function.ability(485, \"Train_Probe_quick\", cmd_quick,", "obs.player_common.larva_count > 0), Function.ui_func(10, \"unload\", unload, lambda obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11,", "\"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594, 3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596, 3659), Function.ability(143,", "\"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187, 3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188, 3705), Function.ability(445,", "1316, 3702), Function.ability(432, \"Research_ZergFlyerArmorLevel3_quick\", cmd_quick, 1317, 3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\", cmd_quick,", "function_type, avail_fn=always): \"\"\"Define a function representing a ui action.\"\"\" return", "Function.ability(161, \"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365, 3659), Function.ability(162, 
\"Cancel_NeuralParasite_quick\", cmd_quick, 250, 3659),", "point on the screen.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id", "Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick, 2016, 3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick, 1514, 3662),", "\"Hallucination_Phoenix_quick\", cmd_quick, 154), Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick, 156), Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick,", "3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062, 3695),", "3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\", cmd_quick, 451, 3659), Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick, 314,", "\"Behavior_CloakOn_quick\", cmd_quick, 3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick, 392, 3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\",", "with the unit at the point. 
select_add: Whether to add", "lambda obs: obs.player_common.idle_worker_count > 0), Function.ui_func(7, \"select_army\", select_army, lambda obs:", "@classmethod def point(cls): # No range because it's unknown at", "Function.ability(463, \"Train_Corruptor_quick\", cmd_quick, 1353), Function.ability(464, \"Train_Cyclone_quick\", cmd_quick, 597), Function.ability(465, \"Train_DarkTemplar_quick\",", "select_point), Function.ui_func(3, \"select_rect\", select_rect), Function.ui_func(4, \"select_control_group\", control_group), Function.ui_func(5, \"select_unit\", select_unit,", "cmd_quick, 994), Function.ability(467, \"Train_Drone_quick\", cmd_quick, 1342), Function.ability(468, \"Train_Ghost_quick\", cmd_quick, 562),", "= ability_id action_cmd.queue_command = queued def cmd_screen(action, ability_id, queued, screen):", "types and functions that are valid for an agent to", "cmd_quick, 596), Function.ability(470, \"Train_Hellion_quick\", cmd_quick, 595), Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick, 919),", "3678), Function.ability(278, \"Land_Factory_screen\", cmd_screen, 520, 3678), Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen, 1524,", "select_worker def select_army(action, select_add): \"\"\"Select the entire army.\"\"\" action.action_ui.select_army.selection_add =", "`FunctionCall`s with `Arguments`. Args: function: The value to store for", "function. Should be unique. ability_id: The ability id to pass", "that no function takes the same type twice. 
queued: Whether", "488, 3683), Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen, 488, 3683), Function.ability(79, \"Build_Refinery_screen\", cmd_screen,", "2099), Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen, 401), Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick, 2391), Function.ability(249,", "cmd_screen, 2370, 3668), Function.ability(292, \"Load_Overlord_screen\", cmd_screen, 1406, 3668), Function.ability(293, \"Load_WarpPrism_screen\",", "[], move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2],", "Function.ability(453, \"Stop_quick\", cmd_quick, 3665), Function.ability(454, \"Stop_Building_quick\", cmd_quick, 2057, 3665), Function.ability(455,", "162), Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick, 164), Function.ability(261, \"Halt_quick\", cmd_quick, 3660), Function.ability(262,", "return self._func_list[key] return self._func_dict[key] def __iter__(self): return iter(self._func_list) def __len__(self):", "cmd_quick, 488, 3683), Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen, 488, 3683), Function.ability(79, \"Build_Refinery_screen\",", "\"Effect_OracleRevelation_screen\", cmd_screen, 2146), Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen, 2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen,", "cmd_quick, 1433, 3661), Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick, 2340, 3661), Function.ability(112, \"BurrowDown_Roach_quick\",", "1038, 3671), Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick, 304, 3671), Function.ability(171, \"Cancel_Queue5_quick\", cmd_quick,", "cmd_screen, 2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen, 144), Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen, 1036),", "lambda _: True class Function(collections.namedtuple( \"Function\", [\"id\", \"name\", 
\"ability_id\", \"general_id\",", "316, 3685), Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast, 316, 3685), Function.ability(226, \"Effect_Salvage_quick\", cmd_quick,", "valid. \"\"\" __slots__ = () @classmethod def ui_func(cls, id_, name,", "\"\"\" __slots__ = () @classmethod def ui_func(cls, id_, name, function_type,", "Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick, 152), Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick, 2114), Function.ability(255, \"Hallucination_Phoenix_quick\",", "be an `Arguments` object, a `dict`, or an iterable. If", "1455), Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick, 1454), Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick, 820), Function.ability(379,", "Function.ui_func(3, \"select_rect\", select_rect), Function.ui_func(4, \"select_control_group\", control_group), Function.ui_func(5, \"select_unit\", select_unit, lambda", "control group to do it with. select_point_act: What to do", "416, 3663), Function.ability(296, \"Morph_Archon_quick\", cmd_quick, 1766), Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick, 1372),", "select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]), # (select", "selecting a unit by id. 
select_unit_id: Which unit to select", "cmd_screen, 1825), Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen, 2338), Function.ability(190, \"Effect_EMP_screen\", cmd_screen, 1628),", "Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen, 2714), Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen, 173), Function.ability(197, \"Effect_GuardianShield_quick\",", "2558), Function.ability(306, \"Morph_Lurker_quick\", cmd_quick, 2332), Function.ability(307, \"Morph_LurkerDen_quick\", cmd_quick, 2112), Function.ability(308,", "build_queue_id): \"\"\"Cancel a unit in the build queue.\"\"\" action.action_ui.production_panel.unit_index =", "cmd_quick, 563), Function.ability(477, \"Train_Marine_quick\", cmd_quick, 560), Function.ability(478, \"Train_Medivac_quick\", cmd_quick, 620),", "id. select_worker: What to do when selecting a worker. build_queue_id:", "must be unique.\") def __getattr__(self, name): return self._func_dict[name] def __getitem__(self,", "203, 3690), Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap, 203, 3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen,", "at a point.\"\"\" select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type = select_point_act", "___slots__ = () @classmethod def types(cls, **kwargs): \"\"\"Create an Arguments", "\"Cancel_MorphHive_quick\", cmd_quick, 1219, 3659), Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick, 1217, 3659), Function.ability(153,", "Function.ability(428, \"Research_WarpGate_quick\", cmd_quick, 1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick, 3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\",", "Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick, 919), Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick, 1345), Function.ability(473, \"Train_Immortal_quick\",", "Which build queue index to target. 
unload_id: Which unit to", "253, 3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684, 3675), Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick,", "\"BurrowUp_Ravager_autocast\", autocast, 2342, 3662), Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick, 1388, 3662), Function.ability(133,", "1622), Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen, 2146), Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen, 2542), Function.ability(216,", "Function.ability(278, \"Land_Factory_screen\", cmd_screen, 520, 3678), Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen, 1524, 3678),", "cmd_quick, 1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick, 2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\", cmd_screen, 2558),", "1094), Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick, 1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick, 650), Function.ability(370,", "cmd_minimap, 23, 3674), Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen, 2048, 3674), Function.ability(17, \"Attack_AttackBuilding_minimap\",", "ArgumentType where you choose one of a set of known", "Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen, 422, 3683), Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick, 455, 3683),", "1376, 3662), Function.ability(120, \"BurrowUp_Baneling_autocast\", autocast, 1376, 3662), Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick,", "\"Cancel_HangarQueue5_quick\", cmd_quick, 1038, 3671), Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick, 304, 3671), Function.ability(171,", "cmd_quick, 3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192, 3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick,", "return cls(-1, \"<none>\", (value,), lambda a: a[0]) @classmethod def point(cls):", "by a 
point.Point.\"\"\" return cls(-1, \"<none>\", (0, 0), lambda a:", "862, 3699), Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\", cmd_quick, 863, 3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick,", "named = {name: type_._replace(id=Arguments._fields.index(name), name=name) for name, type_ in six.iteritems(kwargs)}", "Function.ability(43, \"Build_Bunker_screen\", cmd_screen, 324), Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen, 318), Function.ability(45, \"Build_CreepTumor_screen\",", "Function.ability(315, \"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729, 3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731, 3680),", "cmd_quick, 1097), Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick, 216), Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick, 1093),", "Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick, 2720), Function.ability(405, \"Research_Stimpack_quick\", cmd_quick, 730), Function.ability(406, \"Research_TerranInfantryArmor_quick\",", "Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick, 2014, 3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick, 1512, 3661),", "a: options[a[0]]) @classmethod def scalar(cls, value): \"\"\"Create an ArgumentType with", "3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068, 3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069,", "FUNCTION_TYPES[function_type], None) @classmethod def spec(cls, id_, name, args): \"\"\"Create a", "Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532), Function.ability(356, \"Research_Blink_quick\",", "cmd_quick, 2116), Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen, 249), Function.ability(213, 
\"Effect_NukeCalldown_screen\", cmd_screen, 1622),", "A list of the types of args passed to function_type.", "Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick, 1831, 3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833, 3671),", "Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick, 2110, 3662), Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick, 1435, 3662),", "Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick, 2108, 3661), Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick, 1433, 3661),", "command that needs a point on the minimap.\"\"\" action_cmd =", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "limit of 255 function arguments, so build something similar. \"\"\"", "select_add): \"\"\"Select all warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add = select_add def select_larva(action):", "Function.ui_func(11, \"build_queue\", build_queue, lambda obs: obs.ui_data.HasField(\"production\")), # Everything below here", "1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick, 1450), Function.ability(313, \"Morph_Ravager_quick\", cmd_quick, 2330), Function.ability(314,", "\"Build_RoachWarren_screen\", cmd_screen, 1165), Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen, 892), Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen,", "2505), Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen, 319), Function.ability(92, \"Build_TechLab_quick\", cmd_quick, 3682), Function.ability(93,", "\"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186, 3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187, 3705), Function.ability(444,", "and # limitations under the License. 
\"\"\"Define the static list", "Function.ability(40, \"Build_Assimilator_screen\", cmd_screen, 882), Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen, 1162), Function.ability(42, \"Build_Barracks_screen\",", "\"Cancel_QueueAddOn_quick\", cmd_quick, 312, 3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308, 3671), Function.ability(174,", "Function.ability(437, \"Research_ZergGroundArmor_quick\", cmd_quick, 3704), Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189, 3704), Function.ability(439,", "Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen, 2387), Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen, 3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\",", "1378, 3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick, 1382, 3661), Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick,", "this time. \"\"\"Create an ArgumentType that is represented by a", "in FUNCTIONS if f.avail_fn} class FunctionCall(collections.namedtuple( \"FunctionCall\", [\"function\", \"arguments\"])): \"\"\"Represents", "point for a rectangle. 
This is needed so that no", "cmd_quick, 452, 3679), Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick, 417, 3679), Function.ability(284, \"Lift_Factory_quick\",", "cmd_quick, 3681), Function.ability(324, \"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725, 3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick,", "3661), Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick, 1386, 3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick, 2014,", "cmd_screen, 1), Function.ability(452, \"Smart_minimap\", cmd_minimap, 1), Function.ability(453, \"Stop_quick\", cmd_quick, 3665),", "Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066, 3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067, 3694),", "\"args\", \"avail_fn\"])): \"\"\"Represents a function action. Attributes: id: The function", "cmd_quick, 1833, 3671), Function.ability(176, \"Effect_Abduct_screen\", cmd_screen, 2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen,", "Apache License, Version 2.0 (the \"License\"); # you may not", "]), build_queue_id=ArgumentType.scalar(10), # Depends on current build queue. unload_id=ArgumentType.scalar(500), #", "17), Function.ability(334, \"Patrol_minimap\", cmd_minimap, 17), Function.ability(335, \"Rally_Units_screen\", cmd_screen, 3673), Function.ability(336,", "either express or implied. 
# See the License for the", "\"\"\"Define a function represented as a game ability.\"\"\" assert function_type", "\"Lift_Starport_quick\", cmd_quick, 518, 3679), Function.ability(287, \"Load_screen\", cmd_screen, 3668), Function.ability(288, \"Load_Bunker_screen\",", "cmd_quick, 2108, 3661), Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick, 1433, 3661), Function.ability(111, \"BurrowDown_Ravager_quick\",", "cmd_screen, 30, 3684), Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen, 26, 3684), Function.ability(233, \"Effect_Spray_Zerg_screen\",", "= Arguments(**arguments) elif not isinstance(arguments, Arguments): arguments = Arguments(*arguments) return", "884), Function.ability(56, \"Build_FusionCore_screen\", cmd_screen, 333), Function.ability(57, \"Build_Gateway_screen\", cmd_screen, 883), Function.ability(58,", "multi-unit selection.\"\"\" select = action.action_ui.multi_panel select.type = select_unit_act select.unit_index =", "autocast, 2328), Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen, 247), Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen, 251),", "1346), Function.ability(481, \"Train_Observer_quick\", cmd_quick, 977), Function.ability(482, \"Train_Oracle_quick\", cmd_quick, 954), Function.ability(483,", "\"Research_TunnelingClaws_quick\", cmd_quick, 217), Function.ability(428, \"Research_WarpGate_quick\", cmd_quick, 1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick,", "Function.ability(193, \"Effect_ForceField_screen\", cmd_screen, 1526), Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen, 74), Function.ability(195, \"Effect_GhostSnipe_screen\",", "1733, 3691), Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen, 894), Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen, 891),", "cmd_screen, 2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick, 2393), Function.ability(245, 
\"Effect_WidowMineAttack_screen\", cmd_screen, 2099),", "\"Cancel_BuildInProgress_quick\", cmd_quick, 314, 3659), Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick, 1763, 3659), Function.ability(146,", "cmd_quick, 1380, 3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick, 1384, 3662), Function.ability(123, \"BurrowUp_Hydralisk_autocast\",", "\"BurrowUp_Hydralisk_autocast\", autocast, 1384, 3662), Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick, 1446, 3662), Function.ability(125,", "cmd_quick, 1522, 3679), Function.ability(286, \"Lift_Starport_quick\", cmd_quick, 518, 3679), Function.ability(287, \"Load_screen\",", "Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen, 886), Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen, 1159), Function.ability(103, \"BurrowDown_quick\",", "cmd_quick, 487, 3682), Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen, 487, 3682), Function.ability(100, \"Build_TemplarArchive_screen\",", "ability_id, general_id=0): \"\"\"Define a function represented as a game ability.\"\"\"", "cmd_screen, 328), Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen, 885), Function.ability(55, \"Build_Forge_screen\", cmd_screen, 884),", "396, 3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen, 1408, 3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap,", "Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen, 487, 3682), Function.ability(100, \"Build_TemplarArchive_screen\", cmd_screen, 890), Function.ability(101,", "cmd_quick, 1317, 3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\", cmd_quick, 3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick,", "autocast, 1514, 3662), Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick, 2097, 3662), Function.ability(138, \"BurrowUp_Zergling_quick\",", "cmd_screen, 195, 3673), Function.ability(338, 
\"Rally_Building_minimap\", cmd_minimap, 195, 3673), Function.ability(339, \"Rally_Hatchery_Units_screen\",", "obs.player_common.idle_worker_count > 0), Function.ui_func(7, \"select_army\", select_army, lambda obs: obs.player_common.army_count >", "1764), Function.ability(179, \"Effect_BlindingCloud_screen\", cmd_screen, 2063), Function.ability(180, \"Effect_Blink_screen\", cmd_screen, 3687), Function.ability(181,", "def point(cls): # No range because it's unknown at this", "cmd_quick, 18), Function.ability(275, \"Land_screen\", cmd_screen, 3678), Function.ability(276, \"Land_Barracks_screen\", cmd_screen, 554,", "1853), Function.ability(480, \"Train_Mutalisk_quick\", cmd_quick, 1346), Function.ability(481, \"Train_Observer_quick\", cmd_quick, 977), Function.ability(482,", "to use. Attributes: types: A namedtuple of the types that", "of args passed to function_type. avail_fn: For non-abilities, this function", "another ability if it can be represented by a more", "4), self.name.ljust(space and 50), \"; \".join(str(a) for a in self.args))", "cmd_quick, 1565, 3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566, 3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\",", "3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382, 3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693),", "func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k: frozenset(v) for k,", "Function.ability(192, \"Effect_Feedback_screen\", cmd_screen, 140), Function.ability(193, \"Effect_ForceField_screen\", cmd_screen, 1526), Function.ability(194, \"Effect_FungalGrowth_screen\",", "23, 3674), Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen, 2048, 3674), Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap,", "889), Function.ability(89, \"Build_Starport_screen\", cmd_screen, 329), Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen, 
2505), Function.ability(91,", "Function.ability(234, \"Effect_Stim_quick\", cmd_quick, 3675), Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick, 253, 3675), Function.ability(236,", "cmd_quick, 1346), Function.ability(481, \"Train_Observer_quick\", cmd_quick, 977), Function.ability(482, \"Train_Oracle_quick\", cmd_quick, 954),", "3662), Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick, 1446, 3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396,", "174, 3659), Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick, 2354, 3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick,", "3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654, 3698), Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick, 3699),", "2350), Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen, 2387), Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen, 3686), Function.ability(209,", "functions. 
Can't use namedtuple since python3 has a limit of", "the list of integers into something more meaningful to be", "lambda obs: obs.player_common.army_count > 0), Function.ui_func(8, \"select_warp_gates\", select_warp_gates, lambda obs:", "\"Hallucination_Stalker_quick\", cmd_quick, 158), Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick, 160), Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick,", "1253), Function.ability(451, \"Smart_screen\", cmd_screen, 1), Function.ability(452, \"Smart_minimap\", cmd_minimap, 1), Function.ability(453,", "a control group, selecting, setting, etc.\"\"\" select = action.action_ui.control_group select.action", "563), Function.ability(477, \"Train_Marine_quick\", cmd_quick, 560), Function.ability(478, \"Train_Medivac_quick\", cmd_quick, 620), Function.ability(479,", "\"Lift_OrbitalCommand_quick\", cmd_quick, 1522, 3679), Function.ability(286, \"Lift_Starport_quick\", cmd_quick, 518, 3679), Function.ability(287,", "the sizes for screen and minimap. 
functions: A namedtuple of", "cmd_quick, 3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855, 3701), Function.ability(425, \"Research_TerranVehicleWeaponsLevel2_quick\", cmd_quick,", "1158), Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen, 1167), Function.ability(88, \"Build_Stargate_screen\", cmd_screen, 889), Function.ability(89,", "3673), Function.ability(338, \"Rally_Building_minimap\", cmd_minimap, 195, 3673), Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen, 212,", "cmd_quick, 621), Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick, 623), Function.ability(461, \"Train_Carrier_quick\", cmd_quick, 948),", "Function.ability(385, \"Research_ProtossAirWeapons_quick\", cmd_quick, 3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562, 3693), Function.ability(387,", "cmd_minimap, 1408, 3669), Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913, 3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\",", "3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick, 1373, 3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221,", "\"Halt_TerranBuild_quick\", cmd_quick, 348, 3660), Function.ability(264, \"Harvest_Gather_screen\", cmd_screen, 3666), Function.ability(265, \"Harvest_Gather_Drone_screen\",", "cmd_quick, 150), Function.ability(253, \"Hallucination_Immortal_quick\", cmd_quick, 152), Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick, 2114),", "Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen, 1524, 3678), Function.ability(280, \"Land_Starport_screen\", cmd_screen, 522, 3678),", "arguments): \"\"\"Helper function for creating `FunctionCall`s with `Arguments`. 
Args: function:", "217), Function.ability(428, \"Research_WarpGate_quick\", cmd_quick, 1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick, 3702), Function.ability(430,", "types. Take a look at TYPES and FUNCTION_TYPES for more", "cmd_quick, 3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36, 3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick,", "features.py and action conversion. ABILITY_IDS = collections.defaultdict(set) # {ability_id: {funcs}}", "\"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062, 3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063, 3695), Function.ability(396,", "3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314, 3703), Function.ability(437, \"Research_ZergGroundArmor_quick\", cmd_quick, 3704),", "3701), Function.ability(426, \"Research_TerranVehicleWeaponsLevel3_quick\", cmd_quick, 857, 3701), Function.ability(427, \"Research_TunnelingClaws_quick\", cmd_quick, 217),", "a single argument type. Attributes: id: The argument id. 
This", "\"<none>\", (0, 0), lambda a: point.Point(*a).floor()) @classmethod def spec(cls, id_,", "of types and actions for SC2.\"\"\" from __future__ import absolute_import", "def all_arguments(cls, function, arguments): \"\"\"Helper function for creating `FunctionCall`s with", "\"Harvest_Gather_Mule_screen\", cmd_screen, 166, 3666), Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen, 298, 3666), Function.ability(268,", "select = action.action_ui.multi_panel select.type = select_unit_act select.unit_index = select_unit_id def", "Function.ability(288, \"Load_Bunker_screen\", cmd_screen, 407, 3668), Function.ability(289, \"Load_Medivac_screen\", cmd_screen, 394, 3668),", "cmd_quick, 1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530), Function.ability(331, \"Move_screen\", cmd_screen, 16),", "gates.\"\"\" action.action_ui.select_warp_gates.selection_add = select_add def select_larva(action): \"\"\"Select all larva.\"\"\" action.action_ui.select_larva.SetInParent()", "Function.ui_func(5, \"select_unit\", select_unit, lambda obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\", select_idle_worker, lambda", "cmd_screen, 3680), Function.ability(315, \"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729, 3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen,", "A namedtuple of all the functions. \"\"\" __slots__ = ()", "Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen, 2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen, 2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\",", "cmd_quick, 181), Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen, 2704), Function.ability(230, \"Effect_Spray_screen\", cmd_screen, 3684),", "iterable. 
If a `dict` or an iterable is provided, the", "cmd_screen, 892), Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen, 893), Function.ability(83, \"Build_SensorTower_screen\", cmd_screen, 326),", "Function.ability(281, \"Lift_quick\", cmd_quick, 3679), Function.ability(282, \"Lift_Barracks_quick\", cmd_quick, 452, 3679), Function.ability(283,", "2014, 3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick, 1512, 3661), Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick,", "control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]), select_add=ArgumentType.enum([False, True]), #", "Function.ability(96, \"Build_TechLab_Factory_quick\", cmd_quick, 454, 3682), Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen, 454, 3682),", "1825), Function.ability(189, \"Effect_CorrosiveBile_screen\", cmd_screen, 2338), Function.ability(190, \"Effect_EMP_screen\", cmd_screen, 1628), Function.ability(191,", "3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081), Function.ability(23,", "cmd_quick, 2362), Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick, 390), Function.ability(323, \"Morph_Uproot_quick\", cmd_quick, 3681),", "instance. \"\"\" if isinstance(arguments, dict): arguments = Arguments(**arguments) elif not", "Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684, 3675), Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick, 380, 3675),", "types do each function need? 
FUNCTION_TYPES = { no_op: [],", "cmd_screen, 329), Function.ability(90, \"Build_StasisTrap_screen\", cmd_screen, 2505), Function.ability(91, \"Build_SupplyDepot_screen\", cmd_screen, 319),", "Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen, 78, 3685), Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast, 78, 3685),", "minimap.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued", "action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command = queued def", "general_id, function_type, FUNCTION_TYPES[function_type], None) @classmethod def spec(cls, id_, name, args):", "on current selection. select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10),", "710), Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen, 1161), Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen, 1768), Function.ability(69,", "Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick, 455, 3683), Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen, 455, 3683),", "unit from a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index = unload_id def build_queue(action, build_queue_id):", "3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700), Function.ability(420,", "cmd_screen, 455, 3683), Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick, 488, 3683), Function.ability(78, \"Build_Reactor_Starport_screen\",", "451, 3659), Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick, 314, 3659), Function.ability(145, \"Cancel_CreepTumor_quick\", cmd_quick,", "2338), Function.ability(190, 
\"Effect_EMP_screen\", cmd_screen, 1628), Function.ability(191, \"Effect_Explode_quick\", cmd_quick, 42), Function.ability(192,", "in a set(). return self.id def __str__(self): return self.str() def", "\"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417), Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen, 1416), Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen,", "known values.\"\"\" return cls(-1, \"<none>\", (len(options),), lambda a: options[a[0]]) @classmethod", "cmd_screen, 890), Function.ability(101, \"Build_TwilightCouncil_screen\", cmd_screen, 886), Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen, 1159),", "2368, 3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974, 3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick,", "1220), Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick, 1998), Function.ability(301, \"Morph_Hellion_quick\", cmd_quick, 1978), Function.ability(302,", "more meaningful to be set in the protos to send", "the static list of types and actions for SC2.\"\"\" from", "return cls(function, arguments) class ValidActions(collections.namedtuple( \"ValidActions\", [\"types\", \"functions\"])): \"\"\"The set", "[TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap], autocast: [], } # Which", "arguments: The values to store for the arguments of the", "16), Function.ability(332, \"Move_minimap\", cmd_minimap, 16), Function.ability(333, \"Patrol_screen\", cmd_screen, 17), Function.ability(334,", "__future__ import print_function import collections import numbers import six from", "\"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "if func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k: frozenset(v) for", "\"Effect_CausticSpray_screen\", cmd_screen, 2324), Function.ability(185, \"Effect_Charge_screen\", cmd_screen, 1819), Function.ability(186, \"Effect_Charge_autocast\", 
autocast,", "3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865, 3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866,", "action. Attributes: function: Store the function id, eg 2 for", "cmd_quick, 1683, 3675), Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen, 255), Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen,", "1847), Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick, 1516), Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick, 2708), Function.ability(311,", "3671), Function.ability(171, \"Cancel_Queue5_quick\", cmd_quick, 306, 3671), Function.ability(172, \"Cancel_QueueAddOn_quick\", cmd_quick, 312,", "pass to sc2. general_id: 0 for normal abilities, and the", "larva.\"\"\" action.action_ui.select_larva.SetInParent() # Adds the empty proto field. def select_unit(action,", "3662), Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast, 2342, 3662), Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick, 1388,", "\"Rally_Nexus_minimap\", cmd_minimap, 207, 3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594), Function.ability(352, \"Research_AdvancedBallistics_quick\",", "cmd_quick, 861, 3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862, 3699), Function.ability(417, \"Research_TerranShipWeaponsLevel3_quick\",", "\"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188, 3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\", cmd_quick, 3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\",", "Attributes: screen: A point on the screen. minimap: A point", "cmd_quick, 624), Function.ability(499, \"Train_Viper_quick\", cmd_quick, 1354), Function.ability(500, \"Train_VoidRay_quick\", cmd_quick, 950),", "action conversion. 
ABILITY_IDS = collections.defaultdict(set) # {ability_id: {funcs}} for func", "unit to select by id. select_worker: What to do when", "= () @classmethod def types(cls, **kwargs): \"\"\"Create an Arguments of", "Function.ability(331, \"Move_screen\", cmd_screen, 16), Function.ability(332, \"Move_minimap\", cmd_minimap, 16), Function.ability(333, \"Patrol_screen\",", "on the screen.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command", "game. \"\"\" __slots__ = () def __str__(self): return \"%s/%s %s\"", "[], unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued, TYPES.screen],", "six.iteritems(kwargs)} return cls(**named) # The list of known types. TYPES", "Function.ability(52, \"Build_Extractor_screen\", cmd_screen, 1154), Function.ability(53, \"Build_Factory_screen\", cmd_screen, 328), Function.ability(54, \"Build_FleetBeacon_screen\",", "Function.ability(208, \"Effect_MassRecall_screen\", cmd_screen, 3686), Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368, 3686), Function.ability(210,", "if f.avail_fn} class FunctionCall(collections.namedtuple( \"FunctionCall\", [\"function\", \"arguments\"])): \"\"\"Represents a function", "ABILITY_FUNCTIONS return cls(id_, name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None) @classmethod", "empty proto field. 
def select_unit(action, select_unit_act, select_unit_id): \"\"\"Select a specific", "Function.ability(491, \"Train_Sentry_quick\", cmd_quick, 921), Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick, 591), Function.ability(493, \"Train_Stalker_quick\",", "division from __future__ import print_function import collections import numbers import", "entire army.\"\"\" action.action_ui.select_army.selection_add = select_add def select_warp_gates(action, select_add): \"\"\"Select all", "3679), Function.ability(284, \"Lift_Factory_quick\", cmd_quick, 485, 3679), Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick, 1522,", "cmd_quick, 977), Function.ability(482, \"Train_Oracle_quick\", cmd_quick, 954), Function.ability(483, \"Train_Overlord_quick\", cmd_quick, 1344),", "cmd_quick, 1449, 3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451, 3659), Function.ability(160, \"Cancel_MorphRavager_quick\",", "\"Effect_Repair_Mule_autocast\", autocast, 78, 3685), Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen, 316, 3685), Function.ability(225,", "cmd_quick, 3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186, 3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick,", "16), Function.ability(333, \"Patrol_screen\", cmd_screen, 17), Function.ability(334, \"Patrol_minimap\", cmd_minimap, 17), Function.ability(335,", "cmd_quick, 3663), Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick, 416, 3663), Function.ability(296, \"Morph_Archon_quick\", cmd_quick,", "\"Train_Hydralisk_quick\", cmd_quick, 1345), Function.ability(473, \"Train_Immortal_quick\", cmd_quick, 979), Function.ability(474, \"Train_Infestor_quick\", cmd_quick,", "of functions. 
Can't use namedtuple since python3 has a limit", "cmd_quick, 595), Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick, 919), Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick, 1345),", "obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\", build_queue, lambda obs: obs.ui_data.HasField(\"production\")), # Everything", "\"Build_EvolutionChamber_screen\", cmd_screen, 1156), Function.ability(52, \"Build_Extractor_screen\", cmd_screen, 1154), Function.ability(53, \"Build_Factory_screen\", cmd_screen,", "will be unpacked into an `Arguments` object. Returns: A new", "cmd_quick, 253, 3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684, 3675), Function.ability(237, \"Effect_Stim_Marine_quick\",", "Function.ability(381, \"Research_ProtossAirArmor_quick\", cmd_quick, 3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565, 3692), Function.ability(383,", "Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683, 3675), Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen, 255), Function.ability(240,", "del action def move_camera(action, minimap): \"\"\"Move the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def", "Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick, 1374, 3661), Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick, 1378, 3661),", "values will be unpacked into an `Arguments` object. Returns: A", "use this file except in compliance with the License. #", "cmd_screen, 247), Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen, 251), Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen, 2588),", "select_unit_id=ArgumentType.scalar(500), # Depends on current selection. 
select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All,", "Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen, 173), Function.ability(197, \"Effect_GuardianShield_quick\", cmd_quick, 76), Function.ability(198, \"Effect_Heal_screen\",", "883), Function.ability(58, \"Build_GhostAcademy_screen\", cmd_screen, 327), Function.ability(59, \"Build_Hatchery_screen\", cmd_screen, 1152), Function.ability(60,", "Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394, 3661), Function.ability(109, \"BurrowDown_Lurker_quick\", cmd_quick, 2108, 3661),", "\"\"\"Select units within a rectangle.\"\"\" select = action.action_feature_layer.unit_selection_rect out_rect =", "\"Effect_CorrosiveBile_screen\", cmd_screen, 2338), Function.ability(190, \"Effect_EMP_screen\", cmd_screen, 1628), Function.ability(191, \"Effect_Explode_quick\", cmd_quick,", "cmd_screen, 255), Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen, 2358), Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen, 2244),", "255), Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen, 2358), Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen, 2244), Function.ability(242,", "cmd_quick, 865, 3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866, 3700), Function.ability(423, \"Research_TerranVehicleWeapons_quick\",", "cmd_screen, 2244), Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen, 1664), Function.ability(243, \"Effect_ViperConsume_screen\", cmd_screen, 2073),", "\"Effect_EMP_screen\", cmd_screen, 1628), Function.ability(191, \"Effect_Explode_quick\", cmd_quick, 42), Function.ability(192, \"Effect_Feedback_screen\", cmd_screen,", "cmd_screen, 144), Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen, 1036), Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen, 2346),", "obs: 
obs.player_common.army_count > 0), Function.ui_func(8, \"select_warp_gates\", select_warp_gates, lambda obs: obs.player_common.warp_gate_count", "cmd_screen, 401), Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick, 2391), Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick, 146),", "def select_point(action, select_point_act, screen): \"\"\"Select a unit at a point.\"\"\"", "1725, 3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727, 3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick,", "(now vs add to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal,", "2324), Function.ability(185, \"Effect_Charge_screen\", cmd_screen, 1819), Function.ability(186, \"Effect_Charge_autocast\", autocast, 1819), Function.ability(187,", "cmd_quick, 1448), Function.ability(312, \"Morph_PlanetaryFortress_quick\", cmd_quick, 1450), Function.ability(313, \"Morph_Ravager_quick\", cmd_quick, 2330),", "unit by id. 
select_unit_id: Which unit to select by id.", "518, 3679), Function.ability(287, \"Load_screen\", cmd_screen, 3668), Function.ability(288, \"Load_Bunker_screen\", cmd_screen, 407,", "represented as a game ability.\"\"\" assert function_type in ABILITY_FUNCTIONS return", "Function.ability(292, \"Load_Overlord_screen\", cmd_screen, 1406, 3668), Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen, 911, 3668),", "Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen, 199, 3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap, 199, 3673),", "lambda a: options[a[0]]) @classmethod def scalar(cls, value): \"\"\"Create an ArgumentType", "ability_id action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id, queued, minimap):", "793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803), Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick, 2720), Function.ability(405,", "Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731, 3680), Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick, 388), Function.ability(318,", "978), Function.ability(463, \"Train_Corruptor_quick\", cmd_quick, 1353), Function.ability(464, \"Train_Cyclone_quick\", cmd_quick, 597), Function.ability(465,", "cmd_quick, 388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558),", "[], } # Which ones need an ability? 
ABILITY_FUNCTIONS =", "78, 3685), Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast, 78, 3685), Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen,", "cmd_quick, 626), Function.ability(476, \"Train_Marauder_quick\", cmd_quick, 563), Function.ability(477, \"Train_Marine_quick\", cmd_quick, 560),", "\"Effect_ImmortalBarrier_autocast\", autocast, 2328), Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen, 247), Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen,", "Function.ability(282, \"Lift_Barracks_quick\", cmd_quick, 452, 3679), Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick, 417, 3679),", "3675), Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen, 255), Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen, 2358), Function.ability(241,", "2110, 3662), Function.ability(128, \"BurrowUp_Queen_quick\", cmd_quick, 1435, 3662), Function.ability(129, \"BurrowUp_Queen_autocast\", autocast,", "\"Morph_Hive_quick\", cmd_quick, 1218), Function.ability(303, \"Morph_Lair_quick\", cmd_quick, 1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick,", "\"Build_Reactor_Factory_quick\", cmd_quick, 455, 3683), Function.ability(76, \"Build_Reactor_Factory_screen\", cmd_screen, 455, 3683), Function.ability(77,", "805), Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick, 790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick, 799), Function.ability(355,", "265), Function.ability(361, \"Research_CombatShield_quick\", cmd_quick, 731), Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick, 732), Function.ability(363,", "595), Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick, 919), Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick, 1345), Function.ability(473,", "unpacked into an `Arguments` object. 
Returns: A new `FunctionCall` instance.", "3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552, 3689), Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick, 3688),", "cmd_quick, 1514, 3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast, 1514, 3662), Function.ability(137, \"BurrowUp_WidowMine_quick\",", "1562, 3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563, 3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\", cmd_quick,", "sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), # Depends on current", "cmd_screen, 1406, 3668), Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen, 911, 3668), Function.ability(294, \"LoadAll_quick\",", "Functions([ Function.ui_func(0, \"no_op\", no_op), Function.ui_func(1, \"move_camera\", move_camera), Function.ui_func(2, \"select_point\", select_point),", "action.action_ui.production_panel.unit_index = build_queue_id def cmd_quick(action, ability_id, queued): \"\"\"Do a quick", "3674), Function.ability(20, \"Scan_Move_minimap\", cmd_minimap, 19, 3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082),", "of the argument, also unique. 
sizes: The max+1 of each", "Function.ability(136, \"BurrowUp_Ultralisk_autocast\", autocast, 1514, 3662), Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick, 2097, 3662),", "Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565, 3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566, 3692),", "be used in ValidActions.\"\"\" return cls(id_, name, None, None, None,", "1352), Function.ability(475, \"Train_Liberator_quick\", cmd_quick, 626), Function.ability(476, \"Train_Marauder_quick\", cmd_quick, 563), Function.ability(477,", "\"BurrowDown_WidowMine_quick\", cmd_quick, 2095, 3661), Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick, 1390, 3661), Function.ability(117,", "when selecting a unit by id. select_unit_id: Which unit to", "autocast} # Which ones require a point? POINT_REQUIRED_FUNCS = {", "import print_function import collections import numbers import six from pysc2.lib", "1193, 3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194, 3706), Function.ability(449, \"Research_ZerglingAdrenalGlands_quick\", cmd_quick,", "to sc2. general_id: 0 for normal abilities, and the ability_id", "cmd_quick, 348, 3660), Function.ability(264, \"Harvest_Gather_screen\", cmd_screen, 3666), Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen,", "Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438, 3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick, 2371, 3664),", "unload_id: Which unit to target in a transport/nydus/command center. \"\"\"", "= select_add def select_larva(action): \"\"\"Select all larva.\"\"\" action.action_ui.select_larva.SetInParent() # Adds", "unique. name: The name of the argument, also unique. 
sizes:", "to be set in the protos to send to the", "3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692), Function.ability(31,", "here is generated with gen_actions.py Function.ability(12, \"Attack_screen\", cmd_screen, 3674), Function.ability(13,", "Function.ability(53, \"Build_Factory_screen\", cmd_screen, 328), Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen, 885), Function.ability(55, \"Build_Forge_screen\",", "388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\", cmd_quick, 556), Function.ability(319, \"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558), Function.ability(320,", "\"Build_TechLab_Factory_quick\", cmd_quick, 454, 3682), Function.ability(97, \"Build_TechLab_Factory_screen\", cmd_screen, 454, 3682), Function.ability(98,", "in compliance with the License. # You may obtain a", "1062, 3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063, 3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick,", "977), Function.ability(482, \"Train_Oracle_quick\", cmd_quick, 954), Function.ability(483, \"Train_Overlord_quick\", cmd_quick, 1344), Function.ability(484,", "software # distributed under the License is distributed on an", "select_warp_gates, lambda obs: obs.player_common.warp_gate_count > 0), Function.ui_func(9, \"select_larva\", select_larva, lambda", "\"Effect_YamatoGun_screen\", cmd_screen, 401), Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick, 2391), Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick,", "2552, 3689), Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick, 3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36,", "Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974, 3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116), 
Function.ability(212,", "\"Effect_Spray_Zerg_screen\", cmd_screen, 28, 3684), Function.ability(234, \"Effect_Stim_quick\", cmd_quick, 3675), Function.ability(235, \"Effect_Stim_Marauder_quick\",", "= ability_id action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id, queued,", "Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550, 3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376), Function.ability(38,", "488, 3683), Function.ability(79, \"Build_Refinery_screen\", cmd_screen, 320), Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen, 1165),", "Function.ability(423, \"Research_TerranVehicleWeapons_quick\", cmd_quick, 3701), Function.ability(424, \"Research_TerranVehicleWeaponsLevel1_quick\", cmd_quick, 855, 3701), Function.ability(425,", "cmd_quick, 1444, 3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394, 3661), Function.ability(109, \"BurrowDown_Lurker_quick\",", "\"Stop_Stop_quick\", cmd_quick, 4, 3665), Function.ability(457, \"Train_Adept_quick\", cmd_quick, 922), Function.ability(458, \"Train_Baneling_quick\",", "screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), # (now vs add to queue) control_group_act=ArgumentType.enum([", "ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap, autocast} # Which ones require", "\"Attack_screen\", cmd_screen, 3674), Function.ability(13, \"Attack_minimap\", cmd_minimap, 3674), Function.ability(14, \"Attack_Attack_screen\", cmd_screen,", "417, 3679), Function.ability(284, \"Lift_Factory_quick\", cmd_quick, 485, 3679), Function.ability(285, \"Lift_OrbitalCommand_quick\", cmd_quick,", "Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen, 2346), Function.ability(220, \"Effect_Repair_screen\", cmd_screen, 3685), Function.ability(221, \"Effect_Repair_autocast\",", "2244), Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen, 1664), 
Function.ability(243, \"Effect_ViperConsume_screen\", cmd_screen, 2073), Function.ability(244,", "distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "from s2clientprotocol import spatial_pb2 as sc_spatial from s2clientprotocol import ui_pb2", "cmd_quick, 164), Function.ability(261, \"Halt_quick\", cmd_quick, 3660), Function.ability(262, \"Halt_Building_quick\", cmd_quick, 315,", "cmd_quick, 1066, 3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067, 3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\",", "Function.ability(263, \"Halt_TerranBuild_quick\", cmd_quick, 348, 3660), Function.ability(264, \"Harvest_Gather_screen\", cmd_screen, 3666), Function.ability(265,", "control_group), Function.ui_func(5, \"select_unit\", select_unit, lambda obs: obs.ui_data.HasField(\"multi\")), Function.ui_func(6, \"select_idle_worker\", select_idle_worker,", "time. \"\"\"Create an ArgumentType that is represented by a point.Point.\"\"\"", "1512, 3661), Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick, 2095, 3661), Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick,", "control_group_id): \"\"\"Act on a control group, selecting, setting, etc.\"\"\" select", "\"Cancel_QueuePasive_quick\", cmd_quick, 1831, 3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833, 3671), Function.ability(176,", "3690), Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap, 203, 3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen, 211,", "2535, 3659), Function.ability(168, \"Cancel_Last_quick\", cmd_quick, 3671), Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick, 1038,", "when selecting a worker. build_queue_id: Which build queue index to", "Args: function: The value to store for the action function.", "No range because it's unknown at this time. \"\"\"Create an", "construct the sc2 action proto out of python types. 
args:", "cmd_screen, 1157), Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen, 1160), Function.ability(62, \"Build_Interceptors_quick\", cmd_quick, 1042),", "select_point. arguments: The list of arguments for that function, each", "Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen, 249), Function.ability(213, \"Effect_NukeCalldown_screen\", cmd_screen, 1622), Function.ability(214, \"Effect_OracleRevelation_screen\",", "The name of the function. Should be unique. ability_id: The", "\"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565, 3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566, 3692), Function.ability(384,", "\"Harvest_Gather_screen\", cmd_screen, 3666), Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen, 1183, 3666), Function.ability(266, \"Harvest_Gather_Mule_screen\",", "\"Land_screen\", cmd_screen, 3678), Function.ability(276, \"Land_Barracks_screen\", cmd_screen, 554, 3678), Function.ability(277, \"Land_CommandCenter_screen\",", "Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen, 894), Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen, 891), Function.ability(50, \"Build_EngineeringBay_screen\",", "Function.ability(221, \"Effect_Repair_autocast\", autocast, 3685), Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen, 78, 3685), Function.ability(223,", "946), Function.ability(485, \"Train_Probe_quick\", cmd_quick, 1006), Function.ability(486, \"Train_Queen_quick\", cmd_quick, 1632), Function.ability(487,", "Function.ability(314, \"Morph_Root_screen\", cmd_screen, 3680), Function.ability(315, \"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729, 3680), Function.ability(316,", "\"Morph_GreaterSpire_quick\", cmd_quick, 1220), Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick, 1998), Function.ability(301, \"Morph_Hellion_quick\", cmd_quick,", "3693), Function.ability(389, \"Research_ProtossGroundArmor_quick\", cmd_quick, 3694), 
Function.ability(390, \"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065, 3694),", "id_, name, args): \"\"\"Create a Function to be used in", "3679), Function.ability(286, \"Lift_Starport_quick\", cmd_quick, 518, 3679), Function.ability(287, \"Load_screen\", cmd_screen, 3668),", "\"Hallucination_Immortal_quick\", cmd_quick, 152), Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick, 2114), Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick,", "cmd_screen, 166, 3666), Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen, 298, 3666), Function.ability(268, \"Harvest_Gather_SCV_screen\",", "cmd_screen, 318), Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen, 3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen, 1694,", "2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\", cmd_screen, 2558), Function.ability(306, \"Morph_Lurker_quick\", cmd_quick, 2332), Function.ability(307,", "more general action. function_type: One of the functions in FUNCTION_TYPES", "cmd_quick, 1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692), Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick, 3689),", "Function.ability(289, \"Load_Medivac_screen\", cmd_screen, 394, 3668), Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen, 1437, 3668),", "cmd_screen, 394, 3668), Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen, 1437, 3668), Function.ability(291, \"Load_NydusWorm_screen\",", "3685), Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen, 78, 3685), Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast, 78,", "cmd_quick, 614), Function.ability(503, \"Train_Zealot_quick\", cmd_quick, 916), Function.ability(504, \"Train_Zergling_quick\", cmd_quick, 1343),", "# Copyright 2017 Google Inc. All Rights Reserved. 
# #", "= select_add def select_warp_gates(action, select_add): \"\"\"Select all warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add", "3669), ]) # pylint: enable=line-too-long # Some indexes to support", "\"\"\"Unload a unit from a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index = unload_id def", "= collections.defaultdict(set) # {ability_id: {funcs}} for func in FUNCTIONS: if", "a point.\"\"\" select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type = select_point_act def", "\"Train_SCV_quick\", cmd_quick, 524), Function.ability(491, \"Train_Sentry_quick\", cmd_quick, 921), Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick,", "of known values.\"\"\" return cls(-1, \"<none>\", (len(options),), lambda a: options[a[0]])", "1694, 3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733, 3691), Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen,", "screen. minimap: A point on the minimap. screen2: The second", "in the build queue.\"\"\" action.action_ui.production_panel.unit_index = build_queue_id def cmd_quick(action, ability_id,", "minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act, screen): \"\"\"Select a unit at a", "the minimap. screen2: The second point for a rectangle. 
This", "1998), Function.ability(301, \"Morph_Hellion_quick\", cmd_quick, 1978), Function.ability(302, \"Morph_Hive_quick\", cmd_quick, 1218), Function.ability(303,", "\"Build_NydusNetwork_screen\", cmd_screen, 1161), Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen, 1768), Function.ability(69, \"Build_PhotonCannon_screen\", cmd_screen,", "cmd_quick, 392, 3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382, 3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\",", "cmd_quick, 1063, 3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064, 3695), Function.ability(397, \"Research_ProtossShields_quick\",", "line them all up nicely.\"\"\" return \"%s/%s (%s)\" % (str(self.id).rjust(space", "select_point: [TYPES.select_point_act, TYPES.screen], select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id],", "3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396, 3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen, 1408,", "Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen, 298, 3666), Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen, 295, 3666),", "cmd_screen, 3687), Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen, 1442, 3687), Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen,", "\"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221, 3659), Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick, 1219, 3659), Function.ability(152,", "screen: A point on the screen. 
minimap: A point on", "\"Build_Armory_screen\", cmd_screen, 331), Function.ability(40, \"Build_Assimilator_screen\", cmd_screen, 882), Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen,", "\"Build_Extractor_screen\", cmd_screen, 1154), Function.ability(53, \"Build_Factory_screen\", cmd_screen, 328), Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen,", "3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066, 3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067,", "cmd_screen, 1729, 3680), Function.ability(316, \"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731, 3680), Function.ability(317, \"Morph_SiegeMode_quick\",", "with the License. # You may obtain a copy of", "[\"types\", \"functions\"])): \"\"\"The set of types and functions that are", "cmd_quick, 524), Function.ability(491, \"Train_Sentry_quick\", cmd_quick, 921), Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick, 591),", "3682), Function.ability(93, \"Build_TechLab_screen\", cmd_screen, 3682), Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick, 421, 3682),", "Which argument types do each function need? 
FUNCTION_TYPES = {", "Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick, 920), Function.ability(466, \"Train_Disruptor_quick\", cmd_quick, 994), Function.ability(467, \"Train_Drone_quick\",", "Function.ability(245, \"Effect_WidowMineAttack_screen\", cmd_screen, 2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast, 2099), Function.ability(247, \"Effect_YamatoGun_screen\",", "\"BurrowUp_Ravager_quick\", cmd_quick, 2342, 3662), Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast, 2342, 3662), Function.ability(132,", "\"Effect_Abduct_screen\", cmd_screen, 2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544), Function.ability(178, \"Effect_AutoTurret_screen\", cmd_screen,", "3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap, 199, 3673), Function.ability(343, \"Rally_Workers_screen\", cmd_screen, 3690),", "Returns: A new `FunctionCall` instance. \"\"\" if isinstance(arguments, dict): arguments", "\"Effect_PointDefenseDrone_screen\", cmd_screen, 144), Function.ability(218, \"Effect_PsiStorm_screen\", cmd_screen, 1036), Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen,", "full set of functions. Can't use namedtuple since python3 has", "\"\"\"Do a command that needs a point on the screen.\"\"\"", "cmd_screen, 554, 3678), Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen, 419, 3678), Function.ability(278, \"Land_Factory_screen\",", "> 0), Function.ui_func(8, \"select_warp_gates\", select_warp_gates, lambda obs: obs.player_common.warp_gate_count > 0),", "2017 Google Inc. All Rights Reserved. 
# # Licensed under", "3690), Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen, 203, 3690), Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap, 203,", "\"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067, 3694), Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\",", "\"Build_NydusWorm_screen\", cmd_screen, 1768), Function.ability(69, \"Build_PhotonCannon_screen\", cmd_screen, 887), Function.ability(70, \"Build_Pylon_screen\", cmd_screen,", "\"Train_Medivac_quick\", cmd_quick, 620), Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick, 1853), Function.ability(480, \"Train_Mutalisk_quick\", cmd_quick,", "\"Train_Ultralisk_quick\", cmd_quick, 1348), Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick, 624), Function.ability(499, \"Train_Viper_quick\", cmd_quick,", "the function id, eg 2 for select_point. arguments: The list", "__slots__ = () def __str__(self): return \"%s/%s %s\" % (self.id,", "Can either be an `Arguments` object, a `dict`, or an", "types. 
TYPES = Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), #", "3696), Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick, 1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick, 793), Function.ability(403,", "cmd_quick, 1374, 3661), Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick, 1378, 3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\",", "as sc_spatial from s2clientprotocol import ui_pb2 as sc_ui def no_op(action):", "(%s)\" % (str(self.id).rjust(space and 4), self.name.ljust(space and 50), \"; \".join(str(a)", "static list of types and actions for SC2.\"\"\" from __future__", "\"Research_PsiStorm_quick\", cmd_quick, 1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick, 793), Function.ability(403, \"Research_RavenRecalibratedExplosives_quick\", cmd_quick,", "autocast: [], } # Which ones need an ability? ABILITY_FUNCTIONS", "the function. Should be unique. ability_id: The ability id to", "Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187, 3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188, 3705),", "express or implied. # See the License for the specific", "for a rectangle. This is needed so that no function", "except in compliance with the License. # You may obtain", "\"BurrowDown_Drone_quick\", cmd_quick, 1378, 3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick, 1382, 3661), Function.ability(107,", "Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861, 3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick, 862, 3699),", "The argument id. This is unique. 
name: The name of", "def no_op(action): del action def move_camera(action, minimap): \"\"\"Move the camera.\"\"\"", "\"Effect_FungalGrowth_screen\", cmd_screen, 74), Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen, 2714), Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen,", "Function.ability(241, \"Effect_TimeWarp_screen\", cmd_screen, 2244), Function.ability(242, \"Effect_Transfusion_screen\", cmd_screen, 1664), Function.ability(243, \"Effect_ViperConsume_screen\",", "\"Morph_LurkerDen_quick\", cmd_quick, 2112), Function.ability(308, \"Morph_Mothership_quick\", cmd_quick, 1847), Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick,", "creating `FunctionCall`s with `Arguments`. Args: function: The value to store", "whether the function is valid. \"\"\" __slots__ = () @classmethod", "3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187, 3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188,", "screen2: The second point for a rectangle. This is needed", "self.name.ljust(space and 50), \"; \".join(str(a) for a in self.args)) class", "> 0), Function.ui_func(10, \"unload\", unload, lambda obs: obs.ui_data.HasField(\"cargo\")), Function.ui_func(11, \"build_queue\",", "\"BurrowDown_Queen_quick\", cmd_quick, 1433, 3661), Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick, 2340, 3661), Function.ability(112,", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "or later. control_group_act: What to do with the control group.", "ability_id action_cmd.queue_command = queued def cmd_screen(action, ability_id, queued, screen): \"\"\"Do", "a rectangle.\"\"\" select = action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add() screen_rect =", "argument type. Attributes: id: The argument id. This is unique.", "need? 
FUNCTION_TYPES = { no_op: [], move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act,", "None) @classmethod def spec(cls, id_, name, args): \"\"\"Create a Function", "language governing permissions and # limitations under the License. \"\"\"Define", "Function.ability(352, \"Research_AdvancedBallistics_quick\", cmd_quick, 805), Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick, 790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\",", "cmd_quick, 1482), Function.ability(359, \"Research_Charge_quick\", cmd_quick, 1592), Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick, 265),", "\"Research_ProtossShields_quick\", cmd_quick, 3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068, 3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\",", "36, 3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550, 3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick,", "\"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116), Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen, 249), Function.ability(213, \"Effect_NukeCalldown_screen\", cmd_screen,", "autocast, 3685), Function.ability(222, \"Effect_Repair_Mule_screen\", cmd_screen, 78, 3685), Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast,", "913, 3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913, 3669), ]) # pylint:", "Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick, 392, 3676), Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382, 3676),", "3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38, 3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\", cmd_quick, 2552,", "could be: [[0], [23, 38]]. \"\"\" __slots__ = () @classmethod", "for creating `FunctionCall`s with `Arguments`. 
Args: function: The value to", "\"select_point_act\", \"select_add\", \"select_unit_act\", \"select_unit_id\", \"select_worker\", \"build_queue_id\", \"unload_id\"])): \"\"\"The full list", "function returns whether the function is valid. \"\"\" __slots__ =", "(str(self.id).rjust(space and 4), self.name.ljust(space and 50), \"; \".join(str(a) for a", "\"Morph_Hellbat_quick\", cmd_quick, 1998), Function.ability(301, \"Morph_Hellion_quick\", cmd_quick, 1978), Function.ability(302, \"Morph_Hive_quick\", cmd_quick,", "Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen, 488, 3683), Function.ability(79, \"Build_Refinery_screen\", cmd_screen, 320), Function.ability(80,", "Function.ability(92, \"Build_TechLab_quick\", cmd_quick, 3682), Function.ability(93, \"Build_TechLab_screen\", cmd_screen, 3682), Function.ability(94, \"Build_TechLab_Barracks_quick\",", "Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick, 623), Function.ability(461, \"Train_Carrier_quick\", cmd_quick, 948), Function.ability(462, \"Train_Colossus_quick\",", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "numbers import six from pysc2.lib import point from s2clientprotocol import", "cmd_screen, 324), Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen, 318), Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen, 3691),", "2704), Function.ability(230, \"Effect_Spray_screen\", cmd_screen, 3684), Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen, 30, 3684),", "3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727, 3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\", cmd_quick, 403),", "cmd_quick, 2550, 3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick,", "\"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730, 3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732, 3659), Function.ability(166,", "cmd_screen, 1419), Function.ability(506, \"TrainWarp_DarkTemplar_screen\", cmd_screen, 1417), Function.ability(507, \"TrainWarp_HighTemplar_screen\", cmd_screen, 1416),", "queue.\"\"\" action.action_ui.production_panel.unit_index = build_queue_id def cmd_quick(action, ability_id, queued): \"\"\"Do a", "3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen, 1694, 3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733,", "Which control group to do it with. select_point_act: What to", "unit to the selection or replace it. 
select_unit_act: What to", "1520), Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick, 1220), Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick, 1998), Function.ability(301,", "ArgumentType(collections.namedtuple( \"ArgumentType\", [\"id\", \"name\", \"sizes\", \"fn\"])): \"\"\"Represents a single argument", "normal abilities, and the ability_id of another ability if it", "Function.ability(110, \"BurrowDown_Queen_quick\", cmd_quick, 1433, 3661), Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick, 2340, 3661),", "select_idle_worker: [TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva: [], unload: [TYPES.unload_id],", "TYPES = Arguments.types( screen=ArgumentType.point(), minimap=ArgumentType.point(), screen2=ArgumentType.point(), queued=ArgumentType.enum([False, True]), # (now", "Function.ability(56, \"Build_FusionCore_screen\", cmd_screen, 333), Function.ability(57, \"Build_Gateway_screen\", cmd_screen, 883), Function.ability(58, \"Build_GhostAcademy_screen\",", "cmd_screen, 78, 3685), Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast, 78, 3685), Function.ability(224, \"Effect_Repair_SCV_screen\",", "options): \"\"\"Create an ArgumentType where you choose one of a", "or an iterable is provided, the values will be unpacked", "if len(self._func_dict) != len(self._func_list): raise ValueError(\"Function names must be unique.\")", "enable=line-too-long # Some indexes to support features.py and action conversion.", "A namedtuple of the types that the functions require. 
Unlike", "\"LoadAll_CommandCenter_quick\", cmd_quick, 416, 3663), Function.ability(296, \"Morph_Archon_quick\", cmd_quick, 1766), Function.ability(297, \"Morph_BroodLord_quick\",", "Function.ability(303, \"Morph_Lair_quick\", cmd_quick, 1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick, 2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\",", "\"Build_SupplyDepot_screen\", cmd_screen, 319), Function.ability(92, \"Build_TechLab_quick\", cmd_quick, 3682), Function.ability(93, \"Build_TechLab_screen\", cmd_screen,", "Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193, 3706), Function.ability(448, \"Research_ZergMissileWeaponsLevel3_quick\", cmd_quick, 1194, 3706),", "Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick, 488, 3683), Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen, 488, 3683),", "cmd_quick, 2342, 3662), Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast, 2342, 3662), Function.ability(132, \"BurrowUp_Roach_quick\",", "select_warp_gates(action, select_add): \"\"\"Select all warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add = select_add def", "\"select_unit_act\", \"select_unit_id\", \"select_worker\", \"build_queue_id\", \"unload_id\"])): \"\"\"The full list of argument", "cmd_screen, 911, 3668), Function.ability(294, \"LoadAll_quick\", cmd_quick, 3663), Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick,", "3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594, 3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596,", "Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick, 1482), Function.ability(359, \"Research_Charge_quick\", cmd_quick, 1592), Function.ability(360, \"Research_ChitinousPlating_quick\",", "383, 3677), Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick, 3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick, 
392,", "\"Build_TechLab_Barracks_quick\", cmd_quick, 421, 3682), Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen, 421, 3682), Function.ability(96,", "156), Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick, 158), Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick, 160), Function.ability(259,", "function_type: One of the functions in FUNCTION_TYPES for how to", "cmd_quick, 732), Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick, 764), Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick, 1097),", "function need? FUNCTION_TYPES = { no_op: [], move_camera: [TYPES.minimap], select_point:", "return self.str() def str(self, space=False): \"\"\"String version. Set space=True to", "types(cls, **kwargs): \"\"\"Create an Arguments of the possible Types.\"\"\" named", "Function.ability(209, \"Effect_MassRecall_Mothership_screen\", cmd_screen, 2368, 3686), Function.ability(210, \"Effect_MassRecall_MothershipCore_screen\", cmd_screen, 1974, 3686),", "656, 3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657, 3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick,", "\"BurrowUp_Zergling_quick\", cmd_quick, 1392, 3662), Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast, 1392, 3662), Function.ability(140,", "cmd_quick, 2596, 3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\", cmd_quick, 451, 3659), Function.ability(144, \"Cancel_BuildInProgress_quick\",", "for an agent to use. Attributes: types: A namedtuple of", "the sc2 action proto out of python types. 
args: A", "3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733, 3691), Function.ability(48, \"Build_CyberneticsCore_screen\", cmd_screen, 894),", "422, 3683), Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen, 422, 3683), Function.ability(75, \"Build_Reactor_Factory_quick\", cmd_quick,", "cmd_quick, 1562, 3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563, 3693), Function.ability(388, \"Research_ProtossAirWeaponsLevel3_quick\",", "2371, 3664), Function.ability(516, \"UnloadAllAt_screen\", cmd_screen, 3669), Function.ability(517, \"UnloadAllAt_minimap\", cmd_minimap, 3669),", "181), Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen, 2704), Function.ability(230, \"Effect_Spray_screen\", cmd_screen, 3684), Function.ability(231,", "cmd_screen, 327), Function.ability(59, \"Build_Hatchery_screen\", cmd_screen, 1152), Function.ability(60, \"Build_HydraliskDen_screen\", cmd_screen, 1157),", "cmd_minimap, 913, 3669), ]) # pylint: enable=line-too-long # Some indexes", "\"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082), Function.ability(22, \"Behavior_BuildingAttackOn_quick\", cmd_quick, 2081), Function.ability(23, \"Behavior_CloakOff_quick\", cmd_quick,", ") # Which argument types do each function need? 
FUNCTION_TYPES", "Function.ability(478, \"Train_Medivac_quick\", cmd_quick, 620), Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick, 1853), Function.ability(480, \"Train_Mutalisk_quick\",", "207, 3690), Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap, 207, 3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick,", "Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick, 1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick, 650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\",", "2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375), Function.ability(39, \"Build_Armory_screen\", cmd_screen, 331), Function.ability(40,", "all warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add = select_add def select_larva(action): \"\"\"Select all", "for f in FUNCTIONS if f.avail_fn} class FunctionCall(collections.namedtuple( \"FunctionCall\", [\"function\",", "3689), Function.ability(34, \"Behavior_HoldFireOn_quick\", cmd_quick, 3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36, 3688),", "1382, 3661), Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick, 1444, 3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick,", "screen.assign_to(select.selection_screen_coord) select.type = select_point_act def select_rect(action, select_add, screen, screen2): \"\"\"Select", "def unload(action, unload_id): \"\"\"Unload a unit from a transport/bunker/nydus/etc.\"\"\" action.action_ui.cargo_panel.unit_index", "cmd_screen, 23, 3674), Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap, 23, 3674), Function.ability(16, \"Attack_AttackBuilding_screen\",", "on the current loaded units. 
) # Which argument types", "cmd_quick, 2016, 3662), Function.ability(135, \"BurrowUp_Ultralisk_quick\", cmd_quick, 1514, 3662), Function.ability(136, \"BurrowUp_Ultralisk_autocast\",", "name of the argument, also unique. sizes: The max+1 of", "cmd_screen, 3682), Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick, 421, 3682), Function.ability(95, \"Build_TechLab_Barracks_screen\", cmd_screen,", "\"Research_RavenRecalibratedExplosives_quick\", cmd_quick, 803), Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick, 2720), Function.ability(405, \"Research_Stimpack_quick\", cmd_quick,", "Function.ability(65, \"Build_Nexus_screen\", cmd_screen, 880), Function.ability(66, \"Build_Nuke_quick\", cmd_quick, 710), Function.ability(67, \"Build_NydusNetwork_screen\",", "command that needs a point on the screen.\"\"\" action_cmd =", "\"Train_Colossus_quick\", cmd_quick, 978), Function.ability(463, \"Train_Corruptor_quick\", cmd_quick, 1353), Function.ability(464, \"Train_Cyclone_quick\", cmd_quick,", "3674), Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap, 2048, 3674), Function.ability(18, \"Attack_Redirect_screen\", cmd_screen, 1682,", "space=True to line them all up nicely.\"\"\" return \"%s/%s (%s)\"", "Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick, 517, 3659), Function.ability(167, \"Cancel_StasisTrap_quick\", cmd_quick, 2535, 3659),", "Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick, 1853), Function.ability(480, \"Train_Mutalisk_quick\", cmd_quick, 1346), Function.ability(481, \"Train_Observer_quick\",", "\"Train_Ghost_quick\", cmd_quick, 562), Function.ability(469, \"Train_Hellbat_quick\", cmd_quick, 596), Function.ability(470, \"Train_Hellion_quick\", cmd_quick,", "None, None, None, args, None) def __hash__(self): # So it", "919), Function.ability(472, \"Train_Hydralisk_quick\", cmd_quick, 1345), Function.ability(473, \"Train_Immortal_quick\", cmd_quick, 979), Function.ability(474,", "1819), 
Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen, 261), Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen, 1825), Function.ability(189,", "Function.ability(118, \"BurrowUp_autocast\", autocast, 3662), Function.ability(119, \"BurrowUp_Baneling_quick\", cmd_quick, 1376, 3662), Function.ability(120,", "# Which ones need an ability? ABILITY_FUNCTIONS = {cmd_quick, cmd_screen,", "3687), Function.ability(183, \"Effect_CalldownMULE_screen\", cmd_screen, 171), Function.ability(184, \"Effect_CausticSpray_screen\", cmd_screen, 2324), Function.ability(185,", "207, 3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594), Function.ability(352, \"Research_AdvancedBallistics_quick\", cmd_quick, 805),", "import collections import numbers import six from pysc2.lib import point", "returns whether the function is valid. \"\"\" __slots__ = ()", "Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221, 3659), Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick, 1219, 3659),", "Function.ability(269, \"Harvest_Return_quick\", cmd_quick, 3667), Function.ability(270, \"Harvest_Return_Drone_quick\", cmd_quick, 1184, 3667), Function.ability(271,", "3665), Function.ability(455, \"Stop_Redirect_quick\", cmd_quick, 1691, 3665), Function.ability(456, \"Stop_Stop_quick\", cmd_quick, 4,", "cmd_quick, 1070, 3696), Function.ability(401, \"Research_PsiStorm_quick\", cmd_quick, 1126), Function.ability(402, \"Research_RavenCorvidReactor_quick\", cmd_quick,", "(self.id, self.name, list(self.sizes)) @classmethod def enum(cls, options): \"\"\"Create an ArgumentType", "cmd_quick, 3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864, 3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick,", "select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group: 
[TYPES.control_group_act, TYPES.control_group_id],", "Function.ability(198, \"Effect_Heal_screen\", cmd_screen, 386), Function.ability(199, \"Effect_Heal_autocast\", autocast, 386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\",", "\"Effect_ImmortalBarrier_quick\", cmd_quick, 2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast, 2328), Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen,", "a `dict` or an iterable is provided, the values will", "Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen, 1161), Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen, 1768), Function.ability(69, \"Build_PhotonCannon_screen\",", "\"Rally_Units_minimap\", cmd_minimap, 3673), Function.ability(337, \"Rally_Building_screen\", cmd_screen, 195, 3673), Function.ability(338, \"Rally_Building_minimap\",", "\"Research_Charge_quick\", cmd_quick, 1592), Function.ability(360, \"Research_ChitinousPlating_quick\", cmd_quick, 265), Function.ability(361, \"Research_CombatShield_quick\", cmd_quick,", "cmd_screen, 203, 3690), Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap, 203, 3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\",", "\"Hallucination_Colossus_quick\", cmd_quick, 148), Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick, 2389), Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick,", "a transport/nydus/command center. \"\"\" ___slots__ = () @classmethod def types(cls,", "point on the minimap. 
screen2: The second point for a", "FUNCTIONS_AVAILABLE = {f.id: f for f in FUNCTIONS if f.avail_fn}", "Function.ability(20, \"Scan_Move_minimap\", cmd_minimap, 19, 3674), Function.ability(21, \"Behavior_BuildingAttackOff_quick\", cmd_quick, 2082), Function.ability(22,", "\"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44), Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick, 766), Function.ability(374, \"Research_MuscularAugments_quick\", cmd_quick,", "\"Effect_Repair_Mule_screen\", cmd_screen, 78, 3685), Function.ability(223, \"Effect_Repair_Mule_autocast\", autocast, 78, 3685), Function.ability(224,", "above, this includes the sizes for screen and minimap. functions:", "cmd_quick, 1217, 3659), Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick, 2333, 3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\",", "Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865, 3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\", cmd_quick, 866, 3700),", "cmd_quick, 1253), Function.ability(451, \"Smart_screen\", cmd_screen, 1), Function.ability(452, \"Smart_minimap\", cmd_minimap, 1),", "\"Research_Burrow_quick\", cmd_quick, 1225), Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick, 1482), Function.ability(359, \"Research_Charge_quick\", cmd_quick,", "Function.ability(185, \"Effect_Charge_screen\", cmd_screen, 1819), Function.ability(186, \"Effect_Charge_autocast\", autocast, 1819), Function.ability(187, \"Effect_ChronoBoost_screen\",", "class Functions(object): \"\"\"Represents the full set of functions. Can't use", "to do when selecting a worker. build_queue_id: Which build queue", "\"Train_Reaper_quick\", cmd_quick, 561), Function.ability(489, \"Train_Roach_quick\", cmd_quick, 1351), Function.ability(490, \"Train_SCV_quick\", cmd_quick,", "\"name\", \"sizes\", \"fn\"])): \"\"\"Represents a single argument type. 
Attributes: id:", "ui action.\"\"\" return cls(id_, name, 0, 0, function_type, FUNCTION_TYPES[function_type], avail_fn)", "cmd_screen, 2324), Function.ability(185, \"Effect_Charge_screen\", cmd_screen, 1819), Function.ability(186, \"Effect_Charge_autocast\", autocast, 1819),", "\"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211, 3690), Function.ability(349, \"Rally_Nexus_screen\", cmd_screen, 207, 3690), Function.ability(350,", "Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen, 30, 3684), Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen, 26, 3684),", "len(self._func_list) # pylint: disable=line-too-long FUNCTIONS = Functions([ Function.ui_func(0, \"no_op\", no_op),", "\"Build_Reactor_quick\", cmd_quick, 3683), Function.ability(72, \"Build_Reactor_screen\", cmd_screen, 3683), Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick,", "Function.ability(41, \"Build_BanelingNest_screen\", cmd_screen, 1162), Function.ability(42, \"Build_Barracks_screen\", cmd_screen, 321), Function.ability(43, \"Build_Bunker_screen\",", "318), Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen, 3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen, 1694, 3691),", "Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick, 2333, 3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113, 3659),", "1392, 3662), Function.ability(140, \"Cancel_quick\", cmd_quick, 3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick, 2594,", "cmd_quick, 2709, 3659), Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick, 1449, 3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\",", "Function.ability(324, \"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725, 3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727, 3681),", "FUNCTION_TYPES for more details. 
Attributes: screen: A point on the", "3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick, 1382, 3661), Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick, 1444,", "\"Effect_Charge_autocast\", autocast, 1819), Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen, 261), Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen,", "[[0], [23, 38]]. \"\"\" __slots__ = () @classmethod def all_arguments(cls,", "agent will use. name: The name of the function. Should", "sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), # Depends on current build", "Function.ability(190, \"Effect_EMP_screen\", cmd_screen, 1628), Function.ability(191, \"Effect_Explode_quick\", cmd_quick, 42), Function.ability(192, \"Effect_Feedback_screen\",", "\"UnloadAll_NydusWorm_quick\", cmd_quick, 2371, 3664), Function.ability(516, \"UnloadAllAt_screen\", cmd_screen, 3669), Function.ability(517, \"UnloadAllAt_minimap\",", "Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864, 3700), Function.ability(421,", "\"BurrowUp_Queen_quick\", cmd_quick, 1435, 3662), Function.ability(129, \"BurrowUp_Queen_autocast\", autocast, 1435, 3662), Function.ability(130,", "1396, 3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast, 1396, 3662), Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick,", "Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick, 764), Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick, 1097), Function.ability(365, \"Research_GlialRegeneration_quick\",", "\"Morph_SporeCrawlerRoot_screen\", cmd_screen, 1731, 3680), Function.ability(317, \"Morph_SiegeMode_quick\", cmd_quick, 388), Function.ability(318, \"Morph_SupplyDepot_Lower_quick\",", "Function.ability(266, 
\"Harvest_Gather_Mule_screen\", cmd_screen, 166, 3666), Function.ability(267, \"Harvest_Gather_Probe_screen\", cmd_screen, 298, 3666),", "cmd_screen, 1524, 3678), Function.ability(280, \"Land_Starport_screen\", cmd_screen, 522, 3678), Function.ability(281, \"Lift_quick\",", "pysc2.lib import point from s2clientprotocol import spatial_pb2 as sc_spatial from", "3661), Function.ability(111, \"BurrowDown_Ravager_quick\", cmd_quick, 2340, 3661), Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick, 1386,", "select_larva, lambda obs: obs.player_common.larva_count > 0), Function.ui_func(10, \"unload\", unload, lambda", "1093), Function.ability(367, \"Research_GraviticDrive_quick\", cmd_quick, 1094), Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick, 1282), Function.ability(369,", "\"Cancel_Nuke_quick\", cmd_quick, 1623, 3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\", cmd_quick, 1730, 3659), Function.ability(165,", "Function.ability(473, \"Train_Immortal_quick\", cmd_quick, 979), Function.ability(474, \"Train_Infestor_quick\", cmd_quick, 1352), Function.ability(475, \"Train_Liberator_quick\",", "1157), Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen, 1160), Function.ability(62, \"Build_Interceptors_quick\", cmd_quick, 1042), Function.ability(63,", "None) def __hash__(self): # So it can go in a", "cmd_quick, 1068, 3696), Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069, 3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\",", "scalar in range(value).\"\"\" return cls(-1, \"<none>\", (value,), lambda a: a[0])", "46), Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick, 1223), Function.ability(381, \"Research_ProtossAirArmor_quick\", cmd_quick, 3692), Function.ability(382,", "Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick, 1455), Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick, 1454), Function.ability(378, 
\"Research_PersonalCloaking_quick\",", "\"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684, 3675), Function.ability(237, \"Effect_Stim_Marine_quick\", cmd_quick, 380, 3675), Function.ability(238,", "used in ValidActions.\"\"\" return cls(id_, name, sizes, None) class Arguments(collections.namedtuple(\"Arguments\",", "\"Build_FleetBeacon_screen\", cmd_screen, 885), Function.ability(55, \"Build_Forge_screen\", cmd_screen, 884), Function.ability(56, \"Build_FusionCore_screen\", cmd_screen,", "cmd_screen, 2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen, 2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen, 144),", "2362), Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick, 390), Function.ability(323, \"Morph_Uproot_quick\", cmd_quick, 3681), Function.ability(324,", "\"Train_Sentry_quick\", cmd_quick, 921), Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick, 591), Function.ability(493, \"Train_Stalker_quick\", cmd_quick,", "Function.ability(72, \"Build_Reactor_screen\", cmd_screen, 3683), Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick, 422, 3683), Function.ability(74,", "\"Build_EngineeringBay_screen\", cmd_screen, 322), Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen, 1156), Function.ability(52, \"Build_Extractor_screen\", cmd_screen,", "3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221, 3659), Function.ability(151, \"Cancel_MorphHive_quick\", cmd_quick, 1219,", "1978), Function.ability(302, \"Morph_Hive_quick\", cmd_quick, 1218), Function.ability(303, \"Morph_Lair_quick\", cmd_quick, 1216), Function.ability(304,", "\"Load_NydusNetwork_screen\", cmd_screen, 1437, 3668), Function.ability(291, \"Load_NydusWorm_screen\", cmd_screen, 2370, 3668), Function.ability(292,", "2550, 3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375),", 
"cmd_quick, 761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44), Function.ability(373, \"Research_MagFieldLaunchers_quick\", cmd_quick, 766),", "3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap, 212, 3673), Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen, 199,", "\"\"\"Select an idle worker.\"\"\" action.action_ui.select_idle_worker.type = select_worker def select_army(action, select_add):", "1454), Function.ability(378, \"Research_PersonalCloaking_quick\", cmd_quick, 820), Function.ability(379, \"Research_PhoenixAnionPulseCrystals_quick\", cmd_quick, 46), Function.ability(380,", "3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\", cmd_quick, 2550, 3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376),", "What to do with the unit at the point. select_add:", "cmd_quick, 2720), Function.ability(405, \"Research_Stimpack_quick\", cmd_quick, 730), Function.ability(406, \"Research_TerranInfantryArmor_quick\", cmd_quick, 3697),", "cmd_quick, 2354, 3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\", cmd_quick, 1373, 3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\",", "Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick, 732), Function.ability(363, \"Research_DrillingClaws_quick\", cmd_quick, 764), Function.ability(364, \"Research_ExtendedThermalLance_quick\",", "\"Effect_ParasiticBomb_screen\", cmd_screen, 2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\", cmd_screen, 2162), Function.ability(217, \"Effect_PointDefenseDrone_screen\", cmd_screen,", "1683, 3675), Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen, 255), Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen, 2358),", "\"Load_WarpPrism_screen\", cmd_screen, 911, 3668), Function.ability(294, \"LoadAll_quick\", cmd_quick, 3663), Function.ability(295, \"LoadAll_CommandCenter_quick\",", "Function.ability(305, 
\"Morph_LiberatorAGMode_screen\", cmd_screen, 2558), Function.ability(306, \"Morph_Lurker_quick\", cmd_quick, 2332), Function.ability(307, \"Morph_LurkerDen_quick\",", "self.args)) class Functions(object): \"\"\"Represents the full set of functions. Can't", "types. args: A list of the types of args passed", "\"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683, 3675), Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen, 255), Function.ability(240, \"Effect_TacticalJump_screen\",", "select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), # Depends", "\"Train_Tempest_quick\", cmd_quick, 955), Function.ability(496, \"Train_Thor_quick\", cmd_quick, 594), Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick,", "Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658, 3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698), Function.ability(411,", "cmd_quick, 3683), Function.ability(72, \"Build_Reactor_screen\", cmd_screen, 3683), Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick, 422,", "3675), Function.ability(235, \"Effect_Stim_Marauder_quick\", cmd_quick, 253, 3675), Function.ability(236, \"Effect_Stim_Marauder_Redirect_quick\", cmd_quick, 1684,", "Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451, 3659), Function.ability(160, \"Cancel_MorphRavager_quick\", cmd_quick, 2331, 3659),", "921), Function.ability(492, \"Train_SiegeTank_quick\", cmd_quick, 591), Function.ability(493, \"Train_Stalker_quick\", cmd_quick, 917), Function.ability(494,", "\"Train_DarkTemplar_quick\", cmd_quick, 920), Function.ability(466, \"Train_Disruptor_quick\", cmd_quick, 994), Function.ability(467, \"Train_Drone_quick\", cmd_quick,", "cmd_quick, 250, 3659), Function.ability(163, 
\"Cancel_Nuke_quick\", cmd_quick, 1623, 3659), Function.ability(164, \"Cancel_SpineCrawlerRoot_quick\",", "within a rectangle.\"\"\" select = action.action_feature_layer.unit_selection_rect out_rect = select.selection_screen_coord.add() screen_rect", "now or later. control_group_act: What to do with the control", "Function.ability(301, \"Morph_Hellion_quick\", cmd_quick, 1978), Function.ability(302, \"Morph_Hive_quick\", cmd_quick, 1218), Function.ability(303, \"Morph_Lair_quick\",", "cmd_screen, 249), Function.ability(213, \"Effect_NukeCalldown_screen\", cmd_screen, 1622), Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen, 2146),", "\"\"\" if isinstance(arguments, dict): arguments = Arguments(**arguments) elif not isinstance(arguments,", "Function.ability(364, \"Research_ExtendedThermalLance_quick\", cmd_quick, 1097), Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick, 216), Function.ability(366, \"Research_GraviticBooster_quick\",", "with. select_point_act: What to do with the unit at the", "\"Build_Reactor_Starport_quick\", cmd_quick, 488, 3683), Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen, 488, 3683), Function.ability(79,", "cmd_quick, 1847), Function.ability(309, \"Morph_OrbitalCommand_quick\", cmd_quick, 1516), Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick, 2708),", "Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen, 318), Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen, 3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\",", "623), Function.ability(461, \"Train_Carrier_quick\", cmd_quick, 948), Function.ability(462, \"Train_Colossus_quick\", cmd_quick, 978), Function.ability(463,", "License. \"\"\"Define the static list of types and actions for", "Google Inc. All Rights Reserved. 
# # Licensed under the", "\"BurrowUp_Ultralisk_autocast\", autocast, 1514, 3662), Function.ability(137, \"BurrowUp_WidowMine_quick\", cmd_quick, 2097, 3662), Function.ability(138,", "cmd_screen, 1036), Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen, 2346), Function.ability(220, \"Effect_Repair_screen\", cmd_screen, 3685),", "Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen, 885), Function.ability(55, \"Build_Forge_screen\", cmd_screen, 884), Function.ability(56, \"Build_FusionCore_screen\",", "3661), Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick, 1390, 3661), Function.ability(117, \"BurrowUp_quick\", cmd_quick, 3662),", "to pass to sc2. general_id: 0 for normal abilities, and", "the action should be done now or later. control_group_act: What", "obs.player_common.army_count > 0), Function.ui_func(8, \"select_warp_gates\", select_warp_gates, lambda obs: obs.player_common.warp_gate_count >", "the agent will use. name: The name of the function.", "Function.ability(86, \"Build_Spire_screen\", cmd_screen, 1158), Function.ability(87, \"Build_SporeCrawler_screen\", cmd_screen, 1167), Function.ability(88, \"Build_Stargate_screen\",", "\"Cancel_MorphThorExplosiveMode_quick\", cmd_quick, 2365, 3659), Function.ability(162, \"Cancel_NeuralParasite_quick\", cmd_quick, 250, 3659), Function.ability(163,", "cmd_screen, 199, 3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap, 199, 3673), Function.ability(343, \"Rally_Workers_screen\",", "cmd_screen: [TYPES.queued, TYPES.screen], cmd_minimap: [TYPES.queued, TYPES.minimap], autocast: [], } #", "1063, 3695), Function.ability(396, \"Research_ProtossGroundWeaponsLevel3_quick\", cmd_quick, 1064, 3695), Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick,", "3673), Function.ability(343, \"Rally_Workers_screen\", cmd_screen, 3690), Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap, 3690), Function.ability(345,", "types: A namedtuple of the 
types that the functions require.", "3690), Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap, 207, 3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594),", "cmd_quick, 1998), Function.ability(301, \"Morph_Hellion_quick\", cmd_quick, 1978), Function.ability(302, \"Morph_Hive_quick\", cmd_quick, 1218),", "1416), Function.ability(508, \"TrainWarp_Sentry_screen\", cmd_screen, 1418), Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen, 1414), Function.ability(510,", "\"Build_Refinery_screen\", cmd_screen, 320), Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen, 1165), Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen,", "cmd_quick, 422, 3683), Function.ability(74, \"Build_Reactor_Barracks_screen\", cmd_screen, 422, 3683), Function.ability(75, \"Build_Reactor_Factory_quick\",", "2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast, 2328), Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen, 247), Function.ability(204,", "Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211, 3690), Function.ability(349, \"Rally_Nexus_screen\", cmd_screen, 207, 3690),", "= { False: {cmd_quick, autocast}, True: {cmd_screen, cmd_minimap, autocast}} always", "id, which is what the agent will use. 
name: The", "unit from the multi-unit selection.\"\"\" select = action.action_ui.multi_panel select.type =", "396, 3669), Function.ability(519, \"UnloadAllAt_Medivac_minimap\", cmd_minimap, 396, 3669), Function.ability(520, \"UnloadAllAt_Overlord_screen\", cmd_screen,", "function_type in ABILITY_FUNCTIONS return cls(id_, name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type],", "cmd_screen, 1694, 3691), Function.ability(47, \"Build_CreepTumor_Tumor_screen\", cmd_screen, 1733, 3691), Function.ability(48, \"Build_CyberneticsCore_screen\",", "\"Morph_Uproot_quick\", cmd_quick, 3681), Function.ability(324, \"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725, 3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\",", "under the License is distributed on an \"AS-IS\" BASIS, #", "cmd_quick, 1516), Function.ability(310, \"Morph_OverlordTransport_quick\", cmd_quick, 2708), Function.ability(311, \"Morph_Overseer_quick\", cmd_quick, 1448),", "1065, 3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066, 3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick,", "@classmethod def enum(cls, options): \"\"\"Create an ArgumentType where you choose", "Function.ability(131, \"BurrowUp_Ravager_autocast\", autocast, 2342, 3662), Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick, 1388, 3662),", "\"Build_TechLab_Starport_quick\", cmd_quick, 487, 3682), Function.ability(99, \"Build_TechLab_Starport_screen\", cmd_screen, 487, 3682), Function.ability(100,", "\"Behavior_HoldFireOn_quick\", cmd_quick, 3688), Function.ability(35, \"Behavior_HoldFireOn_Ghost_quick\", cmd_quick, 36, 3688), Function.ability(36, \"Behavior_HoldFireOn_Lurker_quick\",", "\"select_rect\", select_rect), Function.ui_func(4, \"select_control_group\", control_group), Function.ui_func(5, \"select_unit\", select_unit, lambda obs:", "cmd_quick, 174, 3659), Function.ability(148, \"Cancel_LockOn_quick\", cmd_quick, 2354, 
3659), Function.ability(149, \"Cancel_MorphBroodlord_quick\",", "cmd_screen, 207, 3690), Function.ability(350, \"Rally_Nexus_minimap\", cmd_minimap, 207, 3690), Function.ability(351, \"Research_AdeptResonatingGlaives_quick\",", "\"ValidActions\", [\"types\", \"functions\"])): \"\"\"The set of types and functions that", "cmd_screen, 1418), Function.ability(509, \"TrainWarp_Stalker_screen\", cmd_screen, 1414), Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen, 1413),", "\"Build_DarkShrine_screen\", cmd_screen, 891), Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen, 322), Function.ability(51, \"Build_EvolutionChamber_screen\", cmd_screen,", "Function.ability(42, \"Build_Barracks_screen\", cmd_screen, 321), Function.ability(43, \"Build_Bunker_screen\", cmd_screen, 324), Function.ability(44, \"Build_CommandCenter_screen\",", "<filename>pysc2/lib/actions.py # Copyright 2017 Google Inc. All Rights Reserved. #", "Function.ability(335, \"Rally_Units_screen\", cmd_screen, 3673), Function.ability(336, \"Rally_Units_minimap\", cmd_minimap, 3673), Function.ability(337, \"Rally_Building_screen\",", "cmd_screen, 2338), Function.ability(190, \"Effect_EMP_screen\", cmd_screen, 1628), Function.ability(191, \"Effect_Explode_quick\", cmd_quick, 42),", "Function.ability(268, \"Harvest_Gather_SCV_screen\", cmd_screen, 295, 3666), Function.ability(269, \"Harvest_Return_quick\", cmd_quick, 3667), Function.ability(270,", "autocast, 1384, 3662), Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick, 1446, 3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\",", "386), Function.ability(199, \"Effect_Heal_autocast\", autocast, 386), Function.ability(200, \"Effect_HunterSeekerMissile_screen\", cmd_screen, 169), Function.ability(201,", "\"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864, 3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865, 3700), Function.ability(422,", "to target in a 
transport/nydus/command center. \"\"\" ___slots__ = ()", "def __str__(self): return self.str() def str(self, space=False): \"\"\"String version. Set", "cmd_quick, 312, 3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308, 3671), Function.ability(174, \"Cancel_QueuePasive_quick\",", "3683), Function.ability(72, \"Build_Reactor_screen\", cmd_screen, 3683), Function.ability(73, \"Build_Reactor_Barracks_quick\", cmd_quick, 422, 3683),", "Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen, 3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen, 1694, 3691), Function.ability(47,", "[TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva: [], unload: [TYPES.unload_id], build_queue:", "= functions self._func_dict = {f.name: f for f in functions}", "Function.ability(371, \"Research_InfernalPreigniter_quick\", cmd_quick, 761), Function.ability(372, \"Research_InterceptorGravitonCatapult_quick\", cmd_quick, 44), Function.ability(373, \"Research_MagFieldLaunchers_quick\",", "cmd_quick, 1691, 3665), Function.ability(456, \"Stop_Stop_quick\", cmd_quick, 4, 3665), Function.ability(457, \"Train_Adept_quick\",", "if isinstance(key, numbers.Number): return self._func_list[key] return self._func_dict[key] def __iter__(self): return", "an ArgumentType with a single scalar in range(value).\"\"\" return cls(-1,", "The ability id to pass to sc2. 
general_id: 0 for", "\"Morph_SpineCrawlerUproot_quick\", cmd_quick, 1725, 3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727, 3681), Function.ability(326,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "\"Build_RoboticsBay_screen\", cmd_screen, 892), Function.ability(82, \"Build_RoboticsFacility_screen\", cmd_screen, 893), Function.ability(83, \"Build_SensorTower_screen\", cmd_screen,", "= lambda _: True class Function(collections.namedtuple( \"Function\", [\"id\", \"name\", \"ability_id\",", "\"Harvest_Return_SCV_quick\", cmd_quick, 296, 3667), Function.ability(274, \"HoldPosition_quick\", cmd_quick, 18), Function.ability(275, \"Land_screen\",", "cmd_quick, 2095, 3661), Function.ability(116, \"BurrowDown_Zergling_quick\", cmd_quick, 1390, 3661), Function.ability(117, \"BurrowUp_quick\",", "Function.ability(124, \"BurrowUp_Infestor_quick\", cmd_quick, 1446, 3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396, 3662),", "Function.ability(336, \"Rally_Units_minimap\", cmd_minimap, 3673), Function.ability(337, \"Rally_Building_screen\", cmd_screen, 195, 3673), Function.ability(338,", "\"BurrowDown_Zergling_quick\", cmd_quick, 1390, 3661), Function.ability(117, \"BurrowUp_quick\", cmd_quick, 3662), Function.ability(118, \"BurrowUp_autocast\",", "1313, 3703), Function.ability(436, \"Research_ZergFlyerAttackLevel3_quick\", cmd_quick, 1314, 3703), Function.ability(437, \"Research_ZergGroundArmor_quick\", cmd_quick,", "do when selecting a worker. 
build_queue_id: Which build queue index", "vs add to queue) control_group_act=ArgumentType.enum([ sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal,", "type_ in six.iteritems(kwargs)} return cls(**named) # The list of known", "299, 3667), Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick, 296, 3667), Function.ability(274, \"HoldPosition_quick\", cmd_quick,", "3661), Function.ability(114, \"BurrowDown_Ultralisk_quick\", cmd_quick, 1512, 3661), Function.ability(115, \"BurrowDown_WidowMine_quick\", cmd_quick, 2095,", "\"Train_VoidRay_quick\", cmd_quick, 950), Function.ability(501, \"Train_WarpPrism_quick\", cmd_quick, 976), Function.ability(502, \"Train_WidowMine_quick\", cmd_quick,", "Arguments of the possible Types.\"\"\" named = {name: type_._replace(id=Arguments._fields.index(name), name=name)", "unknown at this time. \"\"\"Create an ArgumentType that is represented", "cmd_quick, 308, 3671), Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick, 1831, 3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\",", "3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen, 211, 3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\", cmd_minimap, 211,", "a unit by id. select_unit_id: Which unit to select by", "select_worker): \"\"\"Select an idle worker.\"\"\" action.action_ui.select_idle_worker.type = select_worker def select_army(action,", "803), Function.ability(404, \"Research_ShadowStrike_quick\", cmd_quick, 2720), Function.ability(405, \"Research_Stimpack_quick\", cmd_quick, 730), Function.ability(406,", "replace it. 
select_unit_act: What to do when selecting a unit", "894), Function.ability(49, \"Build_DarkShrine_screen\", cmd_screen, 891), Function.ability(50, \"Build_EngineeringBay_screen\", cmd_screen, 322), Function.ability(51,", "in ABILITY_FUNCTIONS return cls(id_, name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None)", "Function.ability(176, \"Effect_Abduct_screen\", cmd_screen, 2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544), Function.ability(178, \"Effect_AutoTurret_screen\",", "1766), Function.ability(297, \"Morph_BroodLord_quick\", cmd_quick, 1372), Function.ability(298, \"Morph_Gateway_quick\", cmd_quick, 1520), Function.ability(299,", "= Arguments(*arguments) return cls(function, arguments) class ValidActions(collections.namedtuple( \"ValidActions\", [\"types\", \"functions\"])):", "cmd_screen, 1183, 3666), Function.ability(266, \"Harvest_Gather_Mule_screen\", cmd_screen, 166, 3666), Function.ability(267, \"Harvest_Gather_Probe_screen\",", "cmd_screen, 1159), Function.ability(103, \"BurrowDown_quick\", cmd_quick, 3661), Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick, 1374,", "\"Cancel_Last_quick\", cmd_quick, 3671), Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick, 1038, 3671), Function.ability(170, \"Cancel_Queue1_quick\",", "which is what the agent will use. name: The name", "to the selection or replace it. select_unit_act: What to do", "1594), Function.ability(352, \"Research_AdvancedBallistics_quick\", cmd_quick, 805), Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick, 790), Function.ability(354,", "governing permissions and # limitations under the License. 
\"\"\"Define the", "3679), Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick, 417, 3679), Function.ability(284, \"Lift_Factory_quick\", cmd_quick, 485,", "3690), Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap, 3690), Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen, 203, 3690),", "cmd_quick, 655), Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick, 1455), Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick, 1454),", "cmd_quick, 1446, 3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396, 3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\",", "cmd_quick, 948), Function.ability(462, \"Train_Colossus_quick\", cmd_quick, 978), Function.ability(463, \"Train_Corruptor_quick\", cmd_quick, 1353),", "action.\"\"\" return cls(id_, name, 0, 0, function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod", "scalar(cls, value): \"\"\"Create an ArgumentType with a single scalar in", "Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen, 2146), Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen, 2542), Function.ability(216, \"Effect_PhotonOvercharge_screen\",", "Function.ability(213, \"Effect_NukeCalldown_screen\", cmd_screen, 1622), Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen, 2146), Function.ability(215, \"Effect_ParasiticBomb_screen\",", "in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id: f for f in FUNCTIONS", "\"Function\", [\"id\", \"name\", \"ability_id\", \"general_id\", \"function_type\", \"args\", \"avail_fn\"])): \"\"\"Represents a", "\"Research_ZerglingMetabolicBoost_quick\", cmd_quick, 1253), Function.ability(451, \"Smart_screen\", cmd_screen, 1), Function.ability(452, \"Smart_minimap\", cmd_minimap,", "the screen. minimap: A point on the minimap. 
screen2: The", "Function.ability(356, \"Research_Blink_quick\", cmd_quick, 1593), Function.ability(357, \"Research_Burrow_quick\", cmd_quick, 1225), Function.ability(358, \"Research_CentrifugalHooks_quick\",", "def select_warp_gates(action, select_add): \"\"\"Select all warp gates.\"\"\" action.action_ui.select_warp_gates.selection_add = select_add", "\"Harvest_Return_Mule_quick\", cmd_quick, 167, 3667), Function.ability(272, \"Harvest_Return_Probe_quick\", cmd_quick, 299, 3667), Function.ability(273,", "\"Research_GroovedSpines_quick\", cmd_quick, 1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick, 650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick,", "Function.ability(407, \"Research_TerranInfantryArmorLevel1_quick\", cmd_quick, 656, 3697), Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657, 3697),", "FUNCTION_TYPES = { no_op: [], move_camera: [TYPES.minimap], select_point: [TYPES.select_point_act, TYPES.screen],", "select_point(action, select_point_act, screen): \"\"\"Select a unit at a point.\"\"\" select", "Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick, 416, 3663), Function.ability(296, \"Morph_Archon_quick\", cmd_quick, 1766), Function.ability(297,", "Version 2.0 (the \"License\"); # you may not use this", "Function.ability(438, \"Research_ZergGroundArmorLevel1_quick\", cmd_quick, 1189, 3704), Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190, 3704),", "the minimap.\"\"\" action_cmd = action.action_feature_layer.unit_command action_cmd.ability_id = ability_id action_cmd.queue_command =", "into something more meaningful to be set in the protos", "\"Effect_PsiStorm_screen\", cmd_screen, 1036), Function.ability(219, \"Effect_PurificationNova_screen\", cmd_screen, 2346), Function.ability(220, \"Effect_Repair_screen\", cmd_screen,", "import point from s2clientprotocol import spatial_pb2 as sc_spatial from s2clientprotocol", 
"\"Land_Barracks_screen\", cmd_screen, 554, 3678), Function.ability(277, \"Land_CommandCenter_screen\", cmd_screen, 419, 3678), Function.ability(278,", "Function.ability(28, \"Behavior_CloakOn_Ghost_quick\", cmd_quick, 382, 3676), Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693), Function.ability(30,", "[ \"screen\", \"minimap\", \"screen2\", \"queued\", \"control_group_act\", \"control_group_id\", \"select_point_act\", \"select_add\", \"select_unit_act\",", "Function.ability(85, \"Build_SpineCrawler_screen\", cmd_screen, 1166), Function.ability(86, \"Build_Spire_screen\", cmd_screen, 1158), Function.ability(87, \"Build_SporeCrawler_screen\",", "cmd_screen, 1162), Function.ability(42, \"Build_Barracks_screen\", cmd_screen, 321), Function.ability(43, \"Build_Bunker_screen\", cmd_screen, 324),", "select_army: [TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva: [], unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id],", "Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick, 650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804), Function.ability(371, \"Research_InfernalPreigniter_quick\",", "cmd_quick, 1223), Function.ability(381, \"Research_ProtossAirArmor_quick\", cmd_quick, 3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565,", "to target. unload_id: Which unit to target in a transport/nydus/command", "current selection. 
select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add, sc_ui.ActionSelectIdleWorker.All, sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), #", "\"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066, 3694), Function.ability(392, \"Research_ProtossGroundArmorLevel3_quick\", cmd_quick, 1067, 3694), Function.ability(393,", "__future__ import absolute_import from __future__ import division from __future__ import", "A point on the minimap. screen2: The second point for", "Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062, 3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063, 3695),", "a rectangle. This is needed so that no function takes", "cmd_quick, 217), Function.ability(428, \"Research_WarpGate_quick\", cmd_quick, 1568), Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick, 3702),", "3661), Function.ability(107, \"BurrowDown_Infestor_quick\", cmd_quick, 1444, 3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394,", "3671), Function.ability(176, \"Effect_Abduct_screen\", cmd_screen, 2067), Function.ability(177, \"Effect_AdeptPhaseShift_screen\", cmd_screen, 2544), Function.ability(178,", "def select_army(action, select_add): \"\"\"Select the entire army.\"\"\" action.action_ui.select_army.selection_add = select_add", "2391), Function.ability(249, \"Hallucination_Archon_quick\", cmd_quick, 146), Function.ability(250, \"Hallucination_Colossus_quick\", cmd_quick, 148), Function.ability(251,", "cmd_quick, 1372), Function.ability(298, \"Morph_Gateway_quick\", cmd_quick, 1520), Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick, 1220),", "to store for the action function. 
arguments: The values to", "\"Build_Nexus_screen\", cmd_screen, 880), Function.ability(66, \"Build_Nuke_quick\", cmd_quick, 710), Function.ability(67, \"Build_NydusNetwork_screen\", cmd_screen,", "the dimensions this argument takes. fn: The function to convert", "Function.ability(251, \"Hallucination_Disruptor_quick\", cmd_quick, 2389), Function.ability(252, \"Hallucination_HighTemplar_quick\", cmd_quick, 150), Function.ability(253, \"Hallucination_Immortal_quick\",", "863, 3699), Function.ability(418, \"Research_TerranStructureArmorUpgrade_quick\", cmd_quick, 651), Function.ability(419, \"Research_TerranVehicleAndShipPlating_quick\", cmd_quick, 3700),", "Function.ability(399, \"Research_ProtossShieldsLevel2_quick\", cmd_quick, 1069, 3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070, 3696),", "cmd_screen, 886), Function.ability(102, \"Build_UltraliskCavern_screen\", cmd_screen, 1159), Function.ability(103, \"BurrowDown_quick\", cmd_quick, 3661),", "Function.ability(475, \"Train_Liberator_quick\", cmd_quick, 626), Function.ability(476, \"Train_Marauder_quick\", cmd_quick, 563), Function.ability(477, \"Train_Marine_quick\",", "658, 3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698), Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 652,", "TYPES.control_group_id], select_idle_worker: [TYPES.select_worker], select_army: [TYPES.select_add], select_warp_gates: [TYPES.select_add], select_larva: [], unload:", "652, 3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653, 3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick,", "80), Function.ability(459, \"Train_Banshee_quick\", cmd_quick, 621), Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick, 623), Function.ability(461,", "cmd_quick, 2330), Function.ability(314, \"Morph_Root_screen\", cmd_screen, 3680), Function.ability(315, 
\"Morph_SpineCrawlerRoot_screen\", cmd_screen, 1729,", "by applicable law or agreed to in writing, software #", "a Function to be used in ValidActions.\"\"\" return cls(id_, name,", "Function.ability(16, \"Attack_AttackBuilding_screen\", cmd_screen, 2048, 3674), Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap, 2048, 3674),", "cmd_quick, 864, 3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865, 3700), Function.ability(422, \"Research_TerranVehicleAndShipPlatingLevel3_quick\",", "3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap, 913, 3669), ]) # pylint: enable=line-too-long", "FUNCTIONS: if func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k: frozenset(v)", "= action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type = select_point_act def select_rect(action, select_add, screen,", "single argument type. Attributes: id: The argument id. 
This is", "that is represented by a point.Point.\"\"\" return cls(-1, \"<none>\", (0,", "cmd_minimap, 203, 3690), Function.ability(347, \"Rally_Hatchery_Workers_screen\", cmd_screen, 211, 3690), Function.ability(348, \"Rally_Hatchery_Workers_minimap\",", "cmd_quick, 1191, 3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick,", "def ui_func(cls, id_, name, function_type, avail_fn=always): \"\"\"Define a function representing", "cmd_quick, 657, 3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658, 3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\",", "cmd_quick, 1353), Function.ability(464, \"Train_Cyclone_quick\", cmd_quick, 597), Function.ability(465, \"Train_DarkTemplar_quick\", cmd_quick, 920),", "1219, 3659), Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick, 1217, 3659), Function.ability(153, \"Cancel_MorphLurker_quick\", cmd_quick,", "the empty proto field. def select_unit(action, select_unit_act, select_unit_id): \"\"\"Select a", "Function.ui_func(6, \"select_idle_worker\", select_idle_worker, lambda obs: obs.player_common.idle_worker_count > 0), Function.ui_func(7, \"select_army\",", "520, 3678), Function.ability(279, \"Land_OrbitalCommand_screen\", cmd_screen, 1524, 3678), Function.ability(280, \"Land_Starport_screen\", cmd_screen,", "596), Function.ability(470, \"Train_Hellion_quick\", cmd_quick, 595), Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick, 919), Function.ability(472,", "can go in a set(). return self.id def __str__(self): return", "FUNCTION_TYPES for how to construct the sc2 action proto out", "target. 
unload_id: Which unit to target in a transport/nydus/command center.", "320), Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen, 1165), Function.ability(81, \"Build_RoboticsBay_screen\", cmd_screen, 892), Function.ability(82,", "\"Morph_Gateway_quick\", cmd_quick, 1520), Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick, 1220), Function.ability(300, \"Morph_Hellbat_quick\", cmd_quick,", "\"Train_Infestor_quick\", cmd_quick, 1352), Function.ability(475, \"Train_Liberator_quick\", cmd_quick, 626), Function.ability(476, \"Train_Marauder_quick\", cmd_quick,", "Function.ability(439, \"Research_ZergGroundArmorLevel2_quick\", cmd_quick, 1190, 3704), Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191, 3704),", "None, None, args, None) def __hash__(self): # So it can", "3685), Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast, 316, 3685), Function.ability(226, \"Effect_Salvage_quick\", cmd_quick, 32),", "321), Function.ability(43, \"Build_Bunker_screen\", cmd_screen, 324), Function.ability(44, \"Build_CommandCenter_screen\", cmd_screen, 318), Function.ability(45,", "that the functions require. Unlike TYPES above, this includes the", "control_group_id def unload(action, unload_id): \"\"\"Unload a unit from a transport/bunker/nydus/etc.\"\"\"", "Function.ability(181, \"Effect_Blink_Stalker_screen\", cmd_screen, 1442, 3687), Function.ability(182, \"Effect_ShadowStride_screen\", cmd_screen, 2700, 3687),", "2114), Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick, 154), Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick, 156), Function.ability(257,", "use. name: The name of the function. 
Should be unique.", "Function.ability(68, \"Build_NydusWorm_screen\", cmd_screen, 1768), Function.ability(69, \"Build_PhotonCannon_screen\", cmd_screen, 887), Function.ability(70, \"Build_Pylon_screen\",", "\"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364), Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick, 2362), Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick,", "0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k: frozenset(v) for k, v in", "Arguments(*arguments) return cls(function, arguments) class ValidActions(collections.namedtuple( \"ValidActions\", [\"types\", \"functions\"])): \"\"\"The", "3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113, 3659), Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick, 1848,", "Function.ability(393, \"Research_ProtossGroundWeapons_quick\", cmd_quick, 3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062, 3695), Function.ability(395,", "cmd_minimap, 2048, 3674), Function.ability(18, \"Attack_Redirect_screen\", cmd_screen, 1682, 3674), Function.ability(19, \"Scan_Move_screen\",", "% (str(self.id).rjust(space and 4), self.name.ljust(space and 50), \"; \".join(str(a) for", "\"Effect_Spray_Terran_screen\", cmd_screen, 26, 3684), Function.ability(233, \"Effect_Spray_Zerg_screen\", cmd_screen, 28, 3684), Function.ability(234,", "cmd_screen, 1408, 3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408, 3669), Function.ability(522, \"UnloadAllAt_WarpPrism_screen\",", "594), Function.ability(497, \"Train_Ultralisk_quick\", cmd_quick, 1348), Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick, 624), Function.ability(499,", "all_arguments(cls, function, arguments): \"\"\"Helper function for creating `FunctionCall`s with `Arguments`.", "ones need an ability? 
ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap, autocast}", "\"Hallucination_VoidRay_quick\", cmd_quick, 160), Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick, 162), Function.ability(260, \"Hallucination_Zealot_quick\", cmd_quick,", "Function.ability(13, \"Attack_minimap\", cmd_minimap, 3674), Function.ability(14, \"Attack_Attack_screen\", cmd_screen, 23, 3674), Function.ability(15,", "cmd_quick, 653, 3698), Function.ability(413, \"Research_TerranInfantryWeaponsLevel3_quick\", cmd_quick, 654, 3698), Function.ability(414, \"Research_TerranShipWeapons_quick\",", "\"ability_id\", \"general_id\", \"function_type\", \"args\", \"avail_fn\"])): \"\"\"Represents a function action. Attributes:", "Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530), Function.ability(331, \"Move_screen\", cmd_screen, 16), Function.ability(332, \"Move_minimap\",", "False: {cmd_quick, autocast}, True: {cmd_screen, cmd_minimap, autocast}} always = lambda", "cmd_screen, 2346), Function.ability(220, \"Effect_Repair_screen\", cmd_screen, 3685), Function.ability(221, \"Effect_Repair_autocast\", autocast, 3685),", "\"TrainWarp_Zealot_screen\", cmd_screen, 1413), Function.ability(511, \"UnloadAll_quick\", cmd_quick, 3664), Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick,", "__init__(self, functions): self._func_list = functions self._func_dict = {f.name: f for", "for k, v in six.iteritems(ABILITY_IDS)} FUNCTIONS_AVAILABLE = {f.id: f for", "action. Can either be an `Arguments` object, a `dict`, or", "a function action. 
Attributes: id: The function id, which is", "Function.ability(275, \"Land_screen\", cmd_screen, 3678), Function.ability(276, \"Land_Barracks_screen\", cmd_screen, 554, 3678), Function.ability(277,", "cmd_quick, 3661), Function.ability(104, \"BurrowDown_Baneling_quick\", cmd_quick, 1374, 3661), Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick,", "3669), Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408, 3669), Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913,", "3671), Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick, 1831, 3671), Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833,", "3678), Function.ability(281, \"Lift_quick\", cmd_quick, 3679), Function.ability(282, \"Lift_Barracks_quick\", cmd_quick, 452, 3679),", "cmd_quick, 158), Function.ability(258, \"Hallucination_VoidRay_quick\", cmd_quick, 160), Function.ability(259, \"Hallucination_WarpPrism_quick\", cmd_quick, 162),", "cmd_screen, 169), Function.ability(201, \"Effect_ImmortalBarrier_quick\", cmd_quick, 2328), Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast, 2328),", "\"Behavior_HoldFireOff_quick\", cmd_quick, 3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38, 3689), Function.ability(33, \"Behavior_HoldFireOff_Lurker_quick\",", "\"Effect_WidowMineAttack_autocast\", autocast, 2099), Function.ability(247, \"Effect_YamatoGun_screen\", cmd_screen, 401), Function.ability(248, \"Hallucination_Adept_quick\", cmd_quick,", "Function.ability(322, \"Morph_Unsiege_quick\", cmd_quick, 390), Function.ability(323, \"Morph_Uproot_quick\", cmd_quick, 3681), Function.ability(324, \"Morph_SpineCrawlerUproot_quick\",", "cmd_screen, 1974, 3686), Function.ability(211, \"Effect_MedivacIgniteAfterburners_quick\", cmd_quick, 2116), Function.ability(212, \"Effect_NeuralParasite_screen\", cmd_screen,", "Function.ability(344, \"Rally_Workers_minimap\", cmd_minimap, 3690), 
Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen, 203, 3690), Function.ability(346,", "of the possible Types.\"\"\" named = {name: type_._replace(id=Arguments._fields.index(name), name=name) for", "Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562, 3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563, 3693),", "cmd_screen, 3669), Function.ability(517, \"UnloadAllAt_minimap\", cmd_minimap, 3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen, 396,", "3669), Function.ability(517, \"UnloadAllAt_minimap\", cmd_minimap, 3669), Function.ability(518, \"UnloadAllAt_Medivac_screen\", cmd_screen, 396, 3669),", "Types.\"\"\" named = {name: type_._replace(id=Arguments._fields.index(name), name=name) for name, type_ in", "applicable law or agreed to in writing, software # distributed", "1408, 3669), Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913, 3669), Function.ability(523, \"UnloadAllAt_WarpPrism_minimap\", cmd_minimap,", "action_cmd.ability_id = ability_id action_cmd.queue_command = queued screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id,", "to function_type. 
avail_fn: For non-abilities, this function returns whether the", "Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen, 255), Function.ability(240, \"Effect_TacticalJump_screen\", cmd_screen, 2358), Function.ability(241, \"Effect_TimeWarp_screen\",", "562), Function.ability(469, \"Train_Hellbat_quick\", cmd_quick, 596), Function.ability(470, \"Train_Hellion_quick\", cmd_quick, 595), Function.ability(471,", "for a in self.args)) class Functions(object): \"\"\"Represents the full set", "cmd_screen, 454, 3682), Function.ability(98, \"Build_TechLab_Starport_quick\", cmd_quick, 487, 3682), Function.ability(99, \"Build_TechLab_Starport_screen\",", "Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap, 212, 3673), Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen, 199, 3673),", "selecting a worker. build_queue_id: Which build queue index to target.", "What to do when selecting a unit by id. select_unit_id:", "Function.ability(264, \"Harvest_Gather_screen\", cmd_screen, 3666), Function.ability(265, \"Harvest_Gather_Drone_screen\", cmd_screen, 1183, 3666), Function.ability(266,", "cmd_quick, 1532), Function.ability(356, \"Research_Blink_quick\", cmd_quick, 1593), Function.ability(357, \"Research_Burrow_quick\", cmd_quick, 1225),", "312, 3671), Function.ability(173, \"Cancel_QueueCancelToSelection_quick\", cmd_quick, 308, 3671), Function.ability(174, \"Cancel_QueuePasive_quick\", cmd_quick,", "# Depends on current build queue. unload_id=ArgumentType.scalar(500), # Depends on", "\"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562, 3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick, 1563, 3693), Function.ability(388,", "cmd_screen, 17), Function.ability(334, \"Patrol_minimap\", cmd_minimap, 17), Function.ability(335, \"Rally_Units_screen\", cmd_screen, 3673),", "the argument, also unique. 
sizes: The max+1 of each of", "3695), Function.ability(394, \"Research_ProtossGroundWeaponsLevel1_quick\", cmd_quick, 1062, 3695), Function.ability(395, \"Research_ProtossGroundWeaponsLevel2_quick\", cmd_quick, 1063,", "1186, 3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\", cmd_quick, 1187, 3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick,", "32), Function.ability(227, \"Effect_Scan_screen\", cmd_screen, 399), Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick, 181), Function.ability(229,", "Function.ability(512, \"UnloadAll_Bunker_quick\", cmd_quick, 408, 3664), Function.ability(513, \"UnloadAll_CommandCenter_quick\", cmd_quick, 413, 3664),", "455, 3683), Function.ability(77, \"Build_Reactor_Starport_quick\", cmd_quick, 488, 3683), Function.ability(78, \"Build_Reactor_Starport_screen\", cmd_screen,", "cmd_quick, 2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375), Function.ability(39, \"Build_Armory_screen\", cmd_screen, 331),", "\"Research_GraviticDrive_quick\", cmd_quick, 1094), Function.ability(368, \"Research_GroovedSpines_quick\", cmd_quick, 1282), Function.ability(369, \"Research_HiSecAutoTracking_quick\", cmd_quick,", "def spec(cls, id_, name, args): \"\"\"Create a Function to be", "cmd_quick, 3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861, 3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\", cmd_quick,", "do with the unit at the point. select_add: Whether to", "3668), Function.ability(294, \"LoadAll_quick\", cmd_quick, 3663), Function.ability(295, \"LoadAll_CommandCenter_quick\", cmd_quick, 416, 3663),", "\"Rally_Morphing_Unit_screen\", cmd_screen, 199, 3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap, 199, 3673), Function.ability(343,", "build queue. 
unload_id=ArgumentType.scalar(500), # Depends on the current loaded units.", "1414), Function.ability(510, \"TrainWarp_Zealot_screen\", cmd_screen, 1413), Function.ability(511, \"UnloadAll_quick\", cmd_quick, 3664), Function.ability(512,", "cmd_quick, 3693), Function.ability(386, \"Research_ProtossAirWeaponsLevel1_quick\", cmd_quick, 1562, 3693), Function.ability(387, \"Research_ProtossAirWeaponsLevel2_quick\", cmd_quick,", "minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id): \"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id = ability_id class", "\"Effect_Feedback_screen\", cmd_screen, 140), Function.ability(193, \"Effect_ForceField_screen\", cmd_screen, 1526), Function.ability(194, \"Effect_FungalGrowth_screen\", cmd_screen,", "cmd_screen, 2588), Function.ability(206, \"Effect_LockOn_screen\", cmd_screen, 2350), Function.ability(207, \"Effect_LocustSwoop_screen\", cmd_screen, 2387),", "cmd_quick, 2114), Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick, 154), Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick, 156),", "function id, which is what the agent will use. name:", "3673), Function.ability(341, \"Rally_Morphing_Unit_screen\", cmd_screen, 199, 3673), Function.ability(342, \"Rally_Morphing_Unit_minimap\", cmd_minimap, 199,", "\"Build_CommandCenter_screen\", cmd_screen, 318), Function.ability(45, \"Build_CreepTumor_screen\", cmd_screen, 3691), Function.ability(46, \"Build_CreepTumor_Queen_screen\", cmd_screen,", "\"Attack_AttackBuilding_screen\", cmd_screen, 2048, 3674), Function.ability(17, \"Attack_AttackBuilding_minimap\", cmd_minimap, 2048, 3674), Function.ability(18,", "\"Research_TerranShipWeapons_quick\", cmd_quick, 3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick, 861, 3699), Function.ability(416, \"Research_TerranShipWeaponsLevel2_quick\",", "can be represented by a more general action. 
function_type: One", "\"Build_TechLab_screen\", cmd_screen, 3682), Function.ability(94, \"Build_TechLab_Barracks_quick\", cmd_quick, 421, 3682), Function.ability(95, \"Build_TechLab_Barracks_screen\",", "Function.ability(521, \"UnloadAllAt_Overlord_minimap\", cmd_minimap, 1408, 3669), Function.ability(522, \"UnloadAllAt_WarpPrism_screen\", cmd_screen, 913, 3669),", "cmd_quick, 1388, 3662), Function.ability(133, \"BurrowUp_Roach_autocast\", autocast, 1388, 3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\",", "Function.ability(155, \"Cancel_MorphMothership_quick\", cmd_quick, 1848, 3659), Function.ability(156, \"Cancel_MorphOrbital_quick\", cmd_quick, 1517, 3659),", "cmd_quick, 46), Function.ability(380, \"Research_PneumatizedCarapace_quick\", cmd_quick, 1223), Function.ability(381, \"Research_ProtossAirArmor_quick\", cmd_quick, 3692),", "return \"%s/%s %s\" % (self.id, self.name, list(self.sizes)) @classmethod def enum(cls,", "954), Function.ability(483, \"Train_Overlord_quick\", cmd_quick, 1344), Function.ability(484, \"Train_Phoenix_quick\", cmd_quick, 946), Function.ability(485,", "cmd_minimap, 3674), Function.ability(14, \"Attack_Attack_screen\", cmd_screen, 23, 3674), Function.ability(15, \"Attack_Attack_minimap\", cmd_minimap,", "1154), Function.ability(53, \"Build_Factory_screen\", cmd_screen, 328), Function.ability(54, \"Build_FleetBeacon_screen\", cmd_screen, 885), Function.ability(55,", "# You may obtain a copy of the License at", "\"Research_ExtendedThermalLance_quick\", cmd_quick, 1097), Function.ability(365, \"Research_GlialRegeneration_quick\", cmd_quick, 216), Function.ability(366, \"Research_GraviticBooster_quick\", cmd_quick,", "650), Function.ability(370, \"Research_HighCapacityFuelTanks_quick\", cmd_quick, 804), Function.ability(371, \"Research_InfernalPreigniter_quick\", cmd_quick, 761), Function.ability(372,", "list of ints. 
For select_point this could be: [[0], [23,", "namedtuple since python3 has a limit of 255 function arguments,", "move_camera(action, minimap): \"\"\"Move the camera.\"\"\" minimap.assign_to(action.action_feature_layer.camera_move.center_minimap) def select_point(action, select_point_act, screen):", "cmd_screen, 3684), Function.ability(231, \"Effect_Spray_Protoss_screen\", cmd_screen, 30, 3684), Function.ability(232, \"Effect_Spray_Terran_screen\", cmd_screen,", "1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692), Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick, 3689), Function.ability(32,", "Function.ability(29, \"Behavior_GenerateCreepOff_quick\", cmd_quick, 1693), Function.ability(30, \"Behavior_GenerateCreepOn_quick\", cmd_quick, 1692), Function.ability(31, \"Behavior_HoldFireOff_quick\",", "cmd_quick, 978), Function.ability(463, \"Train_Corruptor_quick\", cmd_quick, 1353), Function.ability(464, \"Train_Cyclone_quick\", cmd_quick, 597),", "Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast, 1396, 3662), Function.ability(127, \"BurrowUp_Lurker_quick\", cmd_quick, 2110, 3662),", "autocast, 1819), Function.ability(187, \"Effect_ChronoBoost_screen\", cmd_screen, 261), Function.ability(188, \"Effect_Contaminate_screen\", cmd_screen, 1825),", "unique.\") def __getattr__(self, name): return self._func_dict[name] def __getitem__(self, key): if", "cmd_quick, 1632), Function.ability(487, \"Train_Raven_quick\", cmd_quick, 622), Function.ability(488, \"Train_Reaper_quick\", cmd_quick, 561),", "3702), Function.ability(433, \"Research_ZergFlyerAttack_quick\", cmd_quick, 3703), Function.ability(434, \"Research_ZergFlyerAttackLevel1_quick\", cmd_quick, 1312, 3703),", "\"Cancel_CreepTumor_quick\", cmd_quick, 1763, 3659), Function.ability(146, \"Cancel_FactoryAddOn_quick\", cmd_quick, 484, 3659), Function.ability(147,", "\"Effect_Transfusion_screen\", cmd_screen, 1664), Function.ability(243, 
\"Effect_ViperConsume_screen\", cmd_screen, 2073), Function.ability(244, \"Effect_VoidRayPrismaticAlignment_quick\", cmd_quick,", "Function.ability(273, \"Harvest_Return_SCV_quick\", cmd_quick, 296, 3667), Function.ability(274, \"HoldPosition_quick\", cmd_quick, 18), Function.ability(275,", "1692), Function.ability(31, \"Behavior_HoldFireOff_quick\", cmd_quick, 3689), Function.ability(32, \"Behavior_HoldFireOff_Ghost_quick\", cmd_quick, 38, 3689),", "class FunctionCall(collections.namedtuple( \"FunctionCall\", [\"function\", \"arguments\"])): \"\"\"Represents a function call action.", "@classmethod def spec(cls, id_, name, sizes): \"\"\"Create an ArgumentType to", "cmd_quick, 403), Function.ability(327, \"Morph_VikingFighterMode_quick\", cmd_quick, 405), Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick, 1518),", "function representing a ui action.\"\"\" return cls(id_, name, 0, 0,", "2340, 3661), Function.ability(112, \"BurrowDown_Roach_quick\", cmd_quick, 1386, 3661), Function.ability(113, \"BurrowDown_SwarmHost_quick\", cmd_quick,", "3662), Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick, 1392, 3662), Function.ability(139, \"BurrowUp_Zergling_autocast\", autocast, 1392,", "cmd_quick, 1345), Function.ability(473, \"Train_Immortal_quick\", cmd_quick, 979), Function.ability(474, \"Train_Infestor_quick\", cmd_quick, 1352),", "1348), Function.ability(498, \"Train_VikingFighter_quick\", cmd_quick, 624), Function.ability(499, \"Train_Viper_quick\", cmd_quick, 1354), Function.ability(500,", "1446, 3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396, 3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast,", "select_larva: [], unload: [TYPES.unload_id], build_queue: [TYPES.build_queue_id], cmd_quick: [TYPES.queued], cmd_screen: [TYPES.queued,", "2594, 3659), Function.ability(142, \"Cancel_AdeptShadePhaseShift_quick\", cmd_quick, 2596, 3659), Function.ability(143, \"Cancel_BarracksAddOn_quick\", 
cmd_quick,", "cmd_quick, 3692), Function.ability(382, \"Research_ProtossAirArmorLevel1_quick\", cmd_quick, 1565, 3692), Function.ability(383, \"Research_ProtossAirArmorLevel2_quick\", cmd_quick,", "\"Train_Hellbat_quick\", cmd_quick, 596), Function.ability(470, \"Train_Hellion_quick\", cmd_quick, 595), Function.ability(471, \"Train_HighTemplar_quick\", cmd_quick,", "\"Cancel_MorphBroodlord_quick\", cmd_quick, 1373, 3659), Function.ability(150, \"Cancel_MorphGreaterSpire_quick\", cmd_quick, 1221, 3659), Function.ability(151,", "3671), Function.ability(169, \"Cancel_HangarQueue5_quick\", cmd_quick, 1038, 3671), Function.ability(170, \"Cancel_Queue1_quick\", cmd_quick, 304,", "3700), Function.ability(420, \"Research_TerranVehicleAndShipPlatingLevel1_quick\", cmd_quick, 864, 3700), Function.ability(421, \"Research_TerranVehicleAndShipPlatingLevel2_quick\", cmd_quick, 865,", "\"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705), Function.ability(442, \"Research_ZergMeleeWeaponsLevel1_quick\", cmd_quick, 1186, 3705), Function.ability(443, \"Research_ZergMeleeWeaponsLevel2_quick\",", "is generated with gen_actions.py Function.ability(12, \"Attack_screen\", cmd_screen, 3674), Function.ability(13, \"Attack_minimap\",", "be used in ValidActions.\"\"\" return cls(id_, name, sizes, None) class", "\"Train_Queen_quick\", cmd_quick, 1632), Function.ability(487, \"Train_Raven_quick\", cmd_quick, 622), Function.ability(488, \"Train_Reaper_quick\", cmd_quick,", "Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 1528), Function.ability(330, \"Morph_WarpPrismTransportMode_quick\", cmd_quick, 1530), Function.ability(331, \"Move_screen\",", "Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick, 2114), Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick, 154), Function.ability(256, \"Hallucination_Probe_quick\",", "\"Research_ChitinousPlating_quick\", cmd_quick, 265), Function.ability(361, \"Research_CombatShield_quick\", cmd_quick, 731), 
Function.ability(362, \"Research_ConcussiveShells_quick\", cmd_quick,", "cmd_quick, 654, 3698), Function.ability(414, \"Research_TerranShipWeapons_quick\", cmd_quick, 3699), Function.ability(415, \"Research_TerranShipWeaponsLevel1_quick\", cmd_quick,", "FUNCTION_TYPES[function_type], avail_fn) @classmethod def ability(cls, id_, name, function_type, ability_id, general_id=0):", "Function.ability(25, \"Behavior_CloakOff_Ghost_quick\", cmd_quick, 383, 3677), Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick, 3676), Function.ability(27,", "FUNCTIONS = Functions([ Function.ui_func(0, \"no_op\", no_op), Function.ui_func(1, \"move_camera\", move_camera), Function.ui_func(2,", "cmd_quick, 405), Function.ability(328, \"Morph_WarpGate_quick\", cmd_quick, 1518), Function.ability(329, \"Morph_WarpPrismPhasingMode_quick\", cmd_quick, 1528),", "id_, name, function_type, avail_fn=always): \"\"\"Define a function representing a ui", "Functions(object): \"\"\"Represents the full set of functions. 
Can't use namedtuple", "Function.ability(485, \"Train_Probe_quick\", cmd_quick, 1006), Function.ability(486, \"Train_Queen_quick\", cmd_quick, 1632), Function.ability(487, \"Train_Raven_quick\",", "3706), Function.ability(446, \"Research_ZergMissileWeaponsLevel1_quick\", cmd_quick, 1192, 3706), Function.ability(447, \"Research_ZergMissileWeaponsLevel2_quick\", cmd_quick, 1193,", "\"Cancel_BarracksAddOn_quick\", cmd_quick, 451, 3659), Function.ability(144, \"Cancel_BuildInProgress_quick\", cmd_quick, 314, 3659), Function.ability(145,", "\"Research_ProtossAirArmorLevel2_quick\", cmd_quick, 1566, 3692), Function.ability(384, \"Research_ProtossAirArmorLevel3_quick\", cmd_quick, 1567, 3692), Function.ability(385,", "Function.ability(440, \"Research_ZergGroundArmorLevel3_quick\", cmd_quick, 1191, 3704), Function.ability(441, \"Research_ZergMeleeWeapons_quick\", cmd_quick, 3705), Function.ability(442,", "3659), Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick, 1449, 3659), Function.ability(159, \"Cancel_MorphPlanetaryFortress_quick\", cmd_quick, 1451,", "3677), Function.ability(26, \"Behavior_CloakOn_quick\", cmd_quick, 3676), Function.ability(27, \"Behavior_CloakOn_Banshee_quick\", cmd_quick, 392, 3676),", "it's unknown at this time. 
\"\"\"Create an ArgumentType that is", "screen.assign_to(action_cmd.target_screen_coord) def cmd_minimap(action, ability_id, queued, minimap): \"\"\"Do a command that", "\"; \".join(str(a) for a in self.args)) class Functions(object): \"\"\"Represents the", "3688), Function.ability(37, \"Behavior_PulsarBeamOff_quick\", cmd_quick, 2376), Function.ability(38, \"Behavior_PulsarBeamOn_quick\", cmd_quick, 2375), Function.ability(39,", "1730, 3659), Function.ability(165, \"Cancel_SporeCrawlerRoot_quick\", cmd_quick, 1732, 3659), Function.ability(166, \"Cancel_StarportAddOn_quick\", cmd_quick,", "407, 3668), Function.ability(289, \"Load_Medivac_screen\", cmd_screen, 394, 3668), Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen,", "Function.ability(484, \"Train_Phoenix_quick\", cmd_quick, 946), Function.ability(485, \"Train_Probe_quick\", cmd_quick, 1006), Function.ability(486, \"Train_Queen_quick\",", "cmd_screen, 1160), Function.ability(62, \"Build_Interceptors_quick\", cmd_quick, 1042), Function.ability(63, \"Build_Interceptors_autocast\", autocast, 1042),", "\"Build_BanelingNest_screen\", cmd_screen, 1162), Function.ability(42, \"Build_Barracks_screen\", cmd_screen, 321), Function.ability(43, \"Build_Bunker_screen\", cmd_screen,", "cmd_screen, 881), Function.ability(71, \"Build_Reactor_quick\", cmd_quick, 3683), Function.ability(72, \"Build_Reactor_screen\", cmd_screen, 3683),", "a game ability.\"\"\" assert function_type in ABILITY_FUNCTIONS return cls(id_, name,", "the action function. 
arguments: The values to store for the", "= ability_id action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id): \"\"\"Toggle", "cmd_quick, 1187, 3705), Function.ability(444, \"Research_ZergMeleeWeaponsLevel3_quick\", cmd_quick, 1188, 3705), Function.ability(445, \"Research_ZergMissileWeapons_quick\",", "Function.ability(224, \"Effect_Repair_SCV_screen\", cmd_screen, 316, 3685), Function.ability(225, \"Effect_Repair_SCV_autocast\", autocast, 316, 3685),", "\"<none>\", (len(options),), lambda a: options[a[0]]) @classmethod def scalar(cls, value): \"\"\"Create", "function_type. avail_fn: For non-abilities, this function returns whether the function", "cmd_quick, 799), Function.ability(355, \"Research_BattlecruiserWeaponRefit_quick\", cmd_quick, 1532), Function.ability(356, \"Research_Blink_quick\", cmd_quick, 1593),", "\"Train_Roach_quick\", cmd_quick, 1351), Function.ability(490, \"Train_SCV_quick\", cmd_quick, 524), Function.ability(491, \"Train_Sentry_quick\", cmd_quick,", "and actions for SC2.\"\"\" from __future__ import absolute_import from __future__", "Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709, 3659), Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick, 1449, 3659),", "1376, 3662), Function.ability(121, \"BurrowUp_Drone_quick\", cmd_quick, 1380, 3662), Function.ability(122, \"BurrowUp_Hydralisk_quick\", cmd_quick,", "3662), Function.ability(133, \"BurrowUp_Roach_autocast\", autocast, 1388, 3662), Function.ability(134, \"BurrowUp_SwarmHost_quick\", cmd_quick, 2016,", "3668), Function.ability(289, \"Load_Medivac_screen\", cmd_screen, 394, 3668), Function.ability(290, \"Load_NydusNetwork_screen\", cmd_screen, 1437,", "in FUNCTIONS: if func.ability_id >= 0: ABILITY_IDS[func.ability_id].add(func) ABILITY_IDS = {k:", "Depends on the current loaded units. 
) # Which argument", "select_larva(action): \"\"\"Select all larva.\"\"\" action.action_ui.select_larva.SetInParent() # Adds the empty proto", "Everything below here is generated with gen_actions.py Function.ability(12, \"Attack_screen\", cmd_screen,", "Function.ability(256, \"Hallucination_Probe_quick\", cmd_quick, 156), Function.ability(257, \"Hallucination_Stalker_quick\", cmd_quick, 158), Function.ability(258, \"Hallucination_VoidRay_quick\",", "Arguments): arguments = Arguments(*arguments) return cls(function, arguments) class ValidActions(collections.namedtuple( \"ValidActions\",", "3662), Function.ability(125, \"BurrowUp_InfestorTerran_quick\", cmd_quick, 1396, 3662), Function.ability(126, \"BurrowUp_InfestorTerran_autocast\", autocast, 1396,", "True]), # (select vs select_add) select_unit_act=ArgumentType.enum([ sc_ui.ActionMultiPanel.SingleSelect, sc_ui.ActionMultiPanel.DeselectUnit, sc_ui.ActionMultiPanel.SelectAllOfType, sc_ui.ActionMultiPanel.DeselectAllOfType,", "cmd_quick, 1225), Function.ability(358, \"Research_CentrifugalHooks_quick\", cmd_quick, 1482), Function.ability(359, \"Research_Charge_quick\", cmd_quick, 1592),", "74), Function.ability(195, \"Effect_GhostSnipe_screen\", cmd_screen, 2714), Function.ability(196, \"Effect_GravitonBeam_screen\", cmd_screen, 173), Function.ability(197,", "Function.ability(79, \"Build_Refinery_screen\", cmd_screen, 320), Function.ability(80, \"Build_RoachWarren_screen\", cmd_screen, 1165), Function.ability(81, \"Build_RoboticsBay_screen\",", "cmd_quick, 2333, 3659), Function.ability(154, \"Cancel_MorphLurkerDen_quick\", cmd_quick, 2113, 3659), Function.ability(155, \"Cancel_MorphMothership_quick\",", "1517, 3659), Function.ability(157, \"Cancel_MorphOverlordTransport_quick\", cmd_quick, 2709, 3659), Function.ability(158, \"Cancel_MorphOverseer_quick\", cmd_quick,", "1372), Function.ability(298, \"Morph_Gateway_quick\", cmd_quick, 1520), Function.ability(299, \"Morph_GreaterSpire_quick\", cmd_quick, 1220), 
Function.ability(300,", "Function.ability(302, \"Morph_Hive_quick\", cmd_quick, 1218), Function.ability(303, \"Morph_Lair_quick\", cmd_quick, 1216), Function.ability(304, \"Morph_LiberatorAAMode_quick\",", "Function.ability(488, \"Train_Reaper_quick\", cmd_quick, 561), Function.ability(489, \"Train_Roach_quick\", cmd_quick, 1351), Function.ability(490, \"Train_SCV_quick\",", "Function.ability(304, \"Morph_LiberatorAAMode_quick\", cmd_quick, 2560), Function.ability(305, \"Morph_LiberatorAGMode_screen\", cmd_screen, 2558), Function.ability(306, \"Morph_Lurker_quick\",", "cmd_quick, 42), Function.ability(192, \"Effect_Feedback_screen\", cmd_screen, 140), Function.ability(193, \"Effect_ForceField_screen\", cmd_screen, 1526),", "1374, 3661), Function.ability(105, \"BurrowDown_Drone_quick\", cmd_quick, 1378, 3661), Function.ability(106, \"BurrowDown_Hydralisk_quick\", cmd_quick,", "Function.ability(202, \"Effect_ImmortalBarrier_autocast\", autocast, 2328), Function.ability(203, \"Effect_InfestedTerrans_screen\", cmd_screen, 247), Function.ability(204, \"Effect_InjectLarva_screen\",", "\"Research_ProtossGroundArmorLevel1_quick\", cmd_quick, 1065, 3694), Function.ability(391, \"Research_ProtossGroundArmorLevel2_quick\", cmd_quick, 1066, 3694), Function.ability(392,", "Function.ability(411, \"Research_TerranInfantryWeaponsLevel1_quick\", cmd_quick, 652, 3698), Function.ability(412, \"Research_TerranInfantryWeaponsLevel2_quick\", cmd_quick, 653, 3698),", "413, 3664), Function.ability(514, \"UnloadAll_NydasNetwork_quick\", cmd_quick, 1438, 3664), Function.ability(515, \"UnloadAll_NydusWorm_quick\", cmd_quick,", "\"Research_NeosteelFrame_quick\", cmd_quick, 655), Function.ability(376, \"Research_NeuralParasite_quick\", cmd_quick, 1455), Function.ability(377, \"Research_PathogenGlands_quick\", cmd_quick,", "\"Harvest_Gather_SCV_screen\", cmd_screen, 295, 3666), Function.ability(269, \"Harvest_Return_quick\", cmd_quick, 3667), Function.ability(270, 
\"Harvest_Return_Drone_quick\",", "\"License\"); # you may not use this file except in", "action_cmd.queue_command = queued minimap.assign_to(action_cmd.target_minimap_coord) def autocast(action, ability_id): \"\"\"Toggle autocast.\"\"\" action.action_ui.toggle_autocast.ability_id", "name, function_type, ability_id, general_id=0): \"\"\"Define a function represented as a", "Function.ability(175, \"Cancel_QueuePassiveCancelToSelection_quick\", cmd_quick, 1833, 3671), Function.ability(176, \"Effect_Abduct_screen\", cmd_screen, 2067), Function.ability(177,", "cmd_quick, 954), Function.ability(483, \"Train_Overlord_quick\", cmd_quick, 1344), Function.ability(484, \"Train_Phoenix_quick\", cmd_quick, 946),", "selecting, setting, etc.\"\"\" select = action.action_ui.control_group select.action = control_group_act select.control_group_index", "Function.ability(88, \"Build_Stargate_screen\", cmd_screen, 889), Function.ability(89, \"Build_Starport_screen\", cmd_screen, 329), Function.ability(90, \"Build_StasisTrap_screen\",", "380, 3675), Function.ability(238, \"Effect_Stim_Marine_Redirect_quick\", cmd_quick, 1683, 3675), Function.ability(239, \"Effect_SupplyDrop_screen\", cmd_screen,", "3668), Function.ability(292, \"Load_Overlord_screen\", cmd_screen, 1406, 3668), Function.ability(293, \"Load_WarpPrism_screen\", cmd_screen, 911,", "TYPES.screen, TYPES.screen2], select_unit: [TYPES.select_unit_act, TYPES.select_unit_id], control_group: [TYPES.control_group_act, TYPES.control_group_id], select_idle_worker: [TYPES.select_worker],", "Which unit to target in a transport/nydus/command center. \"\"\" ___slots__", "list of arguments for that function, each being a list", "with the control group. control_group_id: Which control group to do", "action. 
function_type: One of the functions in FUNCTION_TYPES for how", "3679), Function.ability(282, \"Lift_Barracks_quick\", cmd_quick, 452, 3679), Function.ability(283, \"Lift_CommandCenter_quick\", cmd_quick, 417,", "protos to send to the game. \"\"\" __slots__ = ()", "sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle, sc_spatial.ActionSpatialUnitSelectionPoint.AllType, sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType, ]),", "sc_ui.ActionSelectIdleWorker.AddAll, ]), build_queue_id=ArgumentType.scalar(10), # Depends on current build queue. unload_id=ArgumentType.scalar(500),", "\"BurrowDown_Infestor_quick\", cmd_quick, 1444, 3661), Function.ability(108, \"BurrowDown_InfestorTerran_quick\", cmd_quick, 1394, 3661), Function.ability(109,", "cmd_screen, 1622), Function.ability(214, \"Effect_OracleRevelation_screen\", cmd_screen, 2146), Function.ability(215, \"Effect_ParasiticBomb_screen\", cmd_screen, 2542),", "a unit at a point.\"\"\" select = action.action_feature_layer.unit_selection_point screen.assign_to(select.selection_screen_coord) select.type", "sc_ui.ActionMultiPanel.DeselectAllOfType, ]), select_unit_id=ArgumentType.scalar(500), # Depends on current selection. select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set,", "3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658, 3697), Function.ability(410, \"Research_TerranInfantryWeapons_quick\", cmd_quick, 3698),", "255 function arguments, so build something similar. \"\"\" def __init__(self,", "the selection or replace it. 
select_unit_act: What to do when", "sc_ui.ActionControlGroup.Recall, sc_ui.ActionControlGroup.Set, sc_ui.ActionControlGroup.Append, sc_ui.ActionControlGroup.SetAndSteal, sc_ui.ActionControlGroup.AppendAndSteal, ]), control_group_id=ArgumentType.scalar(10), select_point_act=ArgumentType.enum([ sc_spatial.ActionSpatialUnitSelectionPoint.Select, sc_spatial.ActionSpatialUnitSelectionPoint.Toggle,", "3662), Function.ability(132, \"BurrowUp_Roach_quick\", cmd_quick, 1388, 3662), Function.ability(133, \"BurrowUp_Roach_autocast\", autocast, 1388,", "@classmethod def scalar(cls, value): \"\"\"Create an ArgumentType with a single", "Function.ability(429, \"Research_ZergFlyerArmor_quick\", cmd_quick, 3702), Function.ability(430, \"Research_ZergFlyerArmorLevel1_quick\", cmd_quick, 1315, 3702), Function.ability(431,", "Function.ability(204, \"Effect_InjectLarva_screen\", cmd_screen, 251), Function.ability(205, \"Effect_KD8Charge_screen\", cmd_screen, 2588), Function.ability(206, \"Effect_LockOn_screen\",", "Function.ability(61, \"Build_InfestationPit_screen\", cmd_screen, 1160), Function.ability(62, \"Build_Interceptors_quick\", cmd_quick, 1042), Function.ability(63, \"Build_Interceptors_autocast\",", "cmd_quick, 1725, 3681), Function.ability(325, \"Morph_SporeCrawlerUproot_quick\", cmd_quick, 1727, 3681), Function.ability(326, \"Morph_VikingAssaultMode_quick\",", "options[a[0]]) @classmethod def scalar(cls, value): \"\"\"Create an ArgumentType with a", "cmd_quick, 1219, 3659), Function.ability(152, \"Cancel_MorphLair_quick\", cmd_quick, 1217, 3659), Function.ability(153, \"Cancel_MorphLurker_quick\",", "def scalar(cls, value): \"\"\"Create an ArgumentType with a single scalar", "cmd_quick, 152), Function.ability(254, \"Hallucination_Oracle_quick\", cmd_quick, 2114), Function.ability(255, \"Hallucination_Phoenix_quick\", cmd_quick, 154),", "\"Morph_SupplyDepot_Raise_quick\", cmd_quick, 558), Function.ability(320, \"Morph_ThorExplosiveMode_quick\", cmd_quick, 2364), 
Function.ability(321, \"Morph_ThorHighImpactMode_quick\", cmd_quick,", "2393), Function.ability(245, \"Effect_WidowMineAttack_screen\", cmd_screen, 2099), Function.ability(246, \"Effect_WidowMineAttack_autocast\", autocast, 2099), Function.ability(247,", "select.unit_index = select_unit_id def control_group(action, control_group_act, control_group_id): \"\"\"Act on a", "\"Research_AdvancedBallistics_quick\", cmd_quick, 805), Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick, 790), Function.ability(354, \"Research_BansheeHyperflightRotors_quick\", cmd_quick,", "send to the game. \"\"\" __slots__ = () def __str__(self):", "autocast, 1392, 3662), Function.ability(140, \"Cancel_quick\", cmd_quick, 3659), Function.ability(141, \"Cancel_AdeptPhaseShift_quick\", cmd_quick,", "\"Load_Bunker_screen\", cmd_screen, 407, 3668), Function.ability(289, \"Load_Medivac_screen\", cmd_screen, 394, 3668), Function.ability(290,", "0), Function.ui_func(7, \"select_army\", select_army, lambda obs: obs.player_common.army_count > 0), Function.ui_func(8,", "cmd_screen(action, ability_id, queued, screen): \"\"\"Do a command that needs a", "3695), Function.ability(397, \"Research_ProtossShields_quick\", cmd_quick, 3696), Function.ability(398, \"Research_ProtossShieldsLevel1_quick\", cmd_quick, 1068, 3696),", "\"Train_Marine_quick\", cmd_quick, 560), Function.ability(478, \"Train_Medivac_quick\", cmd_quick, 620), Function.ability(479, \"Train_MothershipCore_quick\", cmd_quick,", "**kwargs): \"\"\"Create an Arguments of the possible Types.\"\"\" named =", "Function.ability(408, \"Research_TerranInfantryArmorLevel2_quick\", cmd_quick, 657, 3697), Function.ability(409, \"Research_TerranInfantryArmorLevel3_quick\", cmd_quick, 658, 3697),", "Function.ability(339, \"Rally_Hatchery_Units_screen\", cmd_screen, 212, 3673), Function.ability(340, \"Rally_Hatchery_Units_minimap\", cmd_minimap, 212, 3673),", "> 0), Function.ui_func(9, \"select_larva\", select_larva, lambda obs: 
obs.player_common.larva_count > 0),", "\"Research_AdeptResonatingGlaives_quick\", cmd_quick, 1594), Function.ability(352, \"Research_AdvancedBallistics_quick\", cmd_quick, 805), Function.ability(353, \"Research_BansheeCloakingField_quick\", cmd_quick,", "Function.ability(274, \"HoldPosition_quick\", cmd_quick, 18), Function.ability(275, \"Land_screen\", cmd_screen, 3678), Function.ability(276, \"Land_Barracks_screen\",", "of types and functions that are valid for an agent", "cmd_screen, 399), Function.ability(228, \"Effect_SpawnChangeling_quick\", cmd_quick, 181), Function.ability(229, \"Effect_SpawnLocusts_screen\", cmd_screen, 2704),", "]), select_unit_id=ArgumentType.scalar(500), # Depends on current selection. select_worker=ArgumentType.enum([ sc_ui.ActionSelectIdleWorker.Set, sc_ui.ActionSelectIdleWorker.Add,", "cmd_quick, 2097, 3662), Function.ability(138, \"BurrowUp_Zergling_quick\", cmd_quick, 1392, 3662), Function.ability(139, \"BurrowUp_Zergling_autocast\",", "for how to construct the sc2 action proto out of", "\"\"\"Define a function representing a ui action.\"\"\" return cls(id_, name,", "0, 0, function_type, FUNCTION_TYPES[function_type], avail_fn) @classmethod def ability(cls, id_, name,", "cmd_quick, 1435, 3662), Function.ability(129, \"BurrowUp_Queen_autocast\", autocast, 1435, 3662), Function.ability(130, \"BurrowUp_Ravager_quick\",", "\"Train_Banshee_quick\", cmd_quick, 621), Function.ability(460, \"Train_Battlecruiser_quick\", cmd_quick, 623), Function.ability(461, \"Train_Carrier_quick\", cmd_quick,", "3664), Function.ability(516, \"UnloadAllAt_screen\", cmd_screen, 3669), Function.ability(517, \"UnloadAllAt_minimap\", cmd_minimap, 3669), Function.ability(518,", "object. Returns: A new `FunctionCall` instance. 
\"\"\" if isinstance(arguments, dict):", "cmd_minimap, 3690), Function.ability(345, \"Rally_CommandCenter_screen\", cmd_screen, 203, 3690), Function.ability(346, \"Rally_CommandCenter_minimap\", cmd_minimap,", "cmd_quick, 1069, 3696), Function.ability(400, \"Research_ProtossShieldsLevel3_quick\", cmd_quick, 1070, 3696), Function.ability(401, \"Research_PsiStorm_quick\",", "\"Train_WidowMine_quick\", cmd_quick, 614), Function.ability(503, \"Train_Zealot_quick\", cmd_quick, 916), Function.ability(504, \"Train_Zergling_quick\", cmd_quick,", "# Adds the empty proto field. def select_unit(action, select_unit_act, select_unit_id):" ]
[ "class AUCMeter(meter.Meter): \"\"\" The AUCMeter measures the area under the", "h sum_h[1:n] += h area = (sum_h * tpr).sum() /", "sortind.numpy() # creating the roc curve tpr = np.zeros(shape=(scores.size +", "`target` contains only values 0 (for negative examples) and 1", "(1) the `output` contains model output scores that ought to", "the example should be positively labeled, and smaller when the", "= fpr[1:n] - fpr[0:n - 1] sum_h = np.zeros(fpr.shape) sum_h[0:n", "`output` contains model output scores that ought to be higher", "== 1, \\ 'wrong target size (1D expected)' assert output.shape[0]", "examples) and 1 (for positive examples). \"\"\" def __init__(self): super(AUCMeter,", "'wrong target size (1D expected)' assert output.shape[0] == target.shape[0], \\", "be interpreted as the probability that, given a randomly selected", "size (1D expected)' assert output.shape[0] == target.shape[0], \\ 'number of", "The AUCMeter measures the area under the receiver-operating characteristic (ROC)", "under curve using trapezoidal rule n = tpr.shape[0] h =", "- 1] fpr[i] = fpr[i - 1] + 1 tpr", "AUCMeter is designed to operate on one-dimensional Tensors `output` and", "sortind = sortind.numpy() # creating the roc curve tpr =", "smaller when the model believes the example should be negatively", "target = target.cpu().squeeze().numpy() elif isinstance(target, numbers.Number): target = np.asarray([target]) assert", "= np.zeros(shape=(scores.size + 1), dtype=np.float64) fpr = np.zeros(shape=(scores.size + 1),", "model output scores that ought to be higher when the", "def __init__(self): super(AUCMeter, self).__init__() self.reset() def reset(self): self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy()", "labeled, and smaller when the model believes the example should", "number of elements added are 0 if self.scores.shape[0] == 0:", "0))), \\ 'targets should be binary (0, 1)' self.scores =", "elements added are 0 if self.scores.shape[0] == 0: return 0.5", "tpr[i - 1] 
fpr[i] = fpr[i - 1] + 1", "- 1] + 1 fpr[i] = fpr[i - 1] else:", "characteristic (ROC) curve for binary classification problems. The area under", "\"\"\" def __init__(self): super(AUCMeter, self).__init__() self.reset() def reset(self): self.scores =", "range(1, scores.size + 1): if self.targets[sortind[i - 1]] == 1:", "example. The AUCMeter is designed to operate on one-dimensional Tensors", "torch class AUCMeter(meter.Meter): \"\"\" The AUCMeter measures the area under", "add(self, output, target): if torch.is_tensor(output): output = output.cpu().squeeze().numpy() if torch.is_tensor(target):", "- 1] + 1 tpr /= (self.targets.sum() * 1.0) fpr", "sortind = torch.sort(torch.from_numpy(self.scores), dim=0, descending=True) scores = scores.numpy() sortind =", "import meter import numpy as np import torch class AUCMeter(meter.Meter):", "instance, the output of a signoid function); and (2) the", "(2) the `target` contains only values 0 (for negative examples)", "problems. The area under the curve (AUC) can be interpreted", "designed to operate on one-dimensional Tensors `output` and `target`, where", "should be binary (0, 1)' self.scores = np.append(self.scores, output) self.targets", "convinced that the example should be positively labeled, and smaller", "and targets does not match' assert np.all(np.add(np.equal(target, 1), np.equal(target, 0))),", "+ 1), dtype=np.float64) for i in range(1, scores.size + 1):", "classification problems. 
The area under the curve (AUC) can be", "self.targets[sortind[i - 1]] == 1: tpr[i] = tpr[i - 1]", "1), dtype=np.float64) fpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) for i", "self.targets = np.append(self.targets, target) def value(self): # case when number", "sum_h[1:n] += h area = (sum_h * tpr).sum() / 2.0", "= torch.sort(torch.from_numpy(self.scores), dim=0, descending=True) scores = scores.numpy() sortind = sortind.numpy()", "the `output` contains model output scores that ought to be", "self.scores = np.append(self.scores, output) self.targets = np.append(self.targets, target) def value(self):", "curve (AUC) can be interpreted as the probability that, given", "selected positive example and a randomly selected negative example, the", "example is assigned a higher score by the classification model", "for i in range(1, scores.size + 1): if self.targets[sortind[i -", "as the probability that, given a randomly selected positive example", "should be negatively labeled (for instance, the output of a", "# case when number of elements added are 0 if", "isinstance(target, numbers.Number): target = np.asarray([target]) assert np.ndim(output) == 1, \\", "# creating the roc curve tpr = np.zeros(shape=(scores.size + 1),", "assert np.ndim(target) == 1, \\ 'wrong target size (1D expected)'", "selected negative example, the positive example is assigned a higher", "else: tpr[i] = tpr[i - 1] fpr[i] = fpr[i -", "- fpr[0:n - 1] sum_h = np.zeros(fpr.shape) sum_h[0:n - 1]", "1.0).sum() * -1.0) # calculating area under curve using trapezoidal", "match' assert np.all(np.add(np.equal(target, 1), np.equal(target, 0))), \\ 'targets should be", "for binary classification problems. 
The area under the curve (AUC)", "one-dimensional Tensors `output` and `target`, where (1) the `output` contains", "def reset(self): self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets = torch.LongTensor(torch.LongStorage()).numpy() def add(self,", "(AUC) can be interpreted as the probability that, given a", "fpr[i] = fpr[i - 1] else: tpr[i] = tpr[i -", "scores.numpy() sortind = sortind.numpy() # creating the roc curve tpr", "= np.append(self.scores, output) self.targets = np.append(self.targets, target) def value(self): #", "sum_h = np.zeros(fpr.shape) sum_h[0:n - 1] = h sum_h[1:n] +=", "more convinced that the example should be positively labeled, and", "example should be positively labeled, and smaller when the model", "if torch.is_tensor(target): target = target.cpu().squeeze().numpy() elif isinstance(target, numbers.Number): target =", "numpy as np import torch class AUCMeter(meter.Meter): \"\"\" The AUCMeter", "signoid function); and (2) the `target` contains only values 0", "fpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) for i in range(1,", "binary classification problems. The area under the curve (AUC) can", "- 1] else: tpr[i] = tpr[i - 1] fpr[i] =", "from . import meter import numpy as np import torch", "score by the classification model than the negative example. 
The", "- 1.0).sum() * -1.0) # calculating area under curve using", "`target`, where (1) the `output` contains model output scores that", "meter import numpy as np import torch class AUCMeter(meter.Meter): \"\"\"", "that, given a randomly selected positive example and a randomly", "the model believes the example should be negatively labeled (for", "self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets = torch.LongTensor(torch.LongStorage()).numpy() def add(self, output, target):", "a randomly selected negative example, the positive example is assigned", "fpr[0:n - 1] sum_h = np.zeros(fpr.shape) sum_h[0:n - 1] =", "n = tpr.shape[0] h = fpr[1:n] - fpr[0:n - 1]", "h area = (sum_h * tpr).sum() / 2.0 return (area,", "outputs and targets does not match' assert np.all(np.add(np.equal(target, 1), np.equal(target,", "tpr[i - 1] + 1 fpr[i] = fpr[i - 1]", "scores.size + 1): if self.targets[sortind[i - 1]] == 1: tpr[i]", "model than the negative example. The AUCMeter is designed to", "tpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) fpr = np.zeros(shape=(scores.size +", "positive example and a randomly selected negative example, the positive", "/= ((self.targets - 1.0).sum() * -1.0) # calculating area under", "- 1]] == 1: tpr[i] = tpr[i - 1] +", "1.0) fpr /= ((self.targets - 1.0).sum() * -1.0) # calculating", "a randomly selected positive example and a randomly selected negative", "operate on one-dimensional Tensors `output` and `target`, where (1) the", "= np.asarray([target]) assert np.ndim(output) == 1, \\ 'wrong output size", "expected)' assert output.shape[0] == target.shape[0], \\ 'number of outputs and", "The AUCMeter is designed to operate on one-dimensional Tensors `output`", "1] + 1 tpr /= (self.targets.sum() * 1.0) fpr /=", "1] else: tpr[i] = tpr[i - 1] fpr[i] = fpr[i", "binary (0, 1)' self.scores = np.append(self.scores, output) self.targets = np.append(self.targets,", "negative examples) and 1 (for positive examples). 
\"\"\" def __init__(self):", "import numbers from . import meter import numpy as np", "added are 0 if self.scores.shape[0] == 0: return 0.5 #", "randomly selected negative example, the positive example is assigned a", "a signoid function); and (2) the `target` contains only values", "when number of elements added are 0 if self.scores.shape[0] ==", "1] + 1 fpr[i] = fpr[i - 1] else: tpr[i]", "does not match' assert np.all(np.add(np.equal(target, 1), np.equal(target, 0))), \\ 'targets", "<reponame>ashishpatel26/pywick import numbers from . import meter import numpy as", "on one-dimensional Tensors `output` and `target`, where (1) the `output`", "= output.cpu().squeeze().numpy() if torch.is_tensor(target): target = target.cpu().squeeze().numpy() elif isinstance(target, numbers.Number):", "import torch class AUCMeter(meter.Meter): \"\"\" The AUCMeter measures the area", "example should be negatively labeled (for instance, the output of", "where (1) the `output` contains model output scores that ought", "model believes the example should be negatively labeled (for instance,", "be positively labeled, and smaller when the model believes the", "return 0.5 # sorting the arrays scores, sortind = torch.sort(torch.from_numpy(self.scores),", "= h sum_h[1:n] += h area = (sum_h * tpr).sum()", "fpr[i - 1] + 1 tpr /= (self.targets.sum() * 1.0)", "when the model believes the example should be negatively labeled", "= fpr[i - 1] + 1 tpr /= (self.targets.sum() *", "negative example. 
The AUCMeter is designed to operate on one-dimensional", "'targets should be binary (0, 1)' self.scores = np.append(self.scores, output)", "dim=0, descending=True) scores = scores.numpy() sortind = sortind.numpy() # creating", "area under the curve (AUC) can be interpreted as the", "1): if self.targets[sortind[i - 1]] == 1: tpr[i] = tpr[i", "= torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets = torch.LongTensor(torch.LongStorage()).numpy() def add(self, output, target): if", "target size (1D expected)' assert output.shape[0] == target.shape[0], \\ 'number", "= np.zeros(shape=(scores.size + 1), dtype=np.float64) for i in range(1, scores.size", "= tpr[i - 1] + 1 fpr[i] = fpr[i -", "assert np.ndim(output) == 1, \\ 'wrong output size (1D expected)'", "probability that, given a randomly selected positive example and a", "1, \\ 'wrong target size (1D expected)' assert output.shape[0] ==", "tpr[i] = tpr[i - 1] fpr[i] = fpr[i - 1]", "Tensors `output` and `target`, where (1) the `output` contains model", "scores that ought to be higher when the model is", "fpr[i] = fpr[i - 1] + 1 tpr /= (self.targets.sum()", "-1.0) # calculating area under curve using trapezoidal rule n", "assigned a higher score by the classification model than the", "and `target`, where (1) the `output` contains model output scores", "torch.is_tensor(output): output = output.cpu().squeeze().numpy() if torch.is_tensor(target): target = target.cpu().squeeze().numpy() elif", "(0, 1)' self.scores = np.append(self.scores, output) self.targets = np.append(self.targets, target)", "as np import torch class AUCMeter(meter.Meter): \"\"\" The AUCMeter measures", "if torch.is_tensor(output): output = output.cpu().squeeze().numpy() if torch.is_tensor(target): target = target.cpu().squeeze().numpy()", "area under curve using trapezoidal rule n = tpr.shape[0] h", "\\ 'wrong output size (1D expected)' assert np.ndim(target) == 1,", ". 
import meter import numpy as np import torch class", "output.cpu().squeeze().numpy() if torch.is_tensor(target): target = target.cpu().squeeze().numpy() elif isinstance(target, numbers.Number): target", "the receiver-operating characteristic (ROC) curve for binary classification problems. The", "elif isinstance(target, numbers.Number): target = np.asarray([target]) assert np.ndim(output) == 1,", "(ROC) curve for binary classification problems. The area under the", "size (1D expected)' assert np.ndim(target) == 1, \\ 'wrong target", "1 tpr /= (self.targets.sum() * 1.0) fpr /= ((self.targets -", "examples). \"\"\" def __init__(self): super(AUCMeter, self).__init__() self.reset() def reset(self): self.scores", "is designed to operate on one-dimensional Tensors `output` and `target`,", "output, target): if torch.is_tensor(output): output = output.cpu().squeeze().numpy() if torch.is_tensor(target): target", "output size (1D expected)' assert np.ndim(target) == 1, \\ 'wrong", "/= (self.targets.sum() * 1.0) fpr /= ((self.targets - 1.0).sum() *", "def add(self, output, target): if torch.is_tensor(output): output = output.cpu().squeeze().numpy() if", "output of a signoid function); and (2) the `target` contains", "numbers.Number): target = np.asarray([target]) assert np.ndim(output) == 1, \\ 'wrong", "the classification model than the negative example. 
The AUCMeter is", "== 0: return 0.5 # sorting the arrays scores, sortind", "dtype=np.float64) for i in range(1, scores.size + 1): if self.targets[sortind[i", "tpr.shape[0] h = fpr[1:n] - fpr[0:n - 1] sum_h =", "np.append(self.scores, output) self.targets = np.append(self.targets, target) def value(self): # case", "AUCMeter(meter.Meter): \"\"\" The AUCMeter measures the area under the receiver-operating", "negatively labeled (for instance, the output of a signoid function);", "== 1: tpr[i] = tpr[i - 1] + 1 fpr[i]", "trapezoidal rule n = tpr.shape[0] h = fpr[1:n] - fpr[0:n", "descending=True) scores = scores.numpy() sortind = sortind.numpy() # creating the", "(for positive examples). \"\"\" def __init__(self): super(AUCMeter, self).__init__() self.reset() def", "negative example, the positive example is assigned a higher score", "value(self): # case when number of elements added are 0", "output) self.targets = np.append(self.targets, target) def value(self): # case when", "and 1 (for positive examples). \"\"\" def __init__(self): super(AUCMeter, self).__init__()", "the `target` contains only values 0 (for negative examples) and", "0: return 0.5 # sorting the arrays scores, sortind =", "+ 1): if self.targets[sortind[i - 1]] == 1: tpr[i] =", "randomly selected positive example and a randomly selected negative example,", "torch.is_tensor(target): target = target.cpu().squeeze().numpy() elif isinstance(target, numbers.Number): target = np.asarray([target])", "= tpr[i - 1] fpr[i] = fpr[i - 1] +", "and a randomly selected negative example, the positive example is", "np import torch class AUCMeter(meter.Meter): \"\"\" The AUCMeter measures the", "receiver-operating characteristic (ROC) curve for binary classification problems. 
The area", "contains only values 0 (for negative examples) and 1 (for", "be higher when the model is more convinced that the", "case when number of elements added are 0 if self.scores.shape[0]", "the area under the receiver-operating characteristic (ROC) curve for binary", "\\ 'number of outputs and targets does not match' assert", "curve for binary classification problems. The area under the curve", "higher score by the classification model than the negative example.", "torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets = torch.LongTensor(torch.LongStorage()).numpy() def add(self, output, target): if torch.is_tensor(output):", "the positive example is assigned a higher score by the", "super(AUCMeter, self).__init__() self.reset() def reset(self): self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets =", "== target.shape[0], \\ 'number of outputs and targets does not", "'number of outputs and targets does not match' assert np.all(np.add(np.equal(target,", "\\ 'wrong target size (1D expected)' assert output.shape[0] == target.shape[0],", "= np.zeros(fpr.shape) sum_h[0:n - 1] = h sum_h[1:n] += h", "(1D expected)' assert np.ndim(target) == 1, \\ 'wrong target size", "than the negative example. 
The AUCMeter is designed to operate", "the curve (AUC) can be interpreted as the probability that,", "def value(self): # case when number of elements added are", "ought to be higher when the model is more convinced", "np.all(np.add(np.equal(target, 1), np.equal(target, 0))), \\ 'targets should be binary (0,", "h = fpr[1:n] - fpr[0:n - 1] sum_h = np.zeros(fpr.shape)", "= torch.LongTensor(torch.LongStorage()).numpy() def add(self, output, target): if torch.is_tensor(output): output =", "np.zeros(fpr.shape) sum_h[0:n - 1] = h sum_h[1:n] += h area", "- 1] = h sum_h[1:n] += h area = (sum_h", "and (2) the `target` contains only values 0 (for negative", "positively labeled, and smaller when the model believes the example", "1] = h sum_h[1:n] += h area = (sum_h *", "the example should be negatively labeled (for instance, the output", "positive example is assigned a higher score by the classification", "1] sum_h = np.zeros(fpr.shape) sum_h[0:n - 1] = h sum_h[1:n]", "'wrong output size (1D expected)' assert np.ndim(target) == 1, \\", "\"\"\" The AUCMeter measures the area under the receiver-operating characteristic", "torch.LongTensor(torch.LongStorage()).numpy() def add(self, output, target): if torch.is_tensor(output): output = output.cpu().squeeze().numpy()", "+= h area = (sum_h * tpr).sum() / 2.0 return", "np.zeros(shape=(scores.size + 1), dtype=np.float64) fpr = np.zeros(shape=(scores.size + 1), dtype=np.float64)", "labeled (for instance, the output of a signoid function); and", "np.ndim(output) == 1, \\ 'wrong output size (1D expected)' assert", "1 (for positive examples). \"\"\" def __init__(self): super(AUCMeter, self).__init__() self.reset()", "by the classification model than the negative example. 
The AUCMeter", "target): if torch.is_tensor(output): output = output.cpu().squeeze().numpy() if torch.is_tensor(target): target =", "the model is more convinced that the example should be", "the arrays scores, sortind = torch.sort(torch.from_numpy(self.scores), dim=0, descending=True) scores =", "The area under the curve (AUC) can be interpreted as", "`output` and `target`, where (1) the `output` contains model output", "# calculating area under curve using trapezoidal rule n =", "scores, sortind = torch.sort(torch.from_numpy(self.scores), dim=0, descending=True) scores = scores.numpy() sortind", "fpr /= ((self.targets - 1.0).sum() * -1.0) # calculating area", "+ 1 tpr /= (self.targets.sum() * 1.0) fpr /= ((self.targets", "= fpr[i - 1] else: tpr[i] = tpr[i - 1]", "contains model output scores that ought to be higher when", "believes the example should be negatively labeled (for instance, the", "output scores that ought to be higher when the model", "1 fpr[i] = fpr[i - 1] else: tpr[i] = tpr[i", "((self.targets - 1.0).sum() * -1.0) # calculating area under curve", "np.equal(target, 0))), \\ 'targets should be binary (0, 1)' self.scores", "== 1, \\ 'wrong output size (1D expected)' assert np.ndim(target)", "a higher score by the classification model than the negative", "- 1] sum_h = np.zeros(fpr.shape) sum_h[0:n - 1] = h", "(for negative examples) and 1 (for positive examples). \"\"\" def", "under the receiver-operating characteristic (ROC) curve for binary classification problems.", "to be higher when the model is more convinced that", "(for instance, the output of a signoid function); and (2)", "i in range(1, scores.size + 1): if self.targets[sortind[i - 1]]", "np.asarray([target]) assert np.ndim(output) == 1, \\ 'wrong output size (1D", "sum_h[0:n - 1] = h sum_h[1:n] += h area =", "targets does not match' assert np.all(np.add(np.equal(target, 1), np.equal(target, 0))), \\", "numbers from . 
import meter import numpy as np import", "= sortind.numpy() # creating the roc curve tpr = np.zeros(shape=(scores.size", "of elements added are 0 if self.scores.shape[0] == 0: return", "__init__(self): super(AUCMeter, self).__init__() self.reset() def reset(self): self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets", "of a signoid function); and (2) the `target` contains only", "creating the roc curve tpr = np.zeros(shape=(scores.size + 1), dtype=np.float64)", "arrays scores, sortind = torch.sort(torch.from_numpy(self.scores), dim=0, descending=True) scores = scores.numpy()", "= (sum_h * tpr).sum() / 2.0 return (area, tpr, fpr)", "example and a randomly selected negative example, the positive example", "output.shape[0] == target.shape[0], \\ 'number of outputs and targets does", "be binary (0, 1)' self.scores = np.append(self.scores, output) self.targets =", "* -1.0) # calculating area under curve using trapezoidal rule", "output = output.cpu().squeeze().numpy() if torch.is_tensor(target): target = target.cpu().squeeze().numpy() elif isinstance(target,", "curve tpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) fpr = np.zeros(shape=(scores.size", "0 if self.scores.shape[0] == 0: return 0.5 # sorting the", "torch.sort(torch.from_numpy(self.scores), dim=0, descending=True) scores = scores.numpy() sortind = sortind.numpy() #", "1] fpr[i] = fpr[i - 1] + 1 tpr /=", "assert np.all(np.add(np.equal(target, 1), np.equal(target, 0))), \\ 'targets should be binary", "the output of a signoid function); and (2) the `target`", "AUCMeter measures the area under the receiver-operating characteristic (ROC) curve", "function); and (2) the `target` contains only values 0 (for", "+ 1), dtype=np.float64) fpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) for", "classification model than the negative example. 
The AUCMeter is designed", "= scores.numpy() sortind = sortind.numpy() # creating the roc curve", "np.zeros(shape=(scores.size + 1), dtype=np.float64) for i in range(1, scores.size +", "the probability that, given a randomly selected positive example and", "expected)' assert np.ndim(target) == 1, \\ 'wrong target size (1D", "1]] == 1: tpr[i] = tpr[i - 1] + 1", "to operate on one-dimensional Tensors `output` and `target`, where (1)", "the negative example. The AUCMeter is designed to operate on", "is assigned a higher score by the classification model than", "assert output.shape[0] == target.shape[0], \\ 'number of outputs and targets", "\\ 'targets should be binary (0, 1)' self.scores = np.append(self.scores,", "self.reset() def reset(self): self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets = torch.LongTensor(torch.LongStorage()).numpy() def", "when the model is more convinced that the example should", "1, \\ 'wrong output size (1D expected)' assert np.ndim(target) ==", "values 0 (for negative examples) and 1 (for positive examples).", "1)' self.scores = np.append(self.scores, output) self.targets = np.append(self.targets, target) def", "target.cpu().squeeze().numpy() elif isinstance(target, numbers.Number): target = np.asarray([target]) assert np.ndim(output) ==", "example, the positive example is assigned a higher score by", "1), np.equal(target, 0))), \\ 'targets should be binary (0, 1)'", "import numpy as np import torch class AUCMeter(meter.Meter): \"\"\" The", "+ 1 fpr[i] = fpr[i - 1] else: tpr[i] =", "fpr[i - 1] else: tpr[i] = tpr[i - 1] fpr[i]", "target.shape[0], \\ 'number of outputs and targets does not match'", "not match' assert np.all(np.add(np.equal(target, 1), np.equal(target, 0))), \\ 'targets should", "should be positively labeled, and smaller when the model believes", "positive examples). 
\"\"\" def __init__(self): super(AUCMeter, self).__init__() self.reset() def reset(self):", "is more convinced that the example should be positively labeled,", "fpr[1:n] - fpr[0:n - 1] sum_h = np.zeros(fpr.shape) sum_h[0:n -", "target = np.asarray([target]) assert np.ndim(output) == 1, \\ 'wrong output", "np.ndim(target) == 1, \\ 'wrong target size (1D expected)' assert", "self.scores.shape[0] == 0: return 0.5 # sorting the arrays scores,", "dtype=np.float64) fpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) for i in", "and smaller when the model believes the example should be", "self).__init__() self.reset() def reset(self): self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets = torch.LongTensor(torch.LongStorage()).numpy()", "model is more convinced that the example should be positively", "higher when the model is more convinced that the example", "rule n = tpr.shape[0] h = fpr[1:n] - fpr[0:n -", "1: tpr[i] = tpr[i - 1] + 1 fpr[i] =", "if self.targets[sortind[i - 1]] == 1: tpr[i] = tpr[i -", "* 1.0) fpr /= ((self.targets - 1.0).sum() * -1.0) #", "using trapezoidal rule n = tpr.shape[0] h = fpr[1:n] -", "# sorting the arrays scores, sortind = torch.sort(torch.from_numpy(self.scores), dim=0, descending=True)", "can be interpreted as the probability that, given a randomly", "roc curve tpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) fpr =", "curve using trapezoidal rule n = tpr.shape[0] h = fpr[1:n]", "be negatively labeled (for instance, the output of a signoid", "target) def value(self): # case when number of elements added", "only values 0 (for negative examples) and 1 (for positive", "interpreted as the probability that, given a randomly selected positive", "the roc curve tpr = np.zeros(shape=(scores.size + 1), dtype=np.float64) fpr", "measures the area under the receiver-operating characteristic (ROC) curve for", "under the curve (AUC) can be interpreted as the probability", "(self.targets.sum() * 1.0) fpr /= 
((self.targets - 1.0).sum() * -1.0)", "1), dtype=np.float64) for i in range(1, scores.size + 1): if", "0.5 # sorting the arrays scores, sortind = torch.sort(torch.from_numpy(self.scores), dim=0,", "if self.scores.shape[0] == 0: return 0.5 # sorting the arrays", "(1D expected)' assert output.shape[0] == target.shape[0], \\ 'number of outputs", "that the example should be positively labeled, and smaller when", "= np.append(self.targets, target) def value(self): # case when number of", "tpr[i] = tpr[i - 1] + 1 fpr[i] = fpr[i", "np.append(self.targets, target) def value(self): # case when number of elements", "sorting the arrays scores, sortind = torch.sort(torch.from_numpy(self.scores), dim=0, descending=True) scores", "of outputs and targets does not match' assert np.all(np.add(np.equal(target, 1),", "in range(1, scores.size + 1): if self.targets[sortind[i - 1]] ==", "calculating area under curve using trapezoidal rule n = tpr.shape[0]", "= target.cpu().squeeze().numpy() elif isinstance(target, numbers.Number): target = np.asarray([target]) assert np.ndim(output)", "= tpr.shape[0] h = fpr[1:n] - fpr[0:n - 1] sum_h", "that ought to be higher when the model is more", "scores = scores.numpy() sortind = sortind.numpy() # creating the roc", "are 0 if self.scores.shape[0] == 0: return 0.5 # sorting", "reset(self): self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy() self.targets = torch.LongTensor(torch.LongStorage()).numpy() def add(self, output,", "0 (for negative examples) and 1 (for positive examples). \"\"\"", "area = (sum_h * tpr).sum() / 2.0 return (area, tpr,", "given a randomly selected positive example and a randomly selected", "tpr /= (self.targets.sum() * 1.0) fpr /= ((self.targets - 1.0).sum()", "self.targets = torch.LongTensor(torch.LongStorage()).numpy() def add(self, output, target): if torch.is_tensor(output): output", "area under the receiver-operating characteristic (ROC) curve for binary classification" ]
[ "return self.cluster.get(\"current_x\") @property def current_y(self) -> int | None: \"\"\"Return", "self.cluster.get(\"color_mode\") @property def color_loop_active(self) -> int | None: \"\"\"Return cached", "import suppress from zigpy.zcl.clusters import lighting from .. import registries", "channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY = 0x08", "None: \"\"\"Return cached value of the color_loop_active attribute.\"\"\" return self.cluster.get(\"color_loop_active\")", "\"\"\"Return cached value of the color_loop_active attribute.\"\"\" return self.cluster.get(\"color_loop_active\") @property", "\"\"\"Return the warmest color_temp that this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_max\",", "153 ZCL_INIT_ATTRS = { \"color_mode\": False, \"color_temp_physical_min\": True, \"color_temp_physical_max\": True,", "self.cluster.get(\"current_y\") @property def min_mireds(self) -> int: \"\"\"Return the coldest color_temp", "__future__ import annotations from contextlib import suppress from zigpy.zcl.clusters import", "Home Automation.\"\"\" from __future__ import annotations from contextlib import suppress", "@property def current_y(self) -> int | None: \"\"\"Return cached value", "@property def min_mireds(self) -> int: \"\"\"Return the coldest color_temp that", "channels module for Zigbee Home Automation.\"\"\" from __future__ import annotations", "\"color_mode\": False, \"color_temp_physical_min\": True, \"color_temp_physical_max\": True, \"color_capabilities\": True, \"color_loop_active\": False,", "@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel): \"\"\"Color 
client", "from contextlib import suppress from zigpy.zcl.clusters import lighting from ..", "self.CAPABILITIES_COLOR_XY @property def color_mode(self) -> int | None: \"\"\"Return cached", "import lighting from .. import registries from ..const import REPORT_CONFIG_DEFAULT", "0x86 REPORT_CONFIG = ( {\"attr\": \"current_x\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\",", "is not None: return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY @property", "current_y(self) -> int | None: \"\"\"Return cached value of the", "0x10 UNSUPPORTED_ATTRIBUTE = 0x86 REPORT_CONFIG = ( {\"attr\": \"current_x\", \"config\":", "cached value of the color_loop_active attribute.\"\"\" return self.cluster.get(\"color_loop_active\") @property def", "None: \"\"\"Return cached value of the current_x attribute.\"\"\" return self.cluster.get(\"current_x\")", "-> int: \"\"\"Return the warmest color_temp that this channel supports.\"\"\"", "= 0x08 CAPABILITIES_COLOR_TEMP = 0x10 UNSUPPORTED_ATTRIBUTE = 0x86 REPORT_CONFIG =", "\"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY = 0x08 CAPABILITIES_COLOR_TEMP = 0x10 UNSUPPORTED_ATTRIBUTE =", "= 0x10 UNSUPPORTED_ATTRIBUTE = 0x86 REPORT_CONFIG = ( {\"attr\": \"current_x\",", "\"\"\"Return the coldest color_temp that this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\",", "from .. 
import registries from ..const import REPORT_CONFIG_DEFAULT from .base", "import annotations from contextlib import suppress from zigpy.zcl.clusters import lighting", "from __future__ import annotations from contextlib import suppress from zigpy.zcl.clusters", "the light.\"\"\" with suppress(KeyError): return self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\") is not", "self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property def max_mireds(self) -> int: \"\"\"Return the warmest", "500 MIN_MIREDS: int = 153 ZCL_INIT_ATTRS = { \"color_mode\": False,", "\"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel): \"\"\"Color client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id)", "min_mireds(self) -> int: \"\"\"Return the coldest color_temp that this channel", "= 500 MIN_MIREDS: int = 153 ZCL_INIT_ATTRS = { \"color_mode\":", "self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY @property def color_mode(self) -> int | None:", "color_mode(self) -> int | None: \"\"\"Return cached value of the", "@property def color_loop_active(self) -> int | None: \"\"\"Return cached value", "| None: \"\"\"Return cached value of the current_y attribute.\"\"\" return", "\"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"color_temperature\", \"config\": REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS: int =", "attribute.\"\"\" return self.cluster.get(\"current_y\") @property def min_mireds(self) -> int: \"\"\"Return the", "current_y attribute.\"\"\" return self.cluster.get(\"current_y\") @property def min_mireds(self) -> int: \"\"\"Return", "\"color_temp_physical_max\": True, \"color_capabilities\": True, \"color_loop_active\": False, } @property def color_capabilities(self)", "lighting from .. 
import registries from ..const import REPORT_CONFIG_DEFAULT from", "of the light.\"\"\" with suppress(KeyError): return self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\") is", "CAPABILITIES_COLOR_TEMP = 0x10 UNSUPPORTED_ATTRIBUTE = 0x86 REPORT_CONFIG = ( {\"attr\":", "the warmest color_temp that this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_max\", self.MAX_MIREDS)", "coldest color_temp that this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property", "{\"attr\": \"color_temperature\", \"config\": REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS: int = 500 MIN_MIREDS:", "Automation.\"\"\" from __future__ import annotations from contextlib import suppress from", "the color_mode attribute.\"\"\" return self.cluster.get(\"color_mode\") @property def color_loop_active(self) -> int", "ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel):", "self.cluster.get(\"color_temperature\") @property def current_x(self) -> int | None: \"\"\"Return cached", "int = 500 MIN_MIREDS: int = 153 ZCL_INIT_ATTRS = {", "color_loop_active(self) -> int | None: \"\"\"Return cached value of the", "Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel): \"\"\"Color client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id)", "not None: return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY @property def", "None: \"\"\"Return cached value of the color_mode attribute.\"\"\" return self.cluster.get(\"color_mode\")", "of the current_x attribute.\"\"\" return self.cluster.get(\"current_x\") @property def 
current_y(self) ->", "the color_loop_active attribute.\"\"\" return self.cluster.get(\"color_loop_active\") @property def color_temperature(self) -> int", "def min_mireds(self) -> int: \"\"\"Return the coldest color_temp that this", "ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel): \"\"\"Color", "{ \"color_mode\": False, \"color_temp_physical_min\": True, \"color_temp_physical_max\": True, \"color_capabilities\": True, \"color_loop_active\":", "value of the color_loop_active attribute.\"\"\" return self.cluster.get(\"color_loop_active\") @property def color_temperature(self)", "self.MIN_MIREDS) @property def max_mireds(self) -> int: \"\"\"Return the warmest color_temp", "@property def max_mireds(self) -> int: \"\"\"Return the warmest color_temp that", "def color_capabilities(self) -> int: \"\"\"Return color capabilities of the light.\"\"\"", "suppress from zigpy.zcl.clusters import lighting from .. import registries from", "color capabilities of the light.\"\"\" with suppress(KeyError): return self.cluster[\"color_capabilities\"] if", "\"\"\"Return cached value of the color_mode attribute.\"\"\" return self.cluster.get(\"color_mode\") @property", "channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property def max_mireds(self) -> int:", "| None: \"\"\"Return cached value of the color_loop_active attribute.\"\"\" return", "of the current_y attribute.\"\"\" return self.cluster.get(\"current_y\") @property def min_mireds(self) ->", "-> int | None: \"\"\"Return cached value of the current_y", "self.cluster.get(\"color_loop_active\") @property def color_temperature(self) -> int | None: \"\"\"Return cached", "from zigpy.zcl.clusters import lighting from .. 
import registries from ..const", "channel.\"\"\" CAPABILITIES_COLOR_XY = 0x08 CAPABILITIES_COLOR_TEMP = 0x10 UNSUPPORTED_ATTRIBUTE = 0x86", ".. import registries from ..const import REPORT_CONFIG_DEFAULT from .base import", "cached value of the color_mode attribute.\"\"\" return self.cluster.get(\"color_mode\") @property def", "True, \"color_loop_active\": False, } @property def color_capabilities(self) -> int: \"\"\"Return", "color_temp that this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property def", "int: \"\"\"Return the coldest color_temp that this channel supports.\"\"\" return", "-> int: \"\"\"Return color capabilities of the light.\"\"\" with suppress(KeyError):", "max_mireds(self) -> int: \"\"\"Return the warmest color_temp that this channel", "return self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\") is not None: return self.CAPABILITIES_COLOR_XY |", "color temperature.\"\"\" return self.cluster.get(\"color_temperature\") @property def current_x(self) -> int |", "def max_mireds(self) -> int: \"\"\"Return the warmest color_temp that this", "zigpy.zcl.clusters import lighting from .. 
import registries from ..const import", "-> int | None: \"\"\"Return cached value of color temperature.\"\"\"", "supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property def max_mireds(self) -> int: \"\"\"Return", "\"color_temp_physical_min\": True, \"color_temp_physical_max\": True, \"color_capabilities\": True, \"color_loop_active\": False, } @property", "for Zigbee Home Automation.\"\"\" from __future__ import annotations from contextlib", "Zigbee Home Automation.\"\"\" from __future__ import annotations from contextlib import", "REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"color_temperature\", \"config\": REPORT_CONFIG_DEFAULT}, )", "import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class", "return self.cluster.get(\"color_temperature\") @property def current_x(self) -> int | None: \"\"\"Return", "| None: \"\"\"Return cached value of color temperature.\"\"\" return self.cluster.get(\"color_temperature\")", "cached value of the current_x attribute.\"\"\" return self.cluster.get(\"current_x\") @property def", "value of the current_y attribute.\"\"\" return self.cluster.get(\"current_y\") @property def min_mireds(self)", "int: \"\"\"Return color capabilities of the light.\"\"\" with suppress(KeyError): return", "int = 153 ZCL_INIT_ATTRS = { \"color_mode\": False, \"color_temp_physical_min\": True,", "return self.cluster.get(\"current_y\") @property def min_mireds(self) -> int: \"\"\"Return the coldest", "self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\") is not None: return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP", "color_temperature(self) -> int | None: \"\"\"Return cached value of color", "of color temperature.\"\"\" return 
self.cluster.get(\"color_temperature\") @property def current_x(self) -> int", "channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel): \"\"\"Color client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class", "{\"attr\": \"current_x\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"color_temperature\",", "self.cluster.get(\"current_x\") @property def current_y(self) -> int | None: \"\"\"Return cached", "MAX_MIREDS: int = 500 MIN_MIREDS: int = 153 ZCL_INIT_ATTRS =", "\"config\": REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS: int = 500 MIN_MIREDS: int =", "int | None: \"\"\"Return cached value of color temperature.\"\"\" return", "\"current_x\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"color_temperature\", \"config\":", "int: \"\"\"Return the warmest color_temp that this channel supports.\"\"\" return", "the current_x attribute.\"\"\" return self.cluster.get(\"current_x\") @property def current_y(self) -> int", "-> int | None: \"\"\"Return cached value of the current_x", "temperature.\"\"\" return self.cluster.get(\"color_temperature\") @property def current_x(self) -> int | None:", "\"\"\"Return cached value of the current_x attribute.\"\"\" return self.cluster.get(\"current_x\") @property", "@property def color_temperature(self) -> int | None: \"\"\"Return cached value", "color_capabilities(self) -> int: \"\"\"Return color capabilities of the light.\"\"\" with", "def color_temperature(self) -> int | None: \"\"\"Return cached value of", "cached value of the current_y attribute.\"\"\" return self.cluster.get(\"current_y\") @property def", "client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) 
@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY =", "} @property def color_capabilities(self) -> int: \"\"\"Return color capabilities of", "registries from ..const import REPORT_CONFIG_DEFAULT from .base import ClientChannel, ZigbeeChannel", "| self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY @property def color_mode(self) -> int |", "def current_y(self) -> int | None: \"\"\"Return cached value of", "annotations from contextlib import suppress from zigpy.zcl.clusters import lighting from", "color_mode attribute.\"\"\" return self.cluster.get(\"color_mode\") @property def color_loop_active(self) -> int |", "current_x(self) -> int | None: \"\"\"Return cached value of the", "\"color_loop_active\": False, } @property def color_capabilities(self) -> int: \"\"\"Return color", "None: \"\"\"Return cached value of the current_y attribute.\"\"\" return self.cluster.get(\"current_y\")", "return self.CAPABILITIES_COLOR_XY @property def color_mode(self) -> int | None: \"\"\"Return", "self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY @property def color_mode(self) -> int", "return self.cluster.get(\"color_mode\") @property def color_loop_active(self) -> int | None: \"\"\"Return", "self.cluster.get(\"color_temperature\") is not None: return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY", "\"\"\"Color client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY", "value of the color_mode attribute.\"\"\" return self.cluster.get(\"color_mode\") @property def color_loop_active(self)", "\"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": 
\"color_temperature\", \"config\": REPORT_CONFIG_DEFAULT},", "of the color_mode attribute.\"\"\" return self.cluster.get(\"color_mode\") @property def color_loop_active(self) ->", "-> int | None: \"\"\"Return cached value of the color_loop_active", "cached value of color temperature.\"\"\" return self.cluster.get(\"color_temperature\") @property def current_x(self)", "@registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel): \"\"\"Color client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel):", "import registries from ..const import REPORT_CONFIG_DEFAULT from .base import ClientChannel,", "int | None: \"\"\"Return cached value of the current_y attribute.\"\"\"", "int | None: \"\"\"Return cached value of the current_x attribute.\"\"\"", "ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY = 0x08 CAPABILITIES_COLOR_TEMP = 0x10 UNSUPPORTED_ATTRIBUTE", "REPORT_CONFIG = ( {\"attr\": \"current_x\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\", \"config\":", "= { \"color_mode\": False, \"color_temp_physical_min\": True, \"color_temp_physical_max\": True, \"color_capabilities\": True,", "ZCL_INIT_ATTRS = { \"color_mode\": False, \"color_temp_physical_min\": True, \"color_temp_physical_max\": True, \"color_capabilities\":", "attribute.\"\"\" return self.cluster.get(\"color_mode\") @property def color_loop_active(self) -> int | None:", "int | None: \"\"\"Return cached value of the color_loop_active attribute.\"\"\"", "\"\"\"Return cached value of the current_y attribute.\"\"\" return self.cluster.get(\"current_y\") @property", "@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY = 0x08 CAPABILITIES_COLOR_TEMP =", "( {\"attr\": \"current_x\", 
\"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\":", "REPORT_CONFIG_DEFAULT from .base import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel): \"\"\"Ballast", "-> int | None: \"\"\"Return cached value of the color_mode", "= 153 ZCL_INIT_ATTRS = { \"color_mode\": False, \"color_temp_physical_min\": True, \"color_temp_physical_max\":", "attribute.\"\"\" return self.cluster.get(\"current_x\") @property def current_y(self) -> int | None:", "def color_mode(self) -> int | None: \"\"\"Return cached value of", "@property def current_x(self) -> int | None: \"\"\"Return cached value", "if self.cluster.get(\"color_temperature\") is not None: return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP return", "class Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id) class ColorClientChannel(ClientChannel): \"\"\"Color client channel.\"\"\"", "capabilities of the light.\"\"\" with suppress(KeyError): return self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\")", "CAPABILITIES_COLOR_XY = 0x08 CAPABILITIES_COLOR_TEMP = 0x10 UNSUPPORTED_ATTRIBUTE = 0x86 REPORT_CONFIG", ".base import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\" @registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id)", "| None: \"\"\"Return cached value of the current_x attribute.\"\"\" return", "= ( {\"attr\": \"current_x\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"current_y\", \"config\": REPORT_CONFIG_DEFAULT},", "class ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY = 0x08 CAPABILITIES_COLOR_TEMP = 0x10", "REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS: int = 500 MIN_MIREDS: int = 153", "= 0x86 REPORT_CONFIG 
= ( {\"attr\": \"current_x\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\":", "class ColorClientChannel(ClientChannel): \"\"\"Color client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel): \"\"\"Color", "that this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property def max_mireds(self)", "MIN_MIREDS: int = 153 ZCL_INIT_ATTRS = { \"color_mode\": False, \"color_temp_physical_min\":", "False, \"color_temp_physical_min\": True, \"color_temp_physical_max\": True, \"color_capabilities\": True, \"color_loop_active\": False, }", "\"\"\"Lighting channels module for Zigbee Home Automation.\"\"\" from __future__ import", "True, \"color_temp_physical_max\": True, \"color_capabilities\": True, \"color_loop_active\": False, } @property def", "int | None: \"\"\"Return cached value of the color_mode attribute.\"\"\"", "\"color_capabilities\": True, \"color_loop_active\": False, } @property def color_capabilities(self) -> int:", "the coldest color_temp that this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS)", "def current_x(self) -> int | None: \"\"\"Return cached value of", "return self.cluster.get(\"color_loop_active\") @property def color_temperature(self) -> int | None: \"\"\"Return", "REPORT_CONFIG_DEFAULT}, {\"attr\": \"color_temperature\", \"config\": REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS: int = 500", "def color_loop_active(self) -> int | None: \"\"\"Return cached value of", "\"\"\"Return color capabilities of the light.\"\"\" with suppress(KeyError): return self.cluster[\"color_capabilities\"]", "module for Zigbee Home Automation.\"\"\" from __future__ import annotations from", "of the color_loop_active attribute.\"\"\" return self.cluster.get(\"color_loop_active\") @property def color_temperature(self) ->", "return 
self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property def max_mireds(self) -> int: \"\"\"Return the", "@registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\" CAPABILITIES_COLOR_XY = 0x08 CAPABILITIES_COLOR_TEMP", "\"current_y\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"color_temperature\", \"config\": REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS: int", "the current_y attribute.\"\"\" return self.cluster.get(\"current_y\") @property def min_mireds(self) -> int:", "value of color temperature.\"\"\" return self.cluster.get(\"color_temperature\") @property def current_x(self) ->", "True, \"color_capabilities\": True, \"color_loop_active\": False, } @property def color_capabilities(self) ->", "attribute.\"\"\" return self.cluster.get(\"color_loop_active\") @property def color_temperature(self) -> int | None:", "ColorClientChannel(ClientChannel): \"\"\"Color client channel.\"\"\" @registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id) @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id) class ColorChannel(ZigbeeChannel): \"\"\"Color channel.\"\"\"", "False, } @property def color_capabilities(self) -> int: \"\"\"Return color capabilities", "@property def color_mode(self) -> int | None: \"\"\"Return cached value", "light.\"\"\" with suppress(KeyError): return self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\") is not None:", "this channel supports.\"\"\" return self.cluster.get(\"color_temp_physical_min\", self.MIN_MIREDS) @property def max_mireds(self) ->", "\"\"\"Return cached value of color temperature.\"\"\" return self.cluster.get(\"color_temperature\") @property def", ") MAX_MIREDS: int = 500 MIN_MIREDS: int = 153 ZCL_INIT_ATTRS", "{\"attr\": \"current_y\", \"config\": REPORT_CONFIG_DEFAULT}, {\"attr\": \"color_temperature\", \"config\": 
REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS:", "with suppress(KeyError): return self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\") is not None: return", "from .base import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel): \"\"\"Ballast channel.\"\"\"", "None: \"\"\"Return cached value of color temperature.\"\"\" return self.cluster.get(\"color_temperature\") @property", "@property def color_capabilities(self) -> int: \"\"\"Return color capabilities of the", "-> int: \"\"\"Return the coldest color_temp that this channel supports.\"\"\"", "color_loop_active attribute.\"\"\" return self.cluster.get(\"color_loop_active\") @property def color_temperature(self) -> int |", "contextlib import suppress from zigpy.zcl.clusters import lighting from .. import", "None: return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY @property def color_mode(self)", "| None: \"\"\"Return cached value of the color_mode attribute.\"\"\" return", "value of the current_x attribute.\"\"\" return self.cluster.get(\"current_x\") @property def current_y(self)", "0x08 CAPABILITIES_COLOR_TEMP = 0x10 UNSUPPORTED_ATTRIBUTE = 0x86 REPORT_CONFIG = (", "UNSUPPORTED_ATTRIBUTE = 0x86 REPORT_CONFIG = ( {\"attr\": \"current_x\", \"config\": REPORT_CONFIG_DEFAULT},", "return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP return self.CAPABILITIES_COLOR_XY @property def color_mode(self) ->", "\"color_temperature\", \"config\": REPORT_CONFIG_DEFAULT}, ) MAX_MIREDS: int = 500 MIN_MIREDS: int", "from ..const import REPORT_CONFIG_DEFAULT from .base import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id)", "suppress(KeyError): return self.cluster[\"color_capabilities\"] if self.cluster.get(\"color_temperature\") is not None: return self.CAPABILITIES_COLOR_XY", "import REPORT_CONFIG_DEFAULT from .base 
import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class Ballast(ZigbeeChannel):", "current_x attribute.\"\"\" return self.cluster.get(\"current_x\") @property def current_y(self) -> int |", "..const import REPORT_CONFIG_DEFAULT from .base import ClientChannel, ZigbeeChannel @registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id) class" ]
[ "interval * 1000000 def _from_struct(self, fq): \"\"\" \"\"\" self.flows =", "import rate2str __all__ = [\"ServiceCurve\", \"FlowQueue\", \"PFQueue\", \"PFQueueStats\"] class ServiceCurve(PFObject):", "_to_string(self): \"\"\" \"\"\" s = \" [ pkts: {0.packets[0]:10} bytes:", "self.qname = q.qname self.parent = q.parent self.ifname = q.ifname self.flags", "fq.interval def _to_struct(self): \"\"\" \"\"\" fq = pf._struct.pf_queue_fqspec() fq.flows =", "stats=None): \"\"\" \"\"\" if stats is None: stats = pf._struct.hfsc_class_stats()", "bw.absolute def _str_bandwidth(self, bw): \"\"\" \"\"\" return bw if isinstance(bw,", "q.qid self.parent_qid = q.parent_qid self.realtime = ServiceCurve(q.realtime) self.linkshare = ServiceCurve(q.linkshare)", "\" target {}ms\".format(self.target / 1000000) return s class PFQueue(PFObject): \"\"\"", "s += \" {.flowqueue}\".format(self) if self.linkshare.bandwidth or self.linkshare.burst: s +=", "fq = pf._struct.pf_queue_fqspec() fq.flows = self.flows fq.quantum = self.quantum fq.target", "* from pf._utils import rate2str __all__ = [\"ServiceCurve\", \"FlowQueue\", \"PFQueue\",", "\"\"\" _struct_type = pf._struct.pf_queue_scspec def __init__(self, bandwidth, burst=0, time=0): \"\"\"", "+= \" target {}ms\".format(self.target / 1000000) return s class PFQueue(PFObject):", "q.qname self.parent = q.parent self.ifname = q.ifname self.flags = q.flags", "s += \", max {}\".format(self.upperlimit) if self.flags & PFQS_DEFAULT: s", "not self.parent.startswith(\"_\"): s += \" parent {.parent}\".format(self) elif self.ifname: s", "+= \", max {}\".format(self.upperlimit) if self.flags & PFQS_DEFAULT: s +=", "bandwidth self.burst = burst self.time = time def _from_struct(self, sc):", "+= \" burst {}\".format(self._str_bandwidth(self.burst)) s += \" for {.time}ms\".format(self) return", "basestring) and self.bandwidth.endswith(\"%\")): sc.m2.percent = int(self.bandwidth[:-1]) else: sc.m2.absolute = self.bandwidth", "\"\"\" fq = 
pf._struct.pf_queue_fqspec() fq.flows = self.flows fq.quantum = self.quantum", "on {.ifname}\".format(self) if self.flags & PFQS_FLOWQUEUE: s += \" {.flowqueue}\".format(self)", "PFQS_DEFAULT: s += \" default\" if self.qlimit: s += \"", "= fq.quantum self.target = fq.target self.interval = fq.interval def _to_struct(self):", "fq.quantum self.target = fq.target self.interval = fq.interval def _to_struct(self): \"\"\"", "__init__(self, bandwidth, burst=0, time=0): \"\"\" \"\"\" if isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth)", "s = \"queue {.qname}\".format(self) if self.parent and not self.parent.startswith(\"_\"): s", "def _from_struct(self, fq): \"\"\" \"\"\" self.flows = fq.flows self.quantum =", "time def _from_struct(self, sc): \"\"\" \"\"\" self.bandwidth = self._get_bandwidth(sc.m2) self.burst", "= ServiceCurve(q.upperlimit) self.flowqueue = FlowQueue(q.flowqueue) def _to_struct(self): \"\"\" \"\"\" q", "schedulers and statistics.\"\"\" import pf._struct from pf._base import PFObject from", "PFQS_FLOWQUEUE: s += \" {.flowqueue}\".format(self) if self.linkshare.bandwidth or self.linkshare.burst: s", "and not self.parent.startswith(\"_\"): s += \" parent {.parent}\".format(self) elif self.ifname:", "pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6} ]\\n\" + \\ \" [ qlength:", "def _to_string(self): \"\"\" \"\"\" s = self._str_bandwidth(self.bandwidth) if self.time: s", "q.parent self.ifname = q.ifname self.flags = q.flags self.qlimit = q.qlimit", "_struct_type = pf._struct.pf_queue_scspec def __init__(self, bandwidth, burst=0, time=0): \"\"\" \"\"\"", "else: sc.m2.absolute = self.bandwidth if (isinstance(self.burst, basestring) and self.burst.endswith(\"%\")): sc.m1.percent", "= self.ifname q.flags = self.flags q.qlimit = self.qlimit q.qid =", "pf.constants import * from pf._utils import rate2str __all__ = [\"ServiceCurve\",", "self._from_struct(flows) else: self.flows = flows self.quantum = quantum self.target =", "\"\"\"Classes 
to represent Packet Filter's queueing schedulers and statistics.\"\"\" import", "self.upperlimit.bandwidth: s += \", max {}\".format(self.upperlimit) if self.flags & PFQS_DEFAULT:", "{.ifname}\".format(self) if self.flags & PFQS_FLOWQUEUE: s += \" {.flowqueue}\".format(self) if", "quantum {.quantum}\".format(self) if self.interval: s += \" interval {}ms\".format(self.interval /", "class PFQueueStats(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.hfsc_class_stats def __init__(self, stats=None):", "return s class PFQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queuespec def", "target * 1000000 self.interval = interval * 1000000 def _from_struct(self,", "{}\".format(self.realtime) if self.upperlimit.bandwidth: s += \", max {}\".format(self.upperlimit) if self.flags", "\"\"\" if isinstance(queue, basestring): queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif queue", "ServiceCurve(q.realtime) self.linkshare = ServiceCurve(q.linkshare) self.upperlimit = ServiceCurve(q.upperlimit) self.flowqueue = FlowQueue(q.flowqueue)", "s = \" [ pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10} \" +", "bandwidth {}\".format(self.linkshare) if self.realtime.bandwidth: s += \", min {}\".format(self.realtime) if", "_from_struct(self, sc): \"\"\" \"\"\" self.bandwidth = self._get_bandwidth(sc.m2) self.burst = self._get_bandwidth(sc.m1)", "self.quantum = quantum self.target = target * 1000000 self.interval =", "self.interval = fq.interval def _to_struct(self): \"\"\" \"\"\" fq = pf._struct.pf_queue_fqspec()", "[\"ServiceCurve\", \"FlowQueue\", \"PFQueue\", \"PFQueueStats\"] class ServiceCurve(PFObject): \"\"\" \"\"\" _struct_type =", "\"\"\" \"\"\" s = \"queue {.qname}\".format(self) if self.parent and not", "\"FlowQueue\", \"PFQueue\", \"PFQueueStats\"] class ServiceCurve(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_scspec", "= pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue, **kw) self.stats = PFQueueStats() def 
_from_struct(self,", "_from_struct(self, q): \"\"\" \"\"\" self.qname = q.qname self.parent = q.parent", "\" bandwidth {}\".format(self.linkshare) if self.realtime.bandwidth: s += \", min {}\".format(self.realtime)", "self.flags q.qlimit = self.qlimit q.qid = self.qid q.parent_qid = self.parent_qid", "s += \" default\" if self.qlimit: s += \" qlimit", "return \"{}%\".format(bw.percent) if bw.percent else bw.absolute def _str_bandwidth(self, bw): \"\"\"", "= self.flows fq.quantum = self.quantum fq.target = self.target fq.interval =", "return s class PFQueueStats(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.hfsc_class_stats def", "self.parent_qid q.realtime = self.realtime._to_struct() q.linkshare = self.linkshare._to_struct() q.upperlimit = self.upperlimit._to_struct()", "pf._struct.pf_queue_fqspec): self._from_struct(flows) else: self.flows = flows self.quantum = quantum self.target", "q.qlimit self.qid = q.qid self.parent_qid = q.parent_qid self.realtime = ServiceCurve(q.realtime)", "+= \" interval {}ms\".format(self.interval / 1000000) if self.target: s +=", "q.flags self.qlimit = q.qlimit self.qid = q.qid self.parent_qid = q.parent_qid", "{.qlimit}\".format(self) return s class PFQueueStats(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.hfsc_class_stats", "s.drop_cnt.bytes) def _to_string(self): \"\"\" \"\"\" s = \" [ pkts:", "_from_struct(self, fq): \"\"\" \"\"\" self.flows = fq.flows self.quantum = fq.quantum", "= time def _from_struct(self, sc): \"\"\" \"\"\" self.bandwidth = self._get_bandwidth(sc.m2)", "self.qid = q.qid self.parent_qid = q.parent_qid self.realtime = ServiceCurve(q.realtime) self.linkshare", "self.linkshare = ServiceCurve(q.linkshare) self.upperlimit = ServiceCurve(q.upperlimit) self.flowqueue = FlowQueue(q.flowqueue) def", "{}\".format(self.upperlimit) if self.flags & PFQS_DEFAULT: s += \" default\" if", "= target * 1000000 self.interval = interval * 1000000 def", "represent Packet Filter's queueing schedulers and 
statistics.\"\"\" import pf._struct from", "= pf._struct.hfsc_class_stats def __init__(self, stats=None): \"\"\" \"\"\" if stats is", "fq.flows = self.flows fq.quantum = self.quantum fq.target = self.target fq.interval", "def _str_bandwidth(self, bw): \"\"\" \"\"\" return bw if isinstance(bw, basestring)", "self.qlimit: s += \" qlimit {.qlimit}\".format(self) return s class PFQueueStats(PFObject):", "self.burst sc.d = self.time return sc def _get_bandwidth(self, bw): \"\"\"", "\"\"\" s = \" [ pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10} \"", "self.flows fq.quantum = self.quantum fq.target = self.target fq.interval = self.interval", "statistics.\"\"\" import pf._struct from pf._base import PFObject from pf.constants import", "def __init__(self, bandwidth, burst=0, time=0): \"\"\" \"\"\" if isinstance(bandwidth, pf._struct.pf_queue_scspec):", "time=0): \"\"\" \"\"\" if isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else: self.bandwidth =", "sc.m1.absolute = self.burst sc.d = self.time return sc def _get_bandwidth(self,", "self.time: s += \" burst {}\".format(self._str_bandwidth(self.burst)) s += \" for", "self.qlimit q.qid = self.qid q.parent_qid = self.parent_qid q.realtime = self.realtime._to_struct()", "if self.flags & PFQS_DEFAULT: s += \" default\" if self.qlimit:", "ServiceCurve(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_scspec def __init__(self, bandwidth, burst=0,", "self.linkshare.bandwidth or self.linkshare.burst: s += \" bandwidth {}\".format(self.linkshare) if self.realtime.bandwidth:", "self.realtime.bandwidth: s += \", min {}\".format(self.realtime) if self.upperlimit.bandwidth: s +=", "self).__init__(queue, **kw) self.stats = PFQueueStats() def _from_struct(self, q): \"\"\" \"\"\"", "= fq.flows self.quantum = fq.quantum self.target = fq.target self.interval =", "bw.percent else bw.absolute def _str_bandwidth(self, bw): \"\"\" \"\"\" return bw", "s += \", min {}\".format(self.realtime) if 
self.upperlimit.bandwidth: s += \",", "bytes: {0.bytes[0]:10} \" + \\ \"dropped pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6}", "PFObject from pf.constants import * from pf._utils import rate2str __all__", "self._str_bandwidth(self.bandwidth) if self.time: s += \" burst {}\".format(self._str_bandwidth(self.burst)) s +=", "= self.qlimit q.qid = self.qid q.parent_qid = self.parent_qid q.realtime =", "if stats is None: stats = pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats) def", "self.qlength = s.qlength self.qlimit = s.qlimit self.packets = (s.xmit_cnt.packets, s.drop_cnt.packets)", "= s.qlength self.qlimit = s.qlimit self.packets = (s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes", "q.parent_qid = self.parent_qid q.realtime = self.realtime._to_struct() q.linkshare = self.linkshare._to_struct() q.upperlimit", "q def _to_string(self): \"\"\" \"\"\" s = \"queue {.qname}\".format(self) if", "= FlowQueue(q.flowqueue) def _to_struct(self): \"\"\" \"\"\" q = pf._struct.pf_queuespec() q.qname", "self.upperlimit._to_struct() q.flowqueue = self.flowqueue._to_struct() return q def _to_string(self): \"\"\" \"\"\"", "\"\"\" \"\"\" _struct_type = pf._struct.pf_queue_scspec def __init__(self, bandwidth, burst=0, time=0):", "\"\"\" \"\"\" if isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows) else: self.flows = flows", "(isinstance(self.bandwidth, basestring) and self.bandwidth.endswith(\"%\")): sc.m2.percent = int(self.bandwidth[:-1]) else: sc.m2.absolute =", "self.qlimit = q.qlimit self.qid = q.qid self.parent_qid = q.parent_qid self.realtime", "Packet Filter's queueing schedulers and statistics.\"\"\" import pf._struct from pf._base", "\" burst {}\".format(self._str_bandwidth(self.burst)) s += \" for {.time}ms\".format(self) return s", "+= \" qlimit {.qlimit}\".format(self) return s class PFQueueStats(PFObject): \"\"\" \"\"\"", "is None: queue = pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue, **kw) self.stats =", "s 
+= \" on {.ifname}\".format(self) if self.flags & PFQS_FLOWQUEUE: s", "= pf._struct.pf_queue_fqspec def __init__(self, flows, quantum=0, target=0, interval=0): \"\"\" \"\"\"", "else bw.absolute def _str_bandwidth(self, bw): \"\"\" \"\"\" return bw if", "def _to_string(self): \"\"\" \"\"\" s = \"queue {.qname}\".format(self) if self.parent", "pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif queue is None: queue = pf._struct.pf_queuespec() super(PFQueue,", "burst {}\".format(self._str_bandwidth(self.burst)) s += \" for {.time}ms\".format(self) return s class", "else: self.flows = flows self.quantum = quantum self.target = target", "_to_struct(self): \"\"\" \"\"\" q = pf._struct.pf_queuespec() q.qname = self.qname q.parent", "parent {.parent}\".format(self) elif self.ifname: s += \" on {.ifname}\".format(self) if", "q.ifname self.flags = q.flags self.qlimit = q.qlimit self.qid = q.qid", "pf._struct.pf_queue_fqspec() fq.flows = self.flows fq.quantum = self.quantum fq.target = self.target", "def _to_string(self): \"\"\" \"\"\" s = \"flows {.flows}\".format(self) if self.quantum:", "\" on {.ifname}\".format(self) if self.flags & PFQS_FLOWQUEUE: s += \"", "= quantum self.target = target * 1000000 self.interval = interval", "\\ \"dropped pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6} ]\\n\" + \\ \"", "= int(self.burst[:-1]) else: sc.m1.absolute = self.burst sc.d = self.time return", "pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10} \" + \\ \"dropped pkts: {0.packets[1]:6}", "None: stats = pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats) def _from_struct(self, s): \"\"\"", "= pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats) def _from_struct(self, s): \"\"\" \"\"\" self.qlength", "q.flowqueue = self.flowqueue._to_struct() return q def _to_string(self): \"\"\" \"\"\" s", "= q.flags self.qlimit = q.qlimit self.qid = q.qid self.parent_qid =", "s += \" parent {.parent}\".format(self) elif self.ifname: s += \"", 
"fq.interval = self.interval return fq def _to_string(self): \"\"\" \"\"\" s", "= self.parent_qid q.realtime = self.realtime._to_struct() q.linkshare = self.linkshare._to_struct() q.upperlimit =", "\"PFQueue\", \"PFQueueStats\"] class ServiceCurve(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_scspec def", "\"{}%\".format(bw.percent) if bw.percent else bw.absolute def _str_bandwidth(self, bw): \"\"\" \"\"\"", "= self.flowqueue._to_struct() return q def _to_string(self): \"\"\" \"\"\" s =", "= \"queue {.qname}\".format(self) if self.parent and not self.parent.startswith(\"_\"): s +=", "(isinstance(self.burst, basestring) and self.burst.endswith(\"%\")): sc.m1.percent = int(self.burst[:-1]) else: sc.m1.absolute =", "queue = pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue, **kw) self.stats = PFQueueStats() def", "(s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes) def _to_string(self): \"\"\" \"\"\"", "(s.xmit_cnt.bytes, s.drop_cnt.bytes) def _to_string(self): \"\"\" \"\"\" s = \" [", "{.parent}\".format(self) elif self.ifname: s += \" on {.ifname}\".format(self) if self.flags", "import PFObject from pf.constants import * from pf._utils import rate2str", "bytes: {0.bytes[1]:6} ]\\n\" + \\ \" [ qlength: {0.qlength:3}/{0.qlimit:3} ]\"", "= pf._struct.pf_queuespec def __init__(self, queue=None, **kw): \"\"\" \"\"\" if isinstance(queue,", "\"\"\" _struct_type = pf._struct.hfsc_class_stats def __init__(self, stats=None): \"\"\" \"\"\" if", "\"\"\" \"\"\" return \"{}%\".format(bw.percent) if bw.percent else bw.absolute def _str_bandwidth(self,", "\"queue {.qname}\".format(self) if self.parent and not self.parent.startswith(\"_\"): s += \"", "**kw) self.stats = PFQueueStats() def _from_struct(self, q): \"\"\" \"\"\" self.qname", "int(self.bandwidth[:-1]) else: sc.m2.absolute = self.bandwidth if (isinstance(self.burst, basestring) and self.burst.endswith(\"%\")):", "1000000) if self.target: s += \" target 
{}ms\".format(self.target / 1000000)", "/ 1000000) return s class PFQueue(PFObject): \"\"\" \"\"\" _struct_type =", "self.flowqueue = FlowQueue(q.flowqueue) def _to_struct(self): \"\"\" \"\"\" q = pf._struct.pf_queuespec()", "if self.quantum: s += \" quantum {.quantum}\".format(self) if self.interval: s", "= q.parent self.ifname = q.ifname self.flags = q.flags self.qlimit =", "def _from_struct(self, s): \"\"\" \"\"\" self.qlength = s.qlength self.qlimit =", "self.flags & PFQS_DEFAULT: s += \" default\" if self.qlimit: s", "sc = pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth, basestring) and self.bandwidth.endswith(\"%\")): sc.m2.percent =", "pf._struct.pf_queue_scspec def __init__(self, bandwidth, burst=0, time=0): \"\"\" \"\"\" if isinstance(bandwidth,", "+= \" quantum {.quantum}\".format(self) if self.interval: s += \" interval", "s += \" target {}ms\".format(self.target / 1000000) return s class", "s += \" qlimit {.qlimit}\".format(self) return s class PFQueueStats(PFObject): \"\"\"", "self.parent and not self.parent.startswith(\"_\"): s += \" parent {.parent}\".format(self) elif", "pf._base import PFObject from pf.constants import * from pf._utils import", "__init__(self, flows, quantum=0, target=0, interval=0): \"\"\" \"\"\" if isinstance(flows, pf._struct.pf_queue_fqspec):", "self.interval return fq def _to_string(self): \"\"\" \"\"\" s = \"flows", "= int(self.bandwidth[:-1]) else: sc.m2.absolute = self.bandwidth if (isinstance(self.burst, basestring) and", "self.burst = self._get_bandwidth(sc.m1) self.time = sc.d def _to_struct(self): \"\"\" \"\"\"", "\"\"\" q = pf._struct.pf_queuespec() q.qname = self.qname q.parent = self.parent", "= fq.target self.interval = fq.interval def _to_struct(self): \"\"\" \"\"\" fq", "isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows) else: self.flows = flows self.quantum = quantum", "q = pf._struct.pf_queuespec() q.qname = self.qname q.parent = self.parent q.ifname", "\"\"\" \"\"\" fq = 
pf._struct.pf_queue_fqspec() fq.flows = self.flows fq.quantum =", "stats = pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats) def _from_struct(self, s): \"\"\" \"\"\"", "{.quantum}\".format(self) if self.interval: s += \" interval {}ms\".format(self.interval / 1000000)", "bw): \"\"\" \"\"\" return \"{}%\".format(bw.percent) if bw.percent else bw.absolute def", "+= \" parent {.parent}\".format(self) elif self.ifname: s += \" on", "{.flows}\".format(self) if self.quantum: s += \" quantum {.quantum}\".format(self) if self.interval:", "s += \" burst {}\".format(self._str_bandwidth(self.burst)) s += \" for {.time}ms\".format(self)", "sc.m2.absolute = self.bandwidth if (isinstance(self.burst, basestring) and self.burst.endswith(\"%\")): sc.m1.percent =", "q.qlimit = self.qlimit q.qid = self.qid q.parent_qid = self.parent_qid q.realtime", "flows, quantum=0, target=0, interval=0): \"\"\" \"\"\" if isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows)", "= fq.interval def _to_struct(self): \"\"\" \"\"\" fq = pf._struct.pf_queue_fqspec() fq.flows", "\" + \\ \"dropped pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6} ]\\n\" +", "pf._struct.pf_queuespec def __init__(self, queue=None, **kw): \"\"\" \"\"\" if isinstance(queue, basestring):", "PFQueueStats() def _from_struct(self, q): \"\"\" \"\"\" self.qname = q.qname self.parent", "self.bandwidth if (isinstance(self.burst, basestring) and self.burst.endswith(\"%\")): sc.m1.percent = int(self.burst[:-1]) else:", "= q.parent_qid self.realtime = ServiceCurve(q.realtime) self.linkshare = ServiceCurve(q.linkshare) self.upperlimit =", "s.drop_cnt.packets) self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes) def _to_string(self): \"\"\" \"\"\" s", "fq.quantum = self.quantum fq.target = self.target fq.interval = self.interval return", "self.ifname: s += \" on {.ifname}\".format(self) if self.flags & PFQS_FLOWQUEUE:", "queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif queue is None: 
queue =", "\"\"\" s = self._str_bandwidth(self.bandwidth) if self.time: s += \" burst", "self.time = sc.d def _to_struct(self): \"\"\" \"\"\" sc = pf._struct.pf_queue_scspec()", "pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats) def _from_struct(self, s): \"\"\" \"\"\" self.qlength =", "self.upperlimit = ServiceCurve(q.upperlimit) self.flowqueue = FlowQueue(q.flowqueue) def _to_struct(self): \"\"\" \"\"\"", "to represent Packet Filter's queueing schedulers and statistics.\"\"\" import pf._struct", "\"\"\" \"\"\" s = self._str_bandwidth(self.bandwidth) if self.time: s += \"", "q.flags = self.flags q.qlimit = self.qlimit q.qid = self.qid q.parent_qid", "\"\"\" \"\"\" self.bandwidth = self._get_bandwidth(sc.m2) self.burst = self._get_bandwidth(sc.m1) self.time =", "{}\".format(self._str_bandwidth(self.burst)) s += \" for {.time}ms\".format(self) return s class FlowQueue(PFObject):", "basestring): queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif queue is None: queue", "= self.qname q.parent = self.parent q.ifname = self.ifname q.flags =", "self.target = target * 1000000 self.interval = interval * 1000000", "if self.upperlimit.bandwidth: s += \", max {}\".format(self.upperlimit) if self.flags &", "* 1000000 def _from_struct(self, fq): \"\"\" \"\"\" self.flows = fq.flows", "if bw.percent else bw.absolute def _str_bandwidth(self, bw): \"\"\" \"\"\" return", "flows self.quantum = quantum self.target = target * 1000000 self.interval", "return sc def _get_bandwidth(self, bw): \"\"\" \"\"\" return \"{}%\".format(bw.percent) if", "= PFQueueStats() def _from_struct(self, q): \"\"\" \"\"\" self.qname = q.qname", "= self.linkshare._to_struct() q.upperlimit = self.upperlimit._to_struct() q.flowqueue = self.flowqueue._to_struct() return q", "= self._get_bandwidth(sc.m1) self.time = sc.d def _to_struct(self): \"\"\" \"\"\" sc", "self.target fq.interval = self.interval return fq def _to_string(self): \"\"\" \"\"\"", "_to_string(self): 
\"\"\" \"\"\" s = \"queue {.qname}\".format(self) if self.parent and", "self.quantum: s += \" quantum {.quantum}\".format(self) if self.interval: s +=", "_to_struct(self): \"\"\" \"\"\" sc = pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth, basestring) and", "class FlowQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_fqspec def __init__(self, flows,", "_to_string(self): \"\"\" \"\"\" s = self._str_bandwidth(self.bandwidth) if self.time: s +=", "s += \" bandwidth {}\".format(self.linkshare) if self.realtime.bandwidth: s += \",", "\" [ pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10} \" + \\ \"dropped", "\" qlimit {.qlimit}\".format(self) return s class PFQueueStats(PFObject): \"\"\" \"\"\" _struct_type", "\"\"\" \"\"\" q = pf._struct.pf_queuespec() q.qname = self.qname q.parent =", "s): \"\"\" \"\"\" self.qlength = s.qlength self.qlimit = s.qlimit self.packets", "from pf._utils import rate2str __all__ = [\"ServiceCurve\", \"FlowQueue\", \"PFQueue\", \"PFQueueStats\"]", "q.qid = self.qid q.parent_qid = self.parent_qid q.realtime = self.realtime._to_struct() q.linkshare", "fq.target self.interval = fq.interval def _to_struct(self): \"\"\" \"\"\" fq =", "def _to_struct(self): \"\"\" \"\"\" fq = pf._struct.pf_queue_fqspec() fq.flows = self.flows", "= pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth, basestring) and self.bandwidth.endswith(\"%\")): sc.m2.percent = int(self.bandwidth[:-1])", "if self.realtime.bandwidth: s += \", min {}\".format(self.realtime) if self.upperlimit.bandwidth: s", "= self.flags q.qlimit = self.qlimit q.qid = self.qid q.parent_qid =", "{}\".format(self.linkshare) if self.realtime.bandwidth: s += \", min {}\".format(self.realtime) if self.upperlimit.bandwidth:", "__init__(self, queue=None, **kw): \"\"\" \"\"\" if isinstance(queue, basestring): queue =", "qlimit=DEFAULT_QLIMIT) elif queue is None: queue = pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue,", "self.parent_qid = q.parent_qid 
self.realtime = ServiceCurve(q.realtime) self.linkshare = ServiceCurve(q.linkshare) self.upperlimit", "return q def _to_string(self): \"\"\" \"\"\" s = \"queue {.qname}\".format(self)", "super(PFQueueStats, self).__init__(stats) def _from_struct(self, s): \"\"\" \"\"\" self.qlength = s.qlength", "interval=0): \"\"\" \"\"\" if isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows) else: self.flows =", "= self.upperlimit._to_struct() q.flowqueue = self.flowqueue._to_struct() return q def _to_string(self): \"\"\"", "\"\"\" s = \"queue {.qname}\".format(self) if self.parent and not self.parent.startswith(\"_\"):", "self).__init__(stats) def _from_struct(self, s): \"\"\" \"\"\" self.qlength = s.qlength self.qlimit", "q.linkshare = self.linkshare._to_struct() q.upperlimit = self.upperlimit._to_struct() q.flowqueue = self.flowqueue._to_struct() return", "isinstance(queue, basestring): queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif queue is None:", "= pf._struct.pf_queuespec() q.qname = self.qname q.parent = self.parent q.ifname =", "\" for {.time}ms\".format(self) return s class FlowQueue(PFObject): \"\"\" \"\"\" _struct_type", "else: self.bandwidth = bandwidth self.burst = burst self.time = time", "else: sc.m1.absolute = self.burst sc.d = self.time return sc def", "\" interval {}ms\".format(self.interval / 1000000) if self.target: s += \"", "if isinstance(bw, basestring) else rate2str(bw) def _to_string(self): \"\"\" \"\"\" s", "= flows self.quantum = quantum self.target = target * 1000000", "sc.d def _to_struct(self): \"\"\" \"\"\" sc = pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth,", "elif queue is None: queue = pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue, **kw)", "\" {.flowqueue}\".format(self) if self.linkshare.bandwidth or self.linkshare.burst: s += \" bandwidth", "s class FlowQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_fqspec def __init__(self,", "self.packets = 
(s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes) def _to_string(self):", "self.flows = flows self.quantum = quantum self.target = target *", "1000000) return s class PFQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queuespec", "FlowQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_fqspec def __init__(self, flows, quantum=0,", "\", min {}\".format(self.realtime) if self.upperlimit.bandwidth: s += \", max {}\".format(self.upperlimit)", "PFQueueStats(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.hfsc_class_stats def __init__(self, stats=None): \"\"\"", "= (s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes) def _to_string(self): \"\"\"", "= q.ifname self.flags = q.flags self.qlimit = q.qlimit self.qid =", "s class PFQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queuespec def __init__(self,", "return bw if isinstance(bw, basestring) else rate2str(bw) def _to_string(self): \"\"\"", "self.flowqueue._to_struct() return q def _to_string(self): \"\"\" \"\"\" s = \"queue", "self.quantum = fq.quantum self.target = fq.target self.interval = fq.interval def", "fq def _to_string(self): \"\"\" \"\"\" s = \"flows {.flows}\".format(self) if", "= q.qid self.parent_qid = q.parent_qid self.realtime = ServiceCurve(q.realtime) self.linkshare =", "if self.qlimit: s += \" qlimit {.qlimit}\".format(self) return s class", "class ServiceCurve(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_scspec def __init__(self, bandwidth,", "\"\"\" \"\"\" s = \"flows {.flows}\".format(self) if self.quantum: s +=", "{0.packets[1]:6} bytes: {0.bytes[1]:6} ]\\n\" + \\ \" [ qlength: {0.qlength:3}/{0.qlimit:3}", "return s class FlowQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_fqspec def", "s class PFQueueStats(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.hfsc_class_stats def __init__(self,", "]\\n\" + \\ \" [ qlength: {0.qlength:3}/{0.qlimit:3} 
]\" return s.format(self)", "+= \" for {.time}ms\".format(self) return s class FlowQueue(PFObject): \"\"\" \"\"\"", "s = \"flows {.flows}\".format(self) if self.quantum: s += \" quantum", "_to_struct(self): \"\"\" \"\"\" fq = pf._struct.pf_queue_fqspec() fq.flows = self.flows fq.quantum", "\"PFQueueStats\"] class ServiceCurve(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_scspec def __init__(self,", "bw): \"\"\" \"\"\" return bw if isinstance(bw, basestring) else rate2str(bw)", "pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue, **kw) self.stats = PFQueueStats() def _from_struct(self, q):", "queueing schedulers and statistics.\"\"\" import pf._struct from pf._base import PFObject", "= self._get_bandwidth(sc.m2) self.burst = self._get_bandwidth(sc.m1) self.time = sc.d def _to_struct(self):", "q.parent_qid self.realtime = ServiceCurve(q.realtime) self.linkshare = ServiceCurve(q.linkshare) self.upperlimit = ServiceCurve(q.upperlimit)", "or self.linkshare.burst: s += \" bandwidth {}\".format(self.linkshare) if self.realtime.bandwidth: s", "class PFQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queuespec def __init__(self, queue=None,", "and statistics.\"\"\" import pf._struct from pf._base import PFObject from pf.constants", "\"\"\" \"\"\" if isinstance(queue, basestring): queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif", "self.target: s += \" target {}ms\".format(self.target / 1000000) return s", "\"dropped pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6} ]\\n\" + \\ \" [", "super(PFQueue, self).__init__(queue, **kw) self.stats = PFQueueStats() def _from_struct(self, q): \"\"\"", "\"\"\" \"\"\" s = \" [ pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10}", "pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else: self.bandwidth = bandwidth self.burst = burst self.time", "+= \", min {}\".format(self.realtime) if self.upperlimit.bandwidth: s += \", max", "{.flowqueue}\".format(self) if self.linkshare.bandwidth 
or self.linkshare.burst: s += \" bandwidth {}\".format(self.linkshare)", "self.time = time def _from_struct(self, sc): \"\"\" \"\"\" self.bandwidth =", "\"\"\" \"\"\" self.flows = fq.flows self.quantum = fq.quantum self.target =", "FlowQueue(q.flowqueue) def _to_struct(self): \"\"\" \"\"\" q = pf._struct.pf_queuespec() q.qname =", "min {}\".format(self.realtime) if self.upperlimit.bandwidth: s += \", max {}\".format(self.upperlimit) if", "\"\"\" _struct_type = pf._struct.pf_queue_fqspec def __init__(self, flows, quantum=0, target=0, interval=0):", "PFQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queuespec def __init__(self, queue=None, **kw):", "self.burst = burst self.time = time def _from_struct(self, sc): \"\"\"", "\"\"\" if isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows) else: self.flows = flows self.quantum", "\" quantum {.quantum}\".format(self) if self.interval: s += \" interval {}ms\".format(self.interval", "int(self.burst[:-1]) else: sc.m1.absolute = self.burst sc.d = self.time return sc", "= self.time return sc def _get_bandwidth(self, bw): \"\"\" \"\"\" return", "return fq def _to_string(self): \"\"\" \"\"\" s = \"flows {.flows}\".format(self)", "\" parent {.parent}\".format(self) elif self.ifname: s += \" on {.ifname}\".format(self)", "quantum=0, target=0, interval=0): \"\"\" \"\"\" if isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows) else:", "= [\"ServiceCurve\", \"FlowQueue\", \"PFQueue\", \"PFQueueStats\"] class ServiceCurve(PFObject): \"\"\" \"\"\" _struct_type", "self.flows = fq.flows self.quantum = fq.quantum self.target = fq.target self.interval", "self.bandwidth = self._get_bandwidth(sc.m2) self.burst = self._get_bandwidth(sc.m1) self.time = sc.d def", "+ \\ \"dropped pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6} ]\\n\" + \\", "self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes) def _to_string(self): \"\"\" \"\"\" s =", "if (isinstance(self.burst, basestring) and 
self.burst.endswith(\"%\")): sc.m1.percent = int(self.burst[:-1]) else: sc.m1.absolute", "= self.target fq.interval = self.interval return fq def _to_string(self): \"\"\"", "pf._struct.pf_queue_fqspec def __init__(self, flows, quantum=0, target=0, interval=0): \"\"\" \"\"\" if", "and self.burst.endswith(\"%\")): sc.m1.percent = int(self.burst[:-1]) else: sc.m1.absolute = self.burst sc.d", "queue is None: queue = pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue, **kw) self.stats", "if self.time: s += \" burst {}\".format(self._str_bandwidth(self.burst)) s += \"", "= self.qid q.parent_qid = self.parent_qid q.realtime = self.realtime._to_struct() q.linkshare =", "self.interval: s += \" interval {}ms\".format(self.interval / 1000000) if self.target:", "{}ms\".format(self.interval / 1000000) if self.target: s += \" target {}ms\".format(self.target", "q.qname = self.qname q.parent = self.parent q.ifname = self.ifname q.flags", "= self.realtime._to_struct() q.linkshare = self.linkshare._to_struct() q.upperlimit = self.upperlimit._to_struct() q.flowqueue =", "s += \" interval {}ms\".format(self.interval / 1000000) if self.target: s", "\"\"\" self.qname = q.qname self.parent = q.parent self.ifname = q.ifname", "\"\"\" \"\"\" if isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else: self.bandwidth = bandwidth", "isinstance(bw, basestring) else rate2str(bw) def _to_string(self): \"\"\" \"\"\" s =", "\"\"\" \"\"\" if stats is None: stats = pf._struct.hfsc_class_stats() super(PFQueueStats,", "\"\"\" if stats is None: stats = pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats)", "def _to_string(self): \"\"\" \"\"\" s = \" [ pkts: {0.packets[0]:10}", "ServiceCurve(q.upperlimit) self.flowqueue = FlowQueue(q.flowqueue) def _to_struct(self): \"\"\" \"\"\" q =", "= self.interval return fq def _to_string(self): \"\"\" \"\"\" s =", "self.parent = q.parent self.ifname = q.ifname self.flags = q.flags self.qlimit", "self.time 
return sc def _get_bandwidth(self, bw): \"\"\" \"\"\" return \"{}%\".format(bw.percent)", "self.ifname = q.ifname self.flags = q.flags self.qlimit = q.qlimit self.qid", "quantum self.target = target * 1000000 self.interval = interval *", "= bandwidth self.burst = burst self.time = time def _from_struct(self,", "s = self._str_bandwidth(self.bandwidth) if self.time: s += \" burst {}\".format(self._str_bandwidth(self.burst))", "pf._struct from pf._base import PFObject from pf.constants import * from", "__init__(self, stats=None): \"\"\" \"\"\" if stats is None: stats =", "= \" [ pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10} \" + \\", "if self.interval: s += \" interval {}ms\".format(self.interval / 1000000) if", "_get_bandwidth(self, bw): \"\"\" \"\"\" return \"{}%\".format(bw.percent) if bw.percent else bw.absolute", "if self.parent and not self.parent.startswith(\"_\"): s += \" parent {.parent}\".format(self)", "basestring) else rate2str(bw) def _to_string(self): \"\"\" \"\"\" s = self._str_bandwidth(self.bandwidth)", "if self.flags & PFQS_FLOWQUEUE: s += \" {.flowqueue}\".format(self) if self.linkshare.bandwidth", "rate2str __all__ = [\"ServiceCurve\", \"FlowQueue\", \"PFQueue\", \"PFQueueStats\"] class ServiceCurve(PFObject): \"\"\"", "self.parent.startswith(\"_\"): s += \" parent {.parent}\".format(self) elif self.ifname: s +=", "None: queue = pf._struct.pf_queuespec() super(PFQueue, self).__init__(queue, **kw) self.stats = PFQueueStats()", "sc.m1.percent = int(self.burst[:-1]) else: sc.m1.absolute = self.burst sc.d = self.time", "q): \"\"\" \"\"\" self.qname = q.qname self.parent = q.parent self.ifname", "\"\"\" \"\"\" _struct_type = pf._struct.pf_queue_fqspec def __init__(self, flows, quantum=0, target=0,", "burst self.time = time def _from_struct(self, sc): \"\"\" \"\"\" self.bandwidth", "if isinstance(queue, basestring): queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif queue is", "Filter's queueing schedulers and statistics.\"\"\" 
import pf._struct from pf._base import", "if self.target: s += \" target {}ms\".format(self.target / 1000000) return", "isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else: self.bandwidth = bandwidth self.burst = burst", "_struct_type = pf._struct.pf_queue_fqspec def __init__(self, flows, quantum=0, target=0, interval=0): \"\"\"", "1000000 self.interval = interval * 1000000 def _from_struct(self, fq): \"\"\"", "self._get_bandwidth(sc.m2) self.burst = self._get_bandwidth(sc.m1) self.time = sc.d def _to_struct(self): \"\"\"", "{.qname}\".format(self) if self.parent and not self.parent.startswith(\"_\"): s += \" parent", "\"\"\" sc = pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth, basestring) and self.bandwidth.endswith(\"%\")): sc.m2.percent", "\"\"\" \"\"\" return bw if isinstance(bw, basestring) else rate2str(bw) def", "self.quantum fq.target = self.target fq.interval = self.interval return fq def", "def __init__(self, queue=None, **kw): \"\"\" \"\"\" if isinstance(queue, basestring): queue", "{0.bytes[0]:10} \" + \\ \"dropped pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6} ]\\n\"", "self.qname q.parent = self.parent q.ifname = self.ifname q.flags = self.flags", "self.bandwidth.endswith(\"%\")): sc.m2.percent = int(self.bandwidth[:-1]) else: sc.m2.absolute = self.bandwidth if (isinstance(self.burst,", "for {.time}ms\".format(self) return s class FlowQueue(PFObject): \"\"\" \"\"\" _struct_type =", "\"\"\" _struct_type = pf._struct.pf_queuespec def __init__(self, queue=None, **kw): \"\"\" \"\"\"", "ServiceCurve(q.linkshare) self.upperlimit = ServiceCurve(q.upperlimit) self.flowqueue = FlowQueue(q.flowqueue) def _to_struct(self): \"\"\"", "\"\"\" \"\"\" _struct_type = pf._struct.hfsc_class_stats def __init__(self, stats=None): \"\"\" \"\"\"", "self.realtime._to_struct() q.linkshare = self.linkshare._to_struct() q.upperlimit = self.upperlimit._to_struct() q.flowqueue = self.flowqueue._to_struct()", "= pf._struct.pf_queue_scspec 
def __init__(self, bandwidth, burst=0, time=0): \"\"\" \"\"\" if", "sc def _get_bandwidth(self, bw): \"\"\" \"\"\" return \"{}%\".format(bw.percent) if bw.percent", "fq.flows self.quantum = fq.quantum self.target = fq.target self.interval = fq.interval", "{0.packets[0]:10} bytes: {0.bytes[0]:10} \" + \\ \"dropped pkts: {0.packets[1]:6} bytes:", "pf._struct.hfsc_class_stats def __init__(self, stats=None): \"\"\" \"\"\" if stats is None:", "self.parent q.ifname = self.ifname q.flags = self.flags q.qlimit = self.qlimit", "\"\"\" self.qlength = s.qlength self.qlimit = s.qlimit self.packets = (s.xmit_cnt.packets,", "if (isinstance(self.bandwidth, basestring) and self.bandwidth.endswith(\"%\")): sc.m2.percent = int(self.bandwidth[:-1]) else: sc.m2.absolute", "\"flows {.flows}\".format(self) if self.quantum: s += \" quantum {.quantum}\".format(self) if", "max {}\".format(self.upperlimit) if self.flags & PFQS_DEFAULT: s += \" default\"", "/ 1000000) if self.target: s += \" target {}ms\".format(self.target /", "queue=None, **kw): \"\"\" \"\"\" if isinstance(queue, basestring): queue = pf._struct.pf_queuespec(qname=queue,", "s += \" for {.time}ms\".format(self) return s class FlowQueue(PFObject): \"\"\"", "if isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else: self.bandwidth = bandwidth self.burst =", "\"\"\" return \"{}%\".format(bw.percent) if bw.percent else bw.absolute def _str_bandwidth(self, bw):", "def _get_bandwidth(self, bw): \"\"\" \"\"\" return \"{}%\".format(bw.percent) if bw.percent else", "\"\"\" self.flows = fq.flows self.quantum = fq.quantum self.target = fq.target", "[ pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10} \" + \\ \"dropped pkts:", "if isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows) else: self.flows = flows self.quantum =", "= s.qlimit self.packets = (s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes)", "= self.bandwidth if (isinstance(self.burst, 
basestring) and self.burst.endswith(\"%\")): sc.m1.percent = int(self.burst[:-1])", "**kw): \"\"\" \"\"\" if isinstance(queue, basestring): queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT)", "self.linkshare._to_struct() q.upperlimit = self.upperlimit._to_struct() q.flowqueue = self.flowqueue._to_struct() return q def", "s += \" quantum {.quantum}\".format(self) if self.interval: s += \"", "& PFQS_FLOWQUEUE: s += \" {.flowqueue}\".format(self) if self.linkshare.bandwidth or self.linkshare.burst:", "\"\"\" \"\"\" self.qlength = s.qlength self.qlimit = s.qlimit self.packets =", "{}ms\".format(self.target / 1000000) return s class PFQueue(PFObject): \"\"\" \"\"\" _struct_type", "= \"flows {.flows}\".format(self) if self.quantum: s += \" quantum {.quantum}\".format(self)", "q.parent = self.parent q.ifname = self.ifname q.flags = self.flags q.qlimit", "_str_bandwidth(self, bw): \"\"\" \"\"\" return bw if isinstance(bw, basestring) else", "def __init__(self, stats=None): \"\"\" \"\"\" if stats is None: stats", "\"\"\" self.bandwidth = self._get_bandwidth(sc.m2) self.burst = self._get_bandwidth(sc.m1) self.time = sc.d", "+= \" bandwidth {}\".format(self.linkshare) if self.realtime.bandwidth: s += \", min", "\"\"\" \"\"\" sc = pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth, basestring) and self.bandwidth.endswith(\"%\")):", "\"\"\" if isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else: self.bandwidth = bandwidth self.burst", "def _to_struct(self): \"\"\" \"\"\" q = pf._struct.pf_queuespec() q.qname = self.qname", "self.flags = q.flags self.qlimit = q.qlimit self.qid = q.qid self.parent_qid", "= (s.xmit_cnt.bytes, s.drop_cnt.bytes) def _to_string(self): \"\"\" \"\"\" s = \"", "_struct_type = pf._struct.pf_queuespec def __init__(self, queue=None, **kw): \"\"\" \"\"\" if", "= pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT) elif queue is None: queue = pf._struct.pf_queuespec()", "elif self.ifname: s += 
\" on {.ifname}\".format(self) if self.flags &", "1000000 def _from_struct(self, fq): \"\"\" \"\"\" self.flows = fq.flows self.quantum", "import pf._struct from pf._base import PFObject from pf.constants import *", "target=0, interval=0): \"\"\" \"\"\" if isinstance(flows, pf._struct.pf_queue_fqspec): self._from_struct(flows) else: self.flows", "\"\"\" \"\"\" _struct_type = pf._struct.pf_queuespec def __init__(self, queue=None, **kw): \"\"\"", "\" default\" if self.qlimit: s += \" qlimit {.qlimit}\".format(self) return", "self.burst.endswith(\"%\")): sc.m1.percent = int(self.burst[:-1]) else: sc.m1.absolute = self.burst sc.d =", "def _to_struct(self): \"\"\" \"\"\" sc = pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth, basestring)", "= q.qlimit self.qid = q.qid self.parent_qid = q.parent_qid self.realtime =", "burst=0, time=0): \"\"\" \"\"\" if isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else: self.bandwidth", "= self._str_bandwidth(self.bandwidth) if self.time: s += \" burst {}\".format(self._str_bandwidth(self.burst)) s", "fq): \"\"\" \"\"\" self.flows = fq.flows self.quantum = fq.quantum self.target", "self.target = fq.target self.interval = fq.interval def _to_struct(self): \"\"\" \"\"\"", "else rate2str(bw) def _to_string(self): \"\"\" \"\"\" s = self._str_bandwidth(self.bandwidth) if", "if self.linkshare.bandwidth or self.linkshare.burst: s += \" bandwidth {}\".format(self.linkshare) if", "_struct_type = pf._struct.hfsc_class_stats def __init__(self, stats=None): \"\"\" \"\"\" if stats", "_to_string(self): \"\"\" \"\"\" s = \"flows {.flows}\".format(self) if self.quantum: s", "rate2str(bw) def _to_string(self): \"\"\" \"\"\" s = self._str_bandwidth(self.bandwidth) if self.time:", "basestring) and self.burst.endswith(\"%\")): sc.m1.percent = int(self.burst[:-1]) else: sc.m1.absolute = self.burst", "sc): \"\"\" \"\"\" self.bandwidth = self._get_bandwidth(sc.m2) self.burst = self._get_bandwidth(sc.m1) self.time", 
"stats is None: stats = pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats) def _from_struct(self,", "= burst self.time = time def _from_struct(self, sc): \"\"\" \"\"\"", "import * from pf._utils import rate2str __all__ = [\"ServiceCurve\", \"FlowQueue\",", "bw if isinstance(bw, basestring) else rate2str(bw) def _to_string(self): \"\"\" \"\"\"", "& PFQS_DEFAULT: s += \" default\" if self.qlimit: s +=", "q.ifname = self.ifname q.flags = self.flags q.qlimit = self.qlimit q.qid", "s.qlimit self.packets = (s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes) def", "{0.bytes[1]:6} ]\\n\" + \\ \" [ qlength: {0.qlength:3}/{0.qlimit:3} ]\" return", "default\" if self.qlimit: s += \" qlimit {.qlimit}\".format(self) return s", "self._get_bandwidth(sc.m1) self.time = sc.d def _to_struct(self): \"\"\" \"\"\" sc =", "pf._struct.pf_queue_scspec() if (isinstance(self.bandwidth, basestring) and self.bandwidth.endswith(\"%\")): sc.m2.percent = int(self.bandwidth[:-1]) else:", "\"\"\" return bw if isinstance(bw, basestring) else rate2str(bw) def _to_string(self):", "bandwidth, burst=0, time=0): \"\"\" \"\"\" if isinstance(bandwidth, pf._struct.pf_queue_scspec): self._from_struct(bandwidth) else:", "self._from_struct(bandwidth) else: self.bandwidth = bandwidth self.burst = burst self.time =", "self.interval = interval * 1000000 def _from_struct(self, fq): \"\"\" \"\"\"", "sc.d = self.time return sc def _get_bandwidth(self, bw): \"\"\" \"\"\"", "= ServiceCurve(q.realtime) self.linkshare = ServiceCurve(q.linkshare) self.upperlimit = ServiceCurve(q.upperlimit) self.flowqueue =", "self.bandwidth = bandwidth self.burst = burst self.time = time def", "qlimit {.qlimit}\".format(self) return s class PFQueueStats(PFObject): \"\"\" \"\"\" _struct_type =", "+= \" {.flowqueue}\".format(self) if self.linkshare.bandwidth or self.linkshare.burst: s += \"", "\", max {}\".format(self.upperlimit) if self.flags & PFQS_DEFAULT: s += \"", "= 
ServiceCurve(q.linkshare) self.upperlimit = ServiceCurve(q.upperlimit) self.flowqueue = FlowQueue(q.flowqueue) def _to_struct(self):", "\"\"\" \"\"\" self.qname = q.qname self.parent = q.parent self.ifname =", "* 1000000 self.interval = interval * 1000000 def _from_struct(self, fq):", "pf._struct.pf_queuespec() q.qname = self.qname q.parent = self.parent q.ifname = self.ifname", "q.realtime = self.realtime._to_struct() q.linkshare = self.linkshare._to_struct() q.upperlimit = self.upperlimit._to_struct() q.flowqueue", "_from_struct(self, s): \"\"\" \"\"\" self.qlength = s.qlength self.qlimit = s.qlimit", "self.qlimit = s.qlimit self.packets = (s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes = (s.xmit_cnt.bytes,", "q.upperlimit = self.upperlimit._to_struct() q.flowqueue = self.flowqueue._to_struct() return q def _to_string(self):", "s.qlength self.qlimit = s.qlimit self.packets = (s.xmit_cnt.packets, s.drop_cnt.packets) self.bytes =", "= sc.d def _to_struct(self): \"\"\" \"\"\" sc = pf._struct.pf_queue_scspec() if", "{.time}ms\".format(self) return s class FlowQueue(PFObject): \"\"\" \"\"\" _struct_type = pf._struct.pf_queue_fqspec", "self.realtime = ServiceCurve(q.realtime) self.linkshare = ServiceCurve(q.linkshare) self.upperlimit = ServiceCurve(q.upperlimit) self.flowqueue", "= interval * 1000000 def _from_struct(self, fq): \"\"\" \"\"\" self.flows", "target {}ms\".format(self.target / 1000000) return s class PFQueue(PFObject): \"\"\" \"\"\"", "fq.target = self.target fq.interval = self.interval return fq def _to_string(self):", "self.stats = PFQueueStats() def _from_struct(self, q): \"\"\" \"\"\" self.qname =", "pf._utils import rate2str __all__ = [\"ServiceCurve\", \"FlowQueue\", \"PFQueue\", \"PFQueueStats\"] class", "self.linkshare.burst: s += \" bandwidth {}\".format(self.linkshare) if self.realtime.bandwidth: s +=", "from pf._base import PFObject from pf.constants import * from pf._utils", "= self.parent q.ifname = self.ifname q.flags = self.flags 
q.qlimit =", "= pf._struct.pf_queue_fqspec() fq.flows = self.flows fq.quantum = self.quantum fq.target =", "self.flags & PFQS_FLOWQUEUE: s += \" {.flowqueue}\".format(self) if self.linkshare.bandwidth or", "interval {}ms\".format(self.interval / 1000000) if self.target: s += \" target", "self.ifname q.flags = self.flags q.qlimit = self.qlimit q.qid = self.qid", "is None: stats = pf._struct.hfsc_class_stats() super(PFQueueStats, self).__init__(stats) def _from_struct(self, s):", "and self.bandwidth.endswith(\"%\")): sc.m2.percent = int(self.bandwidth[:-1]) else: sc.m2.absolute = self.bandwidth if", "def _from_struct(self, sc): \"\"\" \"\"\" self.bandwidth = self._get_bandwidth(sc.m2) self.burst =", "def _from_struct(self, q): \"\"\" \"\"\" self.qname = q.qname self.parent =", "__all__ = [\"ServiceCurve\", \"FlowQueue\", \"PFQueue\", \"PFQueueStats\"] class ServiceCurve(PFObject): \"\"\" \"\"\"", "= self.burst sc.d = self.time return sc def _get_bandwidth(self, bw):", "\"\"\" s = \"flows {.flows}\".format(self) if self.quantum: s += \"", "= q.qname self.parent = q.parent self.ifname = q.ifname self.flags =", "from pf.constants import * from pf._utils import rate2str __all__ =", "+= \" default\" if self.qlimit: s += \" qlimit {.qlimit}\".format(self)", "def __init__(self, flows, quantum=0, target=0, interval=0): \"\"\" \"\"\" if isinstance(flows,", "self.qid q.parent_qid = self.parent_qid q.realtime = self.realtime._to_struct() q.linkshare = self.linkshare._to_struct()", "+= \" on {.ifname}\".format(self) if self.flags & PFQS_FLOWQUEUE: s +=", "sc.m2.percent = int(self.bandwidth[:-1]) else: sc.m2.absolute = self.bandwidth if (isinstance(self.burst, basestring)", "= self.quantum fq.target = self.target fq.interval = self.interval return fq" ]
[ "# Export this package's modules as members: from ._enums import", "from .get_global_user_operation_status import * from .get_global_user_personal_preferences import * from .get_lab", "return Environment(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn))", "generated by the Pulumi SDK Generator. *** # *** Do", "import outputs def _register_module(): import pulumi from ... import _utilities", "from .list_global_user_environments import * from .list_global_user_labs import * from .user", "typ == \"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:User\":", "User(name, pulumi.ResourceOptions(urn=urn)) else: raise Exception(f\"unknown resource type {typ}\") _module_instance =", "pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:Lab\": return Lab(name, pulumi.ResourceOptions(urn=urn)) elif typ", "== \"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:GalleryImage\": return", "from .get_global_user_personal_preferences import * from .get_lab import * from .get_lab_account", "-> pulumi.Resource: if typ == \"azure-native:labservices/v20181015:Environment\": return Environment(name, pulumi.ResourceOptions(urn=urn)) elif", "package's modules as members: from ._enums import * from .environment", "return GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:Lab\": return Lab(name, pulumi.ResourceOptions(urn=urn))", "* from .get_lab_account_regional_availability import * from .get_user import * from", "* from .get_user import * from .lab import * from", "* from .user import * from ._inputs import * from", "== 
\"azure-native:labservices/v20181015:Environment\": return Environment(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:EnvironmentSetting\": return", "import * from . import outputs def _register_module(): import pulumi", "* from . import outputs def _register_module(): import pulumi from", "import _utilities class Module(pulumi.runtime.ResourceModule): _version = _utilities.get_semver_version() def version(self): return", "raise Exception(f\"unknown resource type {typ}\") _module_instance = Module() pulumi.runtime.register_resource_module(\"azure-native\", \"labservices/v20181015\",", "from ... import _utilities class Module(pulumi.runtime.ResourceModule): _version = _utilities.get_semver_version() def", "members: from ._enums import * from .environment import * from", "\"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name,", "Export this package's modules as members: from ._enums import *", "import * from .get_lab_account_regional_availability import * from .get_user import *", "return Lab(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name, pulumi.ResourceOptions(urn=urn))", "* from .gallery_image import * from .get_environment import * from", "what you are doing! 
*** # Export this package's modules", "# coding=utf-8 # *** WARNING: this file was generated by", "from .environment import * from .environment_setting import * from .gallery_image", "* from .get_global_user_operation_status import * from .get_global_user_personal_preferences import * from", "== \"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:Lab\": return", ".get_lab_account_regional_availability import * from .get_user import * from .lab import", "\"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:User\": return User(name,", "\"azure-native:labservices/v20181015:User\": return User(name, pulumi.ResourceOptions(urn=urn)) else: raise Exception(f\"unknown resource type {typ}\")", "Generator. *** # *** Do not edit by hand unless", "this file was generated by the Pulumi SDK Generator. ***", ".get_lab import * from .get_lab_account import * from .get_lab_account_regional_availability import", "from . import outputs def _register_module(): import pulumi from ...", "you're certain you know what you are doing! *** #", "WARNING: this file was generated by the Pulumi SDK Generator.", "from .lab_account import * from .list_global_user_environments import * from .list_global_user_labs", "_utilities.get_semver_version() def version(self): return Module._version def construct(self, name: str, typ:", "_register_module(): import pulumi from ... import _utilities class Module(pulumi.runtime.ResourceModule): _version", "you know what you are doing! 
*** # Export this", "pulumi.ResourceOptions(urn=urn)) else: raise Exception(f\"unknown resource type {typ}\") _module_instance = Module()", "modules as members: from ._enums import * from .environment import", "* from .get_environment import * from .get_environment_setting import * from", ".lab_account import * from .list_global_user_environments import * from .list_global_user_labs import", "str) -> pulumi.Resource: if typ == \"azure-native:labservices/v20181015:Environment\": return Environment(name, pulumi.ResourceOptions(urn=urn))", "urn: str) -> pulumi.Resource: if typ == \"azure-native:labservices/v20181015:Environment\": return Environment(name,", "._enums import * from .environment import * from .environment_setting import", ".get_global_user_personal_preferences import * from .get_lab import * from .get_lab_account import", "\"azure-native:labservices/v20181015:Environment\": return Environment(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name,", "return Module._version def construct(self, name: str, typ: str, urn: str)", "know what you are doing! *** # Export this package's", "* from .get_global_user_environment import * from .get_global_user_operation_batch_status import * from", "Exception(f\"unknown resource type {typ}\") _module_instance = Module() pulumi.runtime.register_resource_module(\"azure-native\", \"labservices/v20181015\", _module_instance)", ".get_environment_setting import * from .get_gallery_image import * from .get_global_user_environment import", "import * from .get_global_user_personal_preferences import * from .get_lab import *", "_version = _utilities.get_semver_version() def version(self): return Module._version def construct(self, name:", "coding=utf-8 # *** WARNING: this file was generated by the", "from ._inputs import * from . import outputs def _register_module():", "outputs def _register_module(): import pulumi from ... 
import _utilities class", "* from .get_lab import * from .get_lab_account import * from", "str, typ: str, urn: str) -> pulumi.Resource: if typ ==", "from .get_lab import * from .get_lab_account import * from .get_lab_account_regional_availability", "* from .get_gallery_image import * from .get_global_user_environment import * from", "import * from .get_environment_setting import * from .get_gallery_image import *", "* from .environment_setting import * from .gallery_image import * from", "name: str, typ: str, urn: str) -> pulumi.Resource: if typ", "from .environment_setting import * from .gallery_image import * from .get_environment", "from .get_global_user_environment import * from .get_global_user_operation_batch_status import * from .get_global_user_operation_status", "edit by hand unless you're certain you know what you", "file was generated by the Pulumi SDK Generator. *** #", "class Module(pulumi.runtime.ResourceModule): _version = _utilities.get_semver_version() def version(self): return Module._version def", "import * from .lab_account import * from .list_global_user_environments import *", "version(self): return Module._version def construct(self, name: str, typ: str, urn:", "* from .lab import * from .lab_account import * from", ".get_global_user_environment import * from .get_global_user_operation_batch_status import * from .get_global_user_operation_status import", "not edit by hand unless you're certain you know what", ".list_global_user_labs import * from .user import * from ._inputs import", "from .get_lab_account_regional_availability import * from .get_user import * from .lab", "as members: from ._enums import * from .environment import *", "doing! 
*** # Export this package's modules as members: from", "from .gallery_image import * from .get_environment import * from .get_environment_setting", "import * from .list_global_user_environments import * from .list_global_user_labs import *", "GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:Lab\": return Lab(name, pulumi.ResourceOptions(urn=urn)) elif", "elif typ == \"azure-native:labservices/v20181015:User\": return User(name, pulumi.ResourceOptions(urn=urn)) else: raise Exception(f\"unknown", ".get_user import * from .lab import * from .lab_account import", "Do not edit by hand unless you're certain you know", "pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif typ", "import * from .lab import * from .lab_account import *", "import * from .user import * from ._inputs import *", "def _register_module(): import pulumi from ... import _utilities class Module(pulumi.runtime.ResourceModule):", "typ == \"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:GalleryImage\":", "elif typ == \"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif typ ==", "# *** WARNING: this file was generated by the Pulumi", "== \"azure-native:labservices/v20181015:Lab\": return Lab(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:LabAccount\": return", "*** # *** Do not edit by hand unless you're", "was generated by the Pulumi SDK Generator. *** # ***", "you are doing! 
*** # Export this package's modules as", "pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif typ", ".environment_setting import * from .gallery_image import * from .get_environment import", "typ == \"azure-native:labservices/v20181015:User\": return User(name, pulumi.ResourceOptions(urn=urn)) else: raise Exception(f\"unknown resource", "*** Do not edit by hand unless you're certain you", ".get_global_user_operation_status import * from .get_global_user_personal_preferences import * from .get_lab import", "from .get_environment_setting import * from .get_gallery_image import * from .get_global_user_environment", "import * from .get_global_user_operation_batch_status import * from .get_global_user_operation_status import *", "import pulumi from ... import _utilities class Module(pulumi.runtime.ResourceModule): _version =", "def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:", ".get_environment import * from .get_environment_setting import * from .get_gallery_image import", "from ._enums import * from .environment import * from .environment_setting", "# *** Do not edit by hand unless you're certain", "pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif typ", "by hand unless you're certain you know what you are", "._inputs import * from . import outputs def _register_module(): import", "typ == \"azure-native:labservices/v20181015:Environment\": return Environment(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:EnvironmentSetting\":", "from .lab import * from .lab_account import * from .list_global_user_environments", "import * from ._inputs import * from . 
import outputs", "from .get_global_user_operation_batch_status import * from .get_global_user_operation_status import * from .get_global_user_personal_preferences", "Module(pulumi.runtime.ResourceModule): _version = _utilities.get_semver_version() def version(self): return Module._version def construct(self,", "from .get_gallery_image import * from .get_global_user_environment import * from .get_global_user_operation_batch_status", "hand unless you're certain you know what you are doing!", "* from .environment import * from .environment_setting import * from", "* from .get_global_user_personal_preferences import * from .get_lab import * from", "certain you know what you are doing! *** # Export", "*** WARNING: this file was generated by the Pulumi SDK", "from .get_environment import * from .get_environment_setting import * from .get_gallery_image", "pulumi from ... import _utilities class Module(pulumi.runtime.ResourceModule): _version = _utilities.get_semver_version()", "* from ._inputs import * from . import outputs def", "resource type {typ}\") _module_instance = Module() pulumi.runtime.register_resource_module(\"azure-native\", \"labservices/v20181015\", _module_instance) _register_module()", "SDK Generator. 
*** # *** Do not edit by hand", "import * from .get_lab_account import * from .get_lab_account_regional_availability import *", "import * from .get_global_user_operation_status import * from .get_global_user_personal_preferences import *", "* from .list_global_user_labs import * from .user import * from", "from .get_user import * from .lab import * from .lab_account", "if typ == \"azure-native:labservices/v20181015:Environment\": return Environment(name, pulumi.ResourceOptions(urn=urn)) elif typ ==", "Lab(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif", ".environment import * from .environment_setting import * from .gallery_image import", "pulumi.Resource: if typ == \"azure-native:labservices/v20181015:Environment\": return Environment(name, pulumi.ResourceOptions(urn=urn)) elif typ", "elif typ == \"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif typ ==", "import * from .environment import * from .environment_setting import *", "are doing! *** # Export this package's modules as members:", "unless you're certain you know what you are doing! 
***", ".list_global_user_environments import * from .list_global_user_labs import * from .user import", "import * from .get_lab import * from .get_lab_account import *", "this package's modules as members: from ._enums import * from", "EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif", "* from .lab_account import * from .list_global_user_environments import * from", "return User(name, pulumi.ResourceOptions(urn=urn)) else: raise Exception(f\"unknown resource type {typ}\") _module_instance", "else: raise Exception(f\"unknown resource type {typ}\") _module_instance = Module() pulumi.runtime.register_resource_module(\"azure-native\",", ".get_lab_account import * from .get_lab_account_regional_availability import * from .get_user import", "Environment(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:EnvironmentSetting\": return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif", "* from .get_lab_account import * from .get_lab_account_regional_availability import * from", "from .list_global_user_labs import * from .user import * from ._inputs", "str, urn: str) -> pulumi.Resource: if typ == \"azure-native:labservices/v20181015:Environment\": return", "return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name, pulumi.ResourceOptions(urn=urn))", "typ: str, urn: str) -> pulumi.Resource: if typ == \"azure-native:labservices/v20181015:Environment\":", "import * from .gallery_image import * from .get_environment import *", "by the Pulumi SDK Generator. 
*** # *** Do not", "\"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:Lab\": return Lab(name,", "== \"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:User\": return", "import * from .environment_setting import * from .gallery_image import *", "the Pulumi SDK Generator. *** # *** Do not edit", ".user import * from ._inputs import * from . import", "... import _utilities class Module(pulumi.runtime.ResourceModule): _version = _utilities.get_semver_version() def version(self):", "import * from .get_environment import * from .get_environment_setting import *", "typ == \"azure-native:labservices/v20181015:GalleryImage\": return GalleryImage(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:Lab\":", "elif typ == \"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif typ ==", ".get_global_user_operation_batch_status import * from .get_global_user_operation_status import * from .get_global_user_personal_preferences import", "import * from .get_user import * from .lab import *", "return LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:User\": return User(name, pulumi.ResourceOptions(urn=urn))", "from .user import * from ._inputs import * from .", ". import outputs def _register_module(): import pulumi from ... 
import", "pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:User\": return User(name, pulumi.ResourceOptions(urn=urn)) else: raise", ".gallery_image import * from .get_environment import * from .get_environment_setting import", "import * from .list_global_user_labs import * from .user import *", "LabAccount(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:User\": return User(name, pulumi.ResourceOptions(urn=urn)) else:", "\"azure-native:labservices/v20181015:Lab\": return Lab(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:LabAccount\": return LabAccount(name,", "import * from .get_gallery_image import * from .get_global_user_environment import *", "from .get_lab_account import * from .get_lab_account_regional_availability import * from .get_user", "_utilities class Module(pulumi.runtime.ResourceModule): _version = _utilities.get_semver_version() def version(self): return Module._version", ".get_gallery_image import * from .get_global_user_environment import * from .get_global_user_operation_batch_status import", "def version(self): return Module._version def construct(self, name: str, typ: str,", "Pulumi SDK Generator. 
*** # *** Do not edit by", "import * from .get_global_user_environment import * from .get_global_user_operation_batch_status import *", ".lab import * from .lab_account import * from .list_global_user_environments import", "construct(self, name: str, typ: str, urn: str) -> pulumi.Resource: if", "*** # Export this package's modules as members: from ._enums", "= _utilities.get_semver_version() def version(self): return Module._version def construct(self, name: str,", "* from .get_environment_setting import * from .get_gallery_image import * from", "typ == \"azure-native:labservices/v20181015:Lab\": return Lab(name, pulumi.ResourceOptions(urn=urn)) elif typ == \"azure-native:labservices/v20181015:LabAccount\":", "== \"azure-native:labservices/v20181015:User\": return User(name, pulumi.ResourceOptions(urn=urn)) else: raise Exception(f\"unknown resource type", "* from .get_global_user_operation_batch_status import * from .get_global_user_operation_status import * from", "* from .list_global_user_environments import * from .list_global_user_labs import * from", "elif typ == \"azure-native:labservices/v20181015:Lab\": return Lab(name, pulumi.ResourceOptions(urn=urn)) elif typ ==", "Module._version def construct(self, name: str, typ: str, urn: str) ->" ]
[ "field_type='string', required=True), SchemaField(name='is_admin', field_type='boolean', required=True, default=False) ] mock_get_schema_fields = MagicMock(name='mock_get_schema')", "message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\" tugo\"]})", "serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user',", "Rights Reserved. The copyright to the software program(s) is property", "import MagicMock, patch from commons.json_schema_validator.schema_reader import SchemaField from commons.json_schema_validator.schema_reader import", "instance schema_reader = SchemaReader() self.patcher_validate = patch.object(schema_reader, 'validate_object') # @UndefinedVariable", "serializer.errors['origins'][0], 'Invalid error message') def test_deserialize_user_with_invalid_classes_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user',", "work serializer = UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"required\",", "is property of Telefonica I+D. 
The program(s) may be used", "= UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\" tugo\"]}) self.assertEquals(False, serializer.is_valid())", "def setUp(self): super(UserSerializerTests, self).setUp() mock_schema_instance = MagicMock(name='mock_schema_instance') mock_schema_instance.return_value = [", "self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid error message') def", "UserCollectionSerializer(data={'username': 'User.user', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0],", "'', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid", "self.patcher_validate = patch.object(schema_reader, 'validate_object') # @UndefinedVariable self.patcher_schema = patch.object(schema_reader, #", "supplied. ''' from unittest import TestCase from mock import MagicMock,", "serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') serializer = UserCollectionSerializer(data={'username': 'user',", "generic patches work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'is_admin':", "'Invalid error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins':", "the agreement/contract under which the program(s) have been supplied. 
'''", "'<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message')", "self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') def test_deserialize_user_with_invalid_classes_should_give_error(self): serializer", "in order generic patches work serializer = UserCollectionSerializer(data={'username': 'User.user', 'password':", "TestCase from mock import MagicMock, patch from commons.json_schema_validator.schema_reader import SchemaField", "[\" tugo\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') def", "schema instance schema_reader = SchemaReader() self.patcher_validate = patch.object(schema_reader, 'validate_object') #", "= UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'is_admin': 'si'}) self.assertEquals(False, serializer.is_valid(), \"Serialization", "order generic patches work serializer = UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(),", "and conditions stipulated in the agreement/contract under which the program(s)", "required=True), SchemaField(name='password', field_type='string', required=True), SchemaField(name='is_admin', field_type='boolean', required=True, default=False) ] mock_get_schema_fields", "'is_admin': 'si'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self): # We", "[\" sms\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') def", "patches work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(),", 
"program(s) have been supplied. ''' from unittest import TestCase from", "express written consent of Telefonica I+D or in accordance with", "self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') serializer = UserCollectionSerializer(data={'username':", "serializer = UserCollectionSerializer(data={'username': '', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\")", "self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') serializer = UserCollectionSerializer(data={'username':", "copied only with the express written consent of Telefonica I+D", "serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_invalid_is_admin_should_give_error(self): # We need to", "'<PASSWORD>', 'is_admin': 'si'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self): #", "def test_deserialize_user_null_user_should_give_required_error(self): # We need to do import here in", "serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\" tugo\"]}) self.assertEquals(False,", "UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0],", "in order generic patches work serializer = UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False,", "serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\"????\"]}) self.assertEquals(False, serializer.is_valid())", "serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') def 
test_deserialize_user_with_invalid_classes_should_give_error(self): serializer =", "with the express written consent of Telefonica I+D or in", "UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\" sms\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\",", "UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'is_admin': 'si'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\")", "test_deserialize_user_with_invalid_classes_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\"????\"]}) self.assertEquals(False,", "''' (c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe).", "Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights", "'classes': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') serializer", "generic patches work serializer = UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization", "'Invalid error message') def test_deserialize_user_invalid_is_admin_should_give_error(self): # We need to do", "SchemaField(name='password', field_type='string', required=True), SchemaField(name='is_admin', field_type='boolean', required=True, default=False) ] mock_get_schema_fields =", "def test_deserialize_user_should_work(self): # We need to do import here in", "self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer = UserCollectionSerializer(data={'username':", "patch.object(schema_reader, 'validate_object') # @UndefinedVariable self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable 'get_schema_fields',", "serializer.is_valid()) self.assertEquals(u\"invalid\", 
serializer.errors['classes'][0], 'Invalid error message') serializer = UserCollectionSerializer(data={'username': 'user',", "serializer.errors['origins'][0], 'Invalid error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>',", "def test_deserialize_user_invalid_is_admin_should_give_error(self): # We need to do import here in", "patches work serializer = UserCollectionSerializer(data={'username': '', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(),", "here in order generic patches work serializer = UserCollectionSerializer(data={'username': '',", "schema_reader = SchemaReader() self.patcher_validate = patch.object(schema_reader, 'validate_object') # @UndefinedVariable self.patcher_schema", "'<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid error message')", "serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We need to", "UserSerializerTests(TestCase): def setUp(self): super(UserSerializerTests, self).setUp() mock_schema_instance = MagicMock(name='mock_schema_instance') mock_schema_instance.return_value =", "\"Serialization invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self): #", "I+D. Printed in Spain (Europe). All Rights Reserved. 
The copyright", "tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop() def test_deserialize_user_should_work(self): # We need to do", "'password': '<PASSWORD>', 'classes': [\" sms\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid", "= [ SchemaField(name='username', field_type='string', required=True), SchemaField(name='password', field_type='string', required=True), SchemaField(name='is_admin', field_type='boolean',", "serializer.errors['classes'][0], 'Invalid error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>',", "serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self):", "Telefonica I+D. The program(s) may be used and or copied", "test_deserialize_user_should_work(self): # We need to do import here in order", "I+D or in accordance with the terms and conditions stipulated", "patches work serializer = UserCollectionSerializer(data={'username': 'usera', 'password': '<PASSWORD>', 'is_admin': 0})", "generic patches work serializer = UserCollectionSerializer(data={'username': '', 'password': '<PASSWORD>'}) self.assertEquals(False,", "of Telefonica I+D. 
The program(s) may be used and or", "[\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') serializer =", "from unittest import TestCase from mock import MagicMock, patch from", "MagicMock(name='mock_schema_instance') mock_schema_instance.return_value = [ SchemaField(name='username', field_type='string', required=True), SchemaField(name='password', field_type='string', required=True),", "order generic patches work serializer = UserCollectionSerializer(data={'username': 'User.user', 'password': '<PASSWORD>'})", "serializer = UserCollectionSerializer(data={'username': 'usera', 'password': '<PASSWORD>', 'is_admin': 0}) self.assertEquals(False, serializer.is_valid(),", "error message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We need to do import", "SchemaField from commons.json_schema_validator.schema_reader import SchemaReader from users.serializers import UserCollectionSerializer class", "and or copied only with the express written consent of", "only with the express written consent of Telefonica I+D or", "in order generic patches work serializer = UserCollectionSerializer(data={'username': 'usera', 'password':", "# @UndefinedVariable self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable 'get_schema_fields', mock_schema_instance) self.patcher_schema.start()", "with the terms and conditions stipulated in the agreement/contract under", "[\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') serializer =", "def test_deserialize_user_empty_user_should_give_error_invalid(self): # We need to do import here in", "def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\"????\"]})", "patches work serializer = 
UserCollectionSerializer(data={'username': 'User.user', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(),", "serializer = UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0],", "import SchemaField from commons.json_schema_validator.schema_reader import SchemaReader from users.serializers import UserCollectionSerializer", "def test_deserialize_user_invalid_is_admin_should_work(self): # We need to do import here in", "SchemaField(name='username', field_type='string', required=True), SchemaField(name='password', field_type='string', required=True), SchemaField(name='is_admin', field_type='boolean', required=True, default=False)", "in the agreement/contract under which the program(s) have been supplied.", "order generic patches work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>'})", "test_deserialize_user_invalid_is_admin_should_work(self): # We need to do import here in order", "from users.serializers import UserCollectionSerializer class UserSerializerTests(TestCase): def setUp(self): super(UserSerializerTests, self).setUp()", "= UserCollectionSerializer(data={'username': 'usera', 'password': '<PASSWORD>', 'is_admin': 0}) self.assertEquals(False, serializer.is_valid(), \"Serialization", "patches work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'is_admin': 'si'})", "(Europe). All Rights Reserved. 
The copyright to the software program(s)", "'user', 'password': '<PASSWORD>', 'is_admin': 'si'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") def", "commons.json_schema_validator.schema_reader import SchemaReader from users.serializers import UserCollectionSerializer class UserSerializerTests(TestCase): def", "mock schema instance schema_reader = SchemaReader() self.patcher_validate = patch.object(schema_reader, 'validate_object')", "mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start() def tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop() def test_deserialize_user_should_work(self): #", "= UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\" sms\"]}) self.assertEquals(False, serializer.is_valid())", "serializer = UserCollectionSerializer(data={'username': 'User.user', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\")", "the terms and conditions stipulated in the agreement/contract under which", "self.patcher_validate.stop() def test_deserialize_user_should_work(self): # We need to do import here", "serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_invalid_is_admin_should_give_error(self):", "'origins': [\" tugo\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message')", "the software program(s) is property of Telefonica I+D. 
The program(s)", "= UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\",", "in order generic patches work serializer = UserCollectionSerializer(data={'username': 'user', 'password':", "patch from commons.json_schema_validator.schema_reader import SchemaField from commons.json_schema_validator.schema_reader import SchemaReader from", "= patch.object(schema_reader, 'validate_object') # @UndefinedVariable self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable", "error message') def test_deserialize_user_invalid_username_should_give_error(self): # We need to do import", "work serializer = UserCollectionSerializer(data={'username': 'User.user', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization", "patches work serializer = UserCollectionSerializer(data={'username': 'a' * 600, 'password': '<PASSWORD>'})", "'<PASSWORD>', 'classes': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message')", "mock_get_schema_fields.return_value = mock_schema_instance # mock schema instance schema_reader = SchemaReader()", "= patch.object(schema_reader, # @UndefinedVariable 'get_schema_fields', mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start() def tearDown(self):", "@UndefinedVariable self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable 'get_schema_fields', mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start()", "message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We need to do import here", "error message') def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>',", "self.assertEquals(u\"invalid\", 
serializer.errors['origins'][0], 'Invalid error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password':", "generic patches work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>'}) self.assertEquals(True,", "from commons.json_schema_validator.schema_reader import SchemaField from commons.json_schema_validator.schema_reader import SchemaReader from users.serializers", "self.patcher_schema.stop() self.patcher_validate.stop() def test_deserialize_user_should_work(self): # We need to do import", "SchemaReader from users.serializers import UserCollectionSerializer class UserSerializerTests(TestCase): def setUp(self): super(UserSerializerTests,", "to do import here in order generic patches work serializer", "Telefonica I+D or in accordance with the terms and conditions", "= mock_schema_instance # mock schema instance schema_reader = SchemaReader() self.patcher_validate", "self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') def test_deserialize_user_invalid_username_should_give_error(self): # We need", "'User.user', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid", "patches work serializer = UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\")", "= UserCollectionSerializer(data={'username': 'User.user', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\",", "SchemaReader() self.patcher_validate = patch.object(schema_reader, 'validate_object') # @UndefinedVariable self.patcher_schema = patch.object(schema_reader,", "mock_schema_instance = MagicMock(name='mock_schema_instance') mock_schema_instance.return_value = [ SchemaField(name='username', field_type='string', required=True), 
SchemaField(name='password',", "message') def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins':", "'user', 'password': '<PASSWORD>', 'origins': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid", "'<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_invalid_is_admin_should_work(self): # We need", "# We need to do import here in order generic", "here in order generic patches work serializer = UserCollectionSerializer(data={'username': 'User.user',", "600, 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid", "import TestCase from mock import MagicMock, patch from commons.json_schema_validator.schema_reader import", "here in order generic patches work serializer = UserCollectionSerializer(data={'password': '<PASSWORD>'})", "in Spain (Europe). All Rights Reserved. 
The copyright to the", "'Invalid error message') def test_deserialize_user_invalid_username_should_give_error(self): # We need to do", "commons.json_schema_validator.schema_reader import SchemaField from commons.json_schema_validator.schema_reader import SchemaReader from users.serializers import", "'password': '<PASSWORD>', 'classes': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error", "The copyright to the software program(s) is property of Telefonica", "invalid\") def test_deserialize_user_invalid_is_admin_should_work(self): # We need to do import here", "'Invalid error message') def test_deserialize_user_with_invalid_classes_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password':", "def test_deserialize_user_with_invalid_classes_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\"????\"]})", "copyright to the software program(s) is property of Telefonica I+D.", "self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self): # We need to", "serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_with_invalid_origins_should_give_error(self):", "\"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer", "or in accordance with the terms and conditions stipulated in", "UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\" tugo\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\",", "self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid 
error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password':", "test_deserialize_user_with_invalid_origins_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\"????\"]}) self.assertEquals(False,", "[ SchemaField(name='username', field_type='string', required=True), SchemaField(name='password', field_type='string', required=True), SchemaField(name='is_admin', field_type='boolean', required=True,", "serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(), \"Serialization invalid\")", "test_deserialize_user_null_user_should_give_required_error(self): # We need to do import here in order", "We need to do import here in order generic patches", "been supplied. ''' from unittest import TestCase from mock import", "'<PASSWORD>', 'origins': [\" tugo\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error", "'password': '<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_invalid_is_admin_should_work(self): # We", "@UndefinedVariable 'get_schema_fields', mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start() def tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop() def", "'Invalid error message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We need to do", "message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\" sms\"]})", "def tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop() def test_deserialize_user_should_work(self): # We need to", "self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_invalid_is_admin_should_give_error(self): # We 
need", "from commons.json_schema_validator.schema_reader import SchemaReader from users.serializers import UserCollectionSerializer class UserSerializerTests(TestCase):", "stipulated in the agreement/contract under which the program(s) have been", "default=False) ] mock_get_schema_fields = MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value = mock_schema_instance # mock", "order generic patches work serializer = UserCollectionSerializer(data={'username': 'usera', 'password': '<PASSWORD>',", "'Invalid error message') def test_deserialize_user_null_user_should_give_required_error(self): # We need to do", "work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(), \"Serialization", "'Invalid error message') def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password':", "sms\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') def test_deserialize_user_invalid_username_should_give_error(self):", "invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_null_user_should_give_required_error(self): # We", "# @UndefinedVariable 'get_schema_fields', mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start() def tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop()", "serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_invalid_is_admin_should_work(self): # We need to do", "here in order generic patches work serializer = UserCollectionSerializer(data={'username': 'a'", "self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') def 
test_deserialize_user_invalid_username_should_give_error(self): #", "= UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(), \"Serialization invalid\") def", "the express written consent of Telefonica I+D or in accordance", "self.assertEquals(True, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_invalid_is_admin_should_work(self): # We need to", "in accordance with the terms and conditions stipulated in the", "super(UserSerializerTests, self).setUp() mock_schema_instance = MagicMock(name='mock_schema_instance') mock_schema_instance.return_value = [ SchemaField(name='username', field_type='string',", "message') def test_deserialize_user_null_user_should_give_required_error(self): # We need to do import here", "def test_deserialize_user_invalid_username_should_give_error(self): # We need to do import here in", "in order generic patches work serializer = UserCollectionSerializer(data={'username': 'a' *", "in order generic patches work serializer = UserCollectionSerializer(data={'username': '', 'password':", "self).setUp() mock_schema_instance = MagicMock(name='mock_schema_instance') mock_schema_instance.return_value = [ SchemaField(name='username', field_type='string', required=True),", "'password': '<PASSWORD>', 'origins': [\" tugo\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid", "mock_schema_instance.return_value = [ SchemaField(name='username', field_type='string', required=True), SchemaField(name='password', field_type='string', required=True), SchemaField(name='is_admin',", "= MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value = mock_schema_instance # mock schema instance schema_reader", "0}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['is_admin'][0], 'Invalid error message')", "be used 
and or copied only with the express written", "setUp(self): super(UserSerializerTests, self).setUp() mock_schema_instance = MagicMock(name='mock_schema_instance') mock_schema_instance.return_value = [ SchemaField(name='username',", "The program(s) may be used and or copied only with", "self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') def test_deserialize_user_with_invalid_classes_should_give_error(self): serializer = UserCollectionSerializer(data={'username':", "self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable 'get_schema_fields', mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start() def", "accordance with the terms and conditions stipulated in the agreement/contract", "here in order generic patches work serializer = UserCollectionSerializer(data={'username': 'user',", "may be used and or copied only with the express", "error message') def test_deserialize_user_null_user_should_give_required_error(self): # We need to do import", "serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_null_user_should_give_required_error(self):", "UserCollectionSerializer(data={'username': 'a' * 600, 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\")", "'user', 'password': '<PASSWORD>', 'classes': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid", "Telefonica, I+D. Printed in Spain (Europe). All Rights Reserved. The", "software program(s) is property of Telefonica I+D. 
The program(s) may", "message') def test_deserialize_user_invalid_username_should_give_error(self): # We need to do import here", "serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\" sms\"]}) self.assertEquals(False,", "serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_null_user_should_give_required_error(self): # We need to", "serializer.errors['classes'][0], 'Invalid error message') def test_deserialize_user_invalid_username_should_give_error(self): # We need to", "'<PASSWORD>', 'is_admin': 0}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['is_admin'][0], 'Invalid", "field_type='string', required=True), SchemaField(name='password', field_type='string', required=True), SchemaField(name='is_admin', field_type='boolean', required=True, default=False) ]", "from mock import MagicMock, patch from commons.json_schema_validator.schema_reader import SchemaField from", "have been supplied. 
''' from unittest import TestCase from mock", "patch.object(schema_reader, # @UndefinedVariable 'get_schema_fields', mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start() def tearDown(self): self.patcher_schema.stop()", "'password': '<PASSWORD>', 'origins': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error", "unittest import TestCase from mock import MagicMock, patch from commons.json_schema_validator.schema_reader", "program(s) may be used and or copied only with the", "serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message') def test_deserialize_user_invalid_username_should_give_error(self): # We", "import UserCollectionSerializer class UserSerializerTests(TestCase): def setUp(self): super(UserSerializerTests, self).setUp() mock_schema_instance =", "error message') def test_deserialize_user_invalid_is_admin_should_give_error(self): # We need to do import", "import here in order generic patches work serializer = UserCollectionSerializer(data={'password':", "def test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We need to do import here in", "'password': '<PASSWORD>', 'is_admin': 'si'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self):", "'origins': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') serializer", "UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0],", "'Invalid error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes':", "agreement/contract under which 
the program(s) have been supplied. ''' from", "invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_with_invalid_origins_should_give_error(self): serializer =", "tugo\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message') def test_deserialize_user_with_invalid_classes_should_give_error(self):", "program(s) is property of Telefonica I+D. The program(s) may be", "= UserCollectionSerializer(data={'username': 'a' * 600, 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization", "I+D. The program(s) may be used and or copied only", "= UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\",", "class UserSerializerTests(TestCase): def setUp(self): super(UserSerializerTests, self).setUp() mock_schema_instance = MagicMock(name='mock_schema_instance') mock_schema_instance.return_value", "# mock schema instance schema_reader = SchemaReader() self.patcher_validate = patch.object(schema_reader,", "SchemaField(name='is_admin', field_type='boolean', required=True, default=False) ] mock_get_schema_fields = MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value =", "\"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_null_user_should_give_required_error(self): #", "self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_null_user_should_give_required_error(self): # We need", "= UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid", "need to do 
import here in order generic patches work", "\"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_invalid_is_admin_should_give_error(self): #", "UserCollectionSerializer class UserSerializerTests(TestCase): def setUp(self): super(UserSerializerTests, self).setUp() mock_schema_instance = MagicMock(name='mock_schema_instance')", "'is_admin': 0}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['is_admin'][0], 'Invalid error", "self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def", "field_type='boolean', required=True, default=False) ] mock_get_schema_fields = MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value = mock_schema_instance", "MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value = mock_schema_instance # mock schema instance schema_reader =", "order generic patches work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>',", "Reserved. The copyright to the software program(s) is property of", "'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error", "'si'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self): # We need", "under which the program(s) have been supplied. ''' from unittest", "do import here in order generic patches work serializer =", "test_deserialize_user_invalid_is_admin_should_give_error(self): # We need to do import here in order", "2013 Telefonica, I+D. Printed in Spain (Europe). 
All Rights Reserved.", "conditions stipulated in the agreement/contract under which the program(s) have", "''' from unittest import TestCase from mock import MagicMock, patch", "work serializer = UserCollectionSerializer(data={'username': 'a' * 600, 'password': '<PASSWORD>'}) self.assertEquals(False,", "UserCollectionSerializer(data={'username': '', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0],", "generic patches work serializer = UserCollectionSerializer(data={'username': 'usera', 'password': '<PASSWORD>', 'is_admin':", "consent of Telefonica I+D or in accordance with the terms", "serializer = UserCollectionSerializer(data={'username': 'a' * 600, 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(),", "invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_invalid_is_admin_should_give_error(self): # We", "UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_invalid_is_admin_should_work(self):", "'<PASSWORD>', 'classes': [\" sms\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error", "'password': '<PASSWORD>', 'is_admin': 0}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['is_admin'][0],", "of Telefonica I+D or in accordance with the terms and", "error message') def test_deserialize_user_with_invalid_classes_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>',", "message') def test_deserialize_user_with_invalid_classes_should_give_error(self): serializer = UserCollectionSerializer(data={'username': 'user', 
'password': '<PASSWORD>', 'classes':", "'get_schema_fields', mock_schema_instance) self.patcher_schema.start() self.patcher_validate.start() def tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop() def test_deserialize_user_should_work(self):", "required=True, default=False) ] mock_get_schema_fields = MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value = mock_schema_instance #", "error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'classes': [\"", "Printed in Spain (Europe). All Rights Reserved. The copyright to", "\"Serialization invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self): # We need to do import", "UserCollectionSerializer(data={'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid error", "'<PASSWORD>', 'origins': [\"????\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0], 'Invalid error message')", "self.patcher_validate.start() def tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop() def test_deserialize_user_should_work(self): # We need", "'user', 'password': '<PASSWORD>'}) self.assertEquals(True, serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_invalid_is_admin_should_work(self): #", "error message') serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\"", "import here in order generic patches work serializer = UserCollectionSerializer(data={'username':", "UserCollectionSerializer(data={'username': 'usera', 'password': '<PASSWORD>', 'is_admin': 0}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\")", "import SchemaReader from users.serializers import UserCollectionSerializer class UserSerializerTests(TestCase): def setUp(self):", 
"work serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'is_admin': 'si'}) self.assertEquals(False,", "serializer.is_valid(), \"Serialization invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self): # We need to do", "'user', 'password': '<PASSWORD>', 'classes': [\" sms\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0],", "'usera', 'password': '<PASSWORD>', 'is_admin': 0}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\",", "which the program(s) have been supplied. ''' from unittest import", "generic patches work serializer = UserCollectionSerializer(data={'username': 'User.user', 'password': '<PASSWORD>'}) self.assertEquals(False,", "serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'origins': [\"????\"]}) self.assertEquals(False, serializer.is_valid())", "or copied only with the express written consent of Telefonica", "order generic patches work serializer = UserCollectionSerializer(data={'username': '', 'password': '<PASSWORD>'})", "mock_schema_instance # mock schema instance schema_reader = SchemaReader() self.patcher_validate =", "test_deserialize_user_empty_user_should_give_error_invalid(self): # We need to do import here in order", "test_deserialize_user_invalid_username_should_give_error(self): # We need to do import here in order", "'a' * 600, 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\",", "invalid\") self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We", "used and or copied only with the express written consent", "] mock_get_schema_fields = MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value = 
mock_schema_instance # mock schema", "property of Telefonica I+D. The program(s) may be used and", "mock_get_schema_fields = MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value = mock_schema_instance # mock schema instance", "All Rights Reserved. The copyright to the software program(s) is", "required=True), SchemaField(name='is_admin', field_type='boolean', required=True, default=False) ] mock_get_schema_fields = MagicMock(name='mock_get_schema') mock_get_schema_fields.return_value", "= UserCollectionSerializer(data={'username': '', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\",", "test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We need to do import here in order", "work serializer = UserCollectionSerializer(data={'username': 'usera', 'password': '<PASSWORD>', 'is_admin': 0}) self.assertEquals(False,", "mock import MagicMock, patch from commons.json_schema_validator.schema_reader import SchemaField from commons.json_schema_validator.schema_reader", "terms and conditions stipulated in the agreement/contract under which the", "the program(s) have been supplied. ''' from unittest import TestCase", "order generic patches work serializer = UserCollectionSerializer(data={'username': 'a' * 600,", "MagicMock, patch from commons.json_schema_validator.schema_reader import SchemaField from commons.json_schema_validator.schema_reader import SchemaReader", "= SchemaReader() self.patcher_validate = patch.object(schema_reader, 'validate_object') # @UndefinedVariable self.patcher_schema =", "message') def test_deserialize_user_invalid_is_admin_should_give_error(self): # We need to do import here", "written consent of Telefonica I+D or in accordance with the", "to the software program(s) is property of Telefonica I+D. 
The", "serializer = UserCollectionSerializer(data={'username': 'user', 'password': '<PASSWORD>', 'is_admin': 'si'}) self.assertEquals(False, serializer.is_valid(),", "self.assertEquals(u\"required\", serializer.errors['username'][0], 'Invalid error message') def test_deserialize_user_large_user_ne_should_give_invalid_error(self): # We need", "* 600, 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization invalid\") self.assertEquals(u\"invalid\", serializer.errors['username'][0],", "'validate_object') # @UndefinedVariable self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable 'get_schema_fields', mock_schema_instance)", "(c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All", "generic patches work serializer = UserCollectionSerializer(data={'username': 'a' * 600, 'password':", "'classes': [\" sms\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['classes'][0], 'Invalid error message')", "= MagicMock(name='mock_schema_instance') mock_schema_instance.return_value = [ SchemaField(name='username', field_type='string', required=True), SchemaField(name='password', field_type='string',", "Spain (Europe). All Rights Reserved. 
The copyright to the software", "self.patcher_schema.start() self.patcher_validate.start() def tearDown(self): self.patcher_schema.stop() self.patcher_validate.stop() def test_deserialize_user_should_work(self): # We", "\"Serialization invalid\") def test_deserialize_user_invalid_is_admin_should_work(self): # We need to do import", "invalid\") def test_deserialize_user_empty_user_should_give_error_invalid(self): # We need to do import here", "work serializer = UserCollectionSerializer(data={'username': '', 'password': '<PASSWORD>'}) self.assertEquals(False, serializer.is_valid(), \"Serialization", "here in order generic patches work serializer = UserCollectionSerializer(data={'username': 'usera',", "users.serializers import UserCollectionSerializer class UserSerializerTests(TestCase): def setUp(self): super(UserSerializerTests, self).setUp() mock_schema_instance", "'user', 'password': '<PASSWORD>', 'origins': [\" tugo\"]}) self.assertEquals(False, serializer.is_valid()) self.assertEquals(u\"invalid\", serializer.errors['origins'][0]," ]
[ "exists'}), 403) user, errors = self.schema().load(data) if errors: return make_response(jsonify(errors),", "200) else: return make_response(jsonify({\"error\": {\"code\": 400, \"msg\": \"No such user/wrong", "datetime import timedelta from flask import request, jsonify, make_response, redirect,", "json, render_template from flask_jwt_extended import (create_access_token, jwt_required) from flask_restful import", "jsonify({'id': user, 'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200) else: return make_response(jsonify({\"error\": {\"code\":", "if request.json: data = request.json user = self.model.query.filter(self.model.email == data['email']).first()", "return make_response(jsonify(errors), 400) try: user.set_password(data['password']) db.session.add(user) db.session.commit() except (IntegrityError, InvalidRequestError)", "UserSchema def post(self): data = request.json if not data: return", "make_response(jsonify({'error': 'User already exists'}), 403) user, errors = self.schema().load(data) if", "else: return make_response(redirect('/api/v1/login', 403)) class UserRegisterResource(Resource): model = User schema", "data = request.json user = self.model.query.filter(self.model.email == data['email']).first() if user", "\"No such user/wrong password.\"}}), 400) else: data = request.form user", "errors: return make_response(jsonify(errors), 400) try: user.set_password(data['password']) db.session.add(user) db.session.commit() except (IntegrityError,", "= User.query.filter(User.email == data['email']).first() if user: return make_response(jsonify({'error': 'User already", "already exists'}), 403) user, errors = self.schema().load(data) if errors: return", "jsonify(created_user={'id': user.id, 'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200)", "and self.model.check_password(user, data['password']): expires = 
timedelta(days=365) user = UserSchema(only=('id', 'email',", "request.json user = self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user,", "data['password']) and login_user(user): return make_response(redirect('/admin/', 302)) else: return make_response(redirect('/api/v1/login', 403))", "if not data: return make_response(jsonify({'error': 'No data'}), 400) user =", "make_response, redirect, json, render_template from flask_jwt_extended import (create_access_token, jwt_required) from", "not data: return make_response(jsonify({'error': 'No data'}), 400) user = User.query.filter(User.email", "import request, jsonify, make_response, redirect, json, render_template from flask_jwt_extended import", "= self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user, data['password']): expires", "flask_jwt_extended import (create_access_token, jwt_required) from flask_restful import Resource from flask_login", "render_template from flask_jwt_extended import (create_access_token, jwt_required) from flask_restful import Resource", "import (create_access_token, jwt_required) from flask_restful import Resource from flask_login import", "user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data return make_response( jsonify({'id': user, 'authentication_token':", "timedelta(days=365) return make_response( jsonify(created_user={'id': user.id, 'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data, 'authentication_token':", "data['email']).first() if user: return make_response(jsonify({'error': 'User already exists'}), 403) user,", "''' from datetime import timedelta from flask import request, jsonify,", "from .schemas import UserSchema class UserLoginResource(Resource): model = User schema", "self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user, data['password']) and login_user(user):", "403)) class 
UserRegisterResource(Resource): model = User schema = UserSchema def", "400) user = User.query.filter(User.email == data['email']).first() if user: return make_response(jsonify({'error':", "login_user(user): return make_response(redirect('/admin/', 302)) else: return make_response(redirect('/api/v1/login', 403)) class UserRegisterResource(Resource):", "class UserRegisterResource(Resource): model = User schema = UserSchema def post(self):", "print(e) db.session.rollback() return make_response(jsonify(error={'code': 400 }), 400) expires = timedelta(days=365)", "User schema = UserSchema def post(self): data = request.json if", "'No data'}), 400) user = User.query.filter(User.email == data['email']).first() if user:", "'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200) api.add_resource(UserLoginResource, '/login/', endpoint='login') api.add_resource(UserRegisterResource, '/register/',", "'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200) api.add_resource(UserLoginResource, '/login/', endpoint='login') api.add_resource(UserRegisterResource, '/register/', endpoint='register')", "400) expires = timedelta(days=365) return make_response( jsonify(created_user={'id': user.id, 'user': self.schema(only=('id',", "from flask_login import login_user, current_user from sqlalchemy.exc import IntegrityError, InvalidRequestError", "flask_login import login_user, current_user from sqlalchemy.exc import IntegrityError, InvalidRequestError from", "schema = UserSchema def post(self): data = request.json if not", "redirect, json, render_template from flask_jwt_extended import (create_access_token, jwt_required) from flask_restful", "from .models import User from .schemas import UserSchema class UserLoginResource(Resource):", "user.set_password(data['password']) db.session.add(user) db.session.commit() except (IntegrityError, InvalidRequestError) as e: print(e) 
db.session.rollback()", "schema = UserSchema def get(self): return make_response(render_template('login.html')) def post(self): if", "and login_user(user): return make_response(redirect('/admin/', 302)) else: return make_response(redirect('/api/v1/login', 403)) class", "data = request.form user = self.model.query.filter(self.model.email == data['email']).first() if user", "from sqlalchemy.exc import IntegrityError, InvalidRequestError from src import db, api", "400, \"msg\": \"No such user/wrong password.\"}}), 400) else: data =", "request.json if not data: return make_response(jsonify({'error': 'No data'}), 400) user", "errors = self.schema().load(data) if errors: return make_response(jsonify(errors), 400) try: user.set_password(data['password'])", "jsonify, make_response, redirect, json, render_template from flask_jwt_extended import (create_access_token, jwt_required)", "sqlalchemy.exc import IntegrityError, InvalidRequestError from src import db, api from", "User views ''' from datetime import timedelta from flask import", "db.session.commit() except (IntegrityError, InvalidRequestError) as e: print(e) db.session.rollback() return make_response(jsonify(error={'code':", "user/wrong password.\"}}), 400) else: data = request.form user = self.model.query.filter(self.model.email", "UserLoginResource(Resource): model = User schema = UserSchema def get(self): return", "return make_response(redirect('/admin/', 302)) else: return make_response(redirect('/api/v1/login', 403)) class UserRegisterResource(Resource): model", "timedelta from flask import request, jsonify, make_response, redirect, json, render_template", "data['email']).first() if user and self.model.check_password(user, data['password']) and login_user(user): return make_response(redirect('/admin/',", "flask import request, jsonify, make_response, redirect, json, render_template from flask_jwt_extended", "400 }), 400) expires = timedelta(days=365) return make_response( jsonify(created_user={'id': user.id,", 
"db, api from .models import User from .schemas import UserSchema", "}), 400) expires = timedelta(days=365) return make_response( jsonify(created_user={'id': user.id, 'user':", "user: return make_response(jsonify({'error': 'User already exists'}), 403) user, errors =", "return make_response(redirect('/api/v1/login', 403)) class UserRegisterResource(Resource): model = User schema =", "create_access_token(identity=user['id'], expires_delta=expires)}), 200) else: return make_response(jsonify({\"error\": {\"code\": 400, \"msg\": \"No", "db.session.rollback() return make_response(jsonify(error={'code': 400 }), 400) expires = timedelta(days=365) return", "make_response(render_template('login.html')) def post(self): if request.json: data = request.json user =", "self.model.check_password(user, data['password']): expires = timedelta(days=365) user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data", "except (IntegrityError, InvalidRequestError) as e: print(e) db.session.rollback() return make_response(jsonify(error={'code': 400", "User schema = UserSchema def get(self): return make_response(render_template('login.html')) def post(self):", "def get(self): return make_response(render_template('login.html')) def post(self): if request.json: data =", "''' User views ''' from datetime import timedelta from flask", "current_user from sqlalchemy.exc import IntegrityError, InvalidRequestError from src import db,", "== data['email']).first() if user and self.model.check_password(user, data['password']): expires = timedelta(days=365)", "= request.json if not data: return make_response(jsonify({'error': 'No data'}), 400)", "model = User schema = UserSchema def get(self): return make_response(render_template('login.html'))", "= self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user, data['password']) and", "if user: return make_response(jsonify({'error': 'User already exists'}), 403) user, errors", "class 
UserLoginResource(Resource): model = User schema = UserSchema def get(self):", "user and self.model.check_password(user, data['password']) and login_user(user): return make_response(redirect('/admin/', 302)) else:", "make_response( jsonify({'id': user, 'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200) else: return make_response(jsonify({\"error\":", "db.session.add(user) db.session.commit() except (IntegrityError, InvalidRequestError) as e: print(e) db.session.rollback() return", "IntegrityError, InvalidRequestError from src import db, api from .models import", "'email', 'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200) api.add_resource(UserLoginResource, '/login/', endpoint='login') api.add_resource(UserRegisterResource,", "data['password']): expires = timedelta(days=365) user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data return", ".models import User from .schemas import UserSchema class UserLoginResource(Resource): model", "else: data = request.form user = self.model.query.filter(self.model.email == data['email']).first() if", "= request.form user = self.model.query.filter(self.model.email == data['email']).first() if user and", "request.json: data = request.json user = self.model.query.filter(self.model.email == data['email']).first() if", "User from .schemas import UserSchema class UserLoginResource(Resource): model = User", "403) user, errors = self.schema().load(data) if errors: return make_response(jsonify(errors), 400)", "400) else: data = request.form user = self.model.query.filter(self.model.email == data['email']).first()", "def post(self): data = request.json if not data: return make_response(jsonify({'error':", "if errors: return make_response(jsonify(errors), 400) try: user.set_password(data['password']) db.session.add(user) db.session.commit() except", "UserSchema class UserLoginResource(Resource): model = User 
schema = UserSchema def", "from datetime import timedelta from flask import request, jsonify, make_response,", "data['email']).first() if user and self.model.check_password(user, data['password']): expires = timedelta(days=365) user", "if user and self.model.check_password(user, data['password']): expires = timedelta(days=365) user =", "and self.model.check_password(user, data['password']) and login_user(user): return make_response(redirect('/admin/', 302)) else: return", "user = self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user, data['password'])", "user and self.model.check_password(user, data['password']): expires = timedelta(days=365) user = UserSchema(only=('id',", "= UserSchema(only=('id', 'email', 'is_admin')).dump(user).data return make_response( jsonify({'id': user, 'authentication_token': create_access_token(identity=user['id'],", "user.id, 'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200) api.add_resource(UserLoginResource,", "such user/wrong password.\"}}), 400) else: data = request.form user =", "user = User.query.filter(User.email == data['email']).first() if user: return make_response(jsonify({'error': 'User", "return make_response( jsonify(created_user={'id': user.id, 'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id,", "data: return make_response(jsonify({'error': 'No data'}), 400) user = User.query.filter(User.email ==", "= timedelta(days=365) return make_response( jsonify(created_user={'id': user.id, 'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data,", "model = User schema = UserSchema def post(self): data =", "'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200) 
api.add_resource(UserLoginResource, '/login/',", "= request.json user = self.model.query.filter(self.model.email == data['email']).first() if user and", "api from .models import User from .schemas import UserSchema class", "get(self): return make_response(render_template('login.html')) def post(self): if request.json: data = request.json", "login_user, current_user from sqlalchemy.exc import IntegrityError, InvalidRequestError from src import", "expires_delta=expires)}), 200) else: return make_response(jsonify({\"error\": {\"code\": 400, \"msg\": \"No such", "self.schema().load(data) if errors: return make_response(jsonify(errors), 400) try: user.set_password(data['password']) db.session.add(user) db.session.commit()", "(IntegrityError, InvalidRequestError) as e: print(e) db.session.rollback() return make_response(jsonify(error={'code': 400 }),", "request.form user = self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user,", "post(self): if request.json: data = request.json user = self.model.query.filter(self.model.email ==", "from flask_restful import Resource from flask_login import login_user, current_user from", "return make_response(jsonify(error={'code': 400 }), 400) expires = timedelta(days=365) return make_response(", "from flask_jwt_extended import (create_access_token, jwt_required) from flask_restful import Resource from", "Resource from flask_login import login_user, current_user from sqlalchemy.exc import IntegrityError,", "else: return make_response(jsonify({\"error\": {\"code\": 400, \"msg\": \"No such user/wrong password.\"}}),", "expires = timedelta(days=365) return make_response( jsonify(created_user={'id': user.id, 'user': self.schema(only=('id', 'email',", "import UserSchema class UserLoginResource(Resource): model = User schema = UserSchema", "user, errors = self.schema().load(data) if errors: return make_response(jsonify(errors), 400) try:", "InvalidRequestError) as e: print(e) 
db.session.rollback() return make_response(jsonify(error={'code': 400 }), 400)", "from flask import request, jsonify, make_response, redirect, json, render_template from", "user = self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user, data['password']):", "post(self): data = request.json if not data: return make_response(jsonify({'error': 'No", "make_response(jsonify(error={'code': 400 }), 400) expires = timedelta(days=365) return make_response( jsonify(created_user={'id':", "make_response(jsonify({\"error\": {\"code\": 400, \"msg\": \"No such user/wrong password.\"}}), 400) else:", "400) try: user.set_password(data['password']) db.session.add(user) db.session.commit() except (IntegrityError, InvalidRequestError) as e:", "import timedelta from flask import request, jsonify, make_response, redirect, json,", "password.\"}}), 400) else: data = request.form user = self.model.query.filter(self.model.email ==", "make_response( jsonify(created_user={'id': user.id, 'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}),", "user, 'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200) else: return make_response(jsonify({\"error\": {\"code\": 400,", "from src import db, api from .models import User from", ".schemas import UserSchema class UserLoginResource(Resource): model = User schema =", "jwt_required) from flask_restful import Resource from flask_login import login_user, current_user", "== data['email']).first() if user and self.model.check_password(user, data['password']) and login_user(user): return", "= UserSchema def post(self): data = request.json if not data:", "request, jsonify, make_response, redirect, json, render_template from flask_jwt_extended import (create_access_token,", "import IntegrityError, InvalidRequestError from src import db, api from .models", "return 
make_response( jsonify({'id': user, 'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200) else: return", "try: user.set_password(data['password']) db.session.add(user) db.session.commit() except (IntegrityError, InvalidRequestError) as e: print(e)", "(create_access_token, jwt_required) from flask_restful import Resource from flask_login import login_user,", "expires = timedelta(days=365) user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data return make_response(", "flask_restful import Resource from flask_login import login_user, current_user from sqlalchemy.exc", "make_response(jsonify({'error': 'No data'}), 400) user = User.query.filter(User.email == data['email']).first() if", "= User schema = UserSchema def get(self): return make_response(render_template('login.html')) def", "return make_response(render_template('login.html')) def post(self): if request.json: data = request.json user", "return make_response(jsonify({\"error\": {\"code\": 400, \"msg\": \"No such user/wrong password.\"}}), 400)", "src import db, api from .models import User from .schemas", "as e: print(e) db.session.rollback() return make_response(jsonify(error={'code': 400 }), 400) expires", "views ''' from datetime import timedelta from flask import request,", "= User schema = UserSchema def post(self): data = request.json", "import login_user, current_user from sqlalchemy.exc import IntegrityError, InvalidRequestError from src", "InvalidRequestError from src import db, api from .models import User", "make_response(redirect('/admin/', 302)) else: return make_response(redirect('/api/v1/login', 403)) class UserRegisterResource(Resource): model =", "import User from .schemas import UserSchema class UserLoginResource(Resource): model =", "self.model.query.filter(self.model.email == data['email']).first() if user and self.model.check_password(user, data['password']): expires =", "User.query.filter(User.email == data['email']).first() if user: 
return make_response(jsonify({'error': 'User already exists'}),", "self.schema(only=('id', 'email', 'is_admin')).dump(user).data, 'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200) api.add_resource(UserLoginResource, '/login/', endpoint='login')", "data'}), 400) user = User.query.filter(User.email == data['email']).first() if user: return", "302)) else: return make_response(redirect('/api/v1/login', 403)) class UserRegisterResource(Resource): model = User", "UserRegisterResource(Resource): model = User schema = UserSchema def post(self): data", "UserSchema def get(self): return make_response(render_template('login.html')) def post(self): if request.json: data", "timedelta(days=365) user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data return make_response( jsonify({'id': user,", "= UserSchema def get(self): return make_response(render_template('login.html')) def post(self): if request.json:", "def post(self): if request.json: data = request.json user = self.model.query.filter(self.model.email", "if user and self.model.check_password(user, data['password']) and login_user(user): return make_response(redirect('/admin/', 302))", "'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200) else: return make_response(jsonify({\"error\": {\"code\": 400, \"msg\":", "make_response(redirect('/api/v1/login', 403)) class UserRegisterResource(Resource): model = User schema = UserSchema", "= self.schema().load(data) if errors: return make_response(jsonify(errors), 400) try: user.set_password(data['password']) db.session.add(user)", "return make_response(jsonify({'error': 'User already exists'}), 403) user, errors = self.schema().load(data)", "\"msg\": \"No such user/wrong password.\"}}), 400) else: data = request.form", "return make_response(jsonify({'error': 'No data'}), 400) user = User.query.filter(User.email == data['email']).first()", "'User already exists'}), 403) user, errors = 
self.schema().load(data) if errors:", "self.model.check_password(user, data['password']) and login_user(user): return make_response(redirect('/admin/', 302)) else: return make_response(redirect('/api/v1/login',", "e: print(e) db.session.rollback() return make_response(jsonify(error={'code': 400 }), 400) expires =", "import Resource from flask_login import login_user, current_user from sqlalchemy.exc import", "UserSchema(only=('id', 'email', 'is_admin')).dump(user).data return make_response( jsonify({'id': user, 'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}),", "make_response(jsonify(errors), 400) try: user.set_password(data['password']) db.session.add(user) db.session.commit() except (IntegrityError, InvalidRequestError) as", "= timedelta(days=365) user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data return make_response( jsonify({'id':", "'email', 'is_admin')).dump(user).data return make_response( jsonify({'id': user, 'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200)", "== data['email']).first() if user: return make_response(jsonify({'error': 'User already exists'}), 403)", "{\"code\": 400, \"msg\": \"No such user/wrong password.\"}}), 400) else: data", "'is_admin')).dump(user).data return make_response( jsonify({'id': user, 'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200) else:", "data = request.json if not data: return make_response(jsonify({'error': 'No data'}),", "import db, api from .models import User from .schemas import" ]
[ "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin, 1) OVER", "'ORDER BY rank ' 'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "fields=[ '*', LagField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] )", "'DENSE_RANK() OVER (ORDER BY margin DESC) AS \"dense_rank\" ' 'FROM", "BY dense_rank ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "RANK() OVER (PARTITION BY account_id) AS \"rank\" FROM ' 'querybuilder_tests_order'", "ntile ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag(self):", "( 'SELECT querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER BY margin", "LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField ) from querybuilder.query import QueryWindow,", "over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'row_number' ) query_str =", "querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS", "fields=[ RankField() ] ) query_str = query.get_sql() expected_query = 'SELECT", "\"cume_dist\" ' 'FROM querybuilder_tests_order ' 'ORDER BY cume_dist ' 'ASC'", "over=QueryWindow().order_by( '-margin' ) ) ] ) query_str = query.get_sql() expected_query", "get_comparison_str(query_str, expected_query)) def test_dense_rank(self): query = Query().from_table( table=Order, fields=[ '*',", "get_comparison_str(query_str, expected_query)) def test_row_number(self): query = Query().from_table( table=Order, fields=[ '*',", "AS \"margin_lag\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over(self): query =", 
"table=Order, fields=[ RankField() ] ) query_str = query.get_sql() expected_query =", "'FROM querybuilder_tests_order ' 'ORDER BY dense_rank ' 'ASC' ) self.assertEqual(query_str,", "expected_query = ( 'SELECT querybuilder_tests_order.id, ' 'RANK() OVER (PARTITION BY", "query = Query().from_table( table=Order, fields=[ '*', LagField( 'margin', default=0, over=QueryWindow().order_by(", "expected_query, get_comparison_str(query_str, expected_query)) def test_lag_default(self): query = Query().from_table( table=Order, fields=[", "' 'ORDER BY dense_rank ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "test_rank_percent(self): query = Query().from_table( table=Order, fields=[ '*', PercentRankField( over=QueryWindow().order_by( '-margin'", "'FROM querybuilder_tests_order ' 'ORDER BY cume_dist ' 'ASC' ) self.assertEqual(query_str,", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, RANK() OVER (PARTITION", "id ASC) AS \"rank\" FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'CUME_DIST() OVER (ORDER BY", "querybuilder_tests_order ' 'ORDER BY rank ' 'DESC' ) self.assertEqual(query_str, expected_query,", "'END) ' 'AS \"margin_num_stddev\" ' 'FROM querybuilder_tests_order ' 'ORDER BY", "'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC)", "'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag(self): query =", "query = Query().from_table( table=Order, fields=[ '*', FirstValueField( 'margin', over=QueryWindow().order_by( '-margin'", ") ] ).order_by( 'dense_rank' ) query_str = query.get_sql() expected_query =", "ASC) AS \"margin_last_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "'OVER ()' self.assertEqual(query_str, 
expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition(self): query_window =", "'cume_dist' ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*,", "BY margin DESC) AS \"row_number\" ' 'FROM querybuilder_tests_order ' 'ORDER", "'SELECT querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER BY margin DESC)", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' '(CASE WHEN (STDDEV(querybuilder_tests_order.margin)", "querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_order(self): query = Query().from_table(", "expected_query)) def test_rank(self): query = Query().from_table( table=Order, fields=[ 'id', RankField(", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_num_stddev(self): query = Query().from_table(", "def test_rank_over_order(self): query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().order_by(", "query_str = query.get_sql() expected_query = 'SELECT RANK() OVER () AS", "BY field_one, field_two ORDER BY field_one ASC, field_two DESC)' self.assertEqual(query_str,", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_cume_dist(self): query = Query().from_table( table=Order,", "fields=[ 'id', RankField( over=QueryWindow().order_by( 'id' ) ) ] ) query_str", "Query().from_table( table=Order, fields=[ '*', FirstValueField( 'margin', over=QueryWindow().order_by( '-margin' ) )", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin,", "' 'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin ASC) AS \"margin_last_value\" '", "def test_query_window(self): query_window = QueryWindow() query_str = query_window.get_sql() expected_query =", 
"expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order_many(self): query_window = QueryWindow().partition_by( 'field_one' ).partition_by(", "import Order from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str class QueryWindowTest(QueryTestCase): def", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, ' 'RANK() OVER (PARTITION", "1, \\'0\\') OVER (ORDER BY margin DESC) AS \"margin_lag\" '", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lead(self): query = Query().from_table(", "margin DESC) AS \"dense_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY", "expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over(self): query = Query().from_table( table=Order, fields=[", "DESC) AS \"margin_nth_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'PERCENT_RANK() OVER (ORDER BY", "get_comparison_str(query_str, expected_query)) def test_num_stddev(self): query = Query().from_table( table=Order, fields=[ '*',", "'SELECT querybuilder_tests_order.*, ' '(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER ()) <> 0", ").order_by( '-rank' ) query_str = query.get_sql() expected_query = ( 'SELECT", "fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ) ) ] ) query_str", "'FROM querybuilder_tests_order ' 'ORDER BY margin_num_stddev ' 'DESC' ) self.assertEqual(query_str,", ") query_str = query.get_sql() expected_query = 'SELECT RANK() OVER ()", "(STDDEV(querybuilder_tests_order.margin) OVER ()) <> 0 ' 'THEN ((querybuilder_tests_order.margin - ('", "= QueryWindow().partition_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY", "ASC) AS \"rank\" FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, 
get_comparison_str(query_str, expected_query))", "'SELECT querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin ASC) AS", "'id', RankField( over=QueryWindow().partition_by( 'account_id' ) ) ] ) query_str =", "BY field_one ASC, field_two DESC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) class", ") ] ).order_by( 'ntile' ) query_str = query.get_sql() expected_query =", "(ORDER BY margin DESC) AS \"percent_rank\" ' 'FROM querybuilder_tests_order '", "RankField( over=QueryWindow() ) ] ) query_str = query.get_sql() expected_query =", "DenseRankField, PercentRankField, CumeDistField, NTileField, LagField, LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField", "' 'ORDER BY margin_num_stddev ' 'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one ORDER BY field_one", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) class WindowFunctionTest(QueryTestCase): def test_rank_no_over(self): query =", ") ] ).order_by( '-rank' ) query_str = query.get_sql() expected_query =", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank(self): query = Query().from_table(", "def test_lead(self): query = Query().from_table( table=Order, fields=[ '*', LeadField( 'margin',", "'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER BY margin DESC) AS \"margin_nth_value\" '", "from querybuilder.fields import ( RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField, NTileField,", "'FROM querybuilder_tests_order ' 'ORDER BY rank ' 'DESC' ) self.assertEqual(query_str,", ").order_by( 'dense_rank' ) query_str = query.get_sql() expected_query = ( 'SELECT", "query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' )", 
"'row_number' ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*,", "query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "over=QueryWindow().order_by( 'id' ) ) ] ) query_str = query.get_sql() expected_query", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'DENSE_RANK() OVER", "'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_dense_rank(self): query =", "over=QueryWindow().order_by( 'margin' ) ) ] ) query_str = query.get_sql() expected_query", "\"row_number\" ' 'FROM querybuilder_tests_order ' 'ORDER BY row_number ' 'ASC'", "BY margin DESC) AS \"cume_dist\" ' 'FROM querybuilder_tests_order ' 'ORDER", "= Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().order_by( 'id' ) )", "<> 0 ' 'THEN ((querybuilder_tests_order.margin - (' 'AVG(querybuilder_tests_order.margin) OVER ()))", "' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag_default(self):", "RANK() OVER (ORDER BY id ASC) AS \"rank\" FROM querybuilder_tests_order'", "= query_window.get_sql() expected_query = 'OVER ()' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_percent(self): query = Query().from_table( table=Order,", ").order_by( 'cume_dist' ) query_str = query.get_sql() expected_query = ( 'SELECT", "DenseRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'dense_rank' ) query_str", "'ROW_NUMBER() OVER (ORDER BY margin DESC) AS \"row_number\" ' 'FROM", "' 'DENSE_RANK() OVER (ORDER BY margin DESC) AS \"dense_rank\" '", "'SELECT querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin 
DESC)", "= 'OVER (PARTITION BY field_one, field_two ORDER BY field_one ASC,", "'id' ) ) ] ) query_str = query.get_sql() expected_query =", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, RANK() OVER (ORDER BY", "'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lead(self): query", "expected_query, get_comparison_str(query_str, expected_query)) def test_last_value(self): query = Query().from_table( table=Order, fields=[", "( 'SELECT querybuilder_tests_order.*, ' 'CUME_DIST() OVER (ORDER BY margin DESC)", "' 'ORDER BY rank ' 'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one ORDER", "fields=[ '*', LastValueField( 'margin', over=QueryWindow().order_by( 'margin' ) ) ] )", "querybuilder_tests_order.*, ' '(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER ()) <> 0 '", "' 'FROM querybuilder_tests_order ' 'ORDER BY ntile ' 'ASC' )", "table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ) ) ] )", "querybuilder.tests.models import Order from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str class QueryWindowTest(QueryTestCase):", "'*', DenseRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'dense_rank' )", "= Query().from_table( table=Order, fields=[ '*', LeadField( 'margin', over=QueryWindow().order_by( '-margin' )", "test_query_window_order(self): query_window = QueryWindow().order_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER", "'field_one' ).order_by( 'field_one' ) query_str = query_window.get_sql() expected_query = 'OVER", "BY margin DESC) AS \"percent_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER", "field_one ASC, field_two DESC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) class 
WindowFunctionTest(QueryTestCase):", "def test_cume_dist(self): query = Query().from_table( table=Order, fields=[ '*', CumeDistField( over=QueryWindow().order_by(", "\"margin_num_stddev\" ' 'FROM querybuilder_tests_order ' 'ORDER BY margin_num_stddev ' 'DESC'", "querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER BY margin DESC) AS", "'SELECT querybuilder_tests_order.*, ' 'CUME_DIST() OVER (ORDER BY margin DESC) AS", "querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin DESC) AS \"margin_first_value\"", "BY margin DESC) AS \"dense_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER", "Query().from_table( table=Order, fields=[ '*', LagField( 'margin', over=QueryWindow().order_by( '-margin' ) )", "0 ' 'END) ' 'AS \"margin_num_stddev\" ' 'FROM querybuilder_tests_order '", "] ) query_str = query.get_sql() expected_query = 'SELECT RANK() OVER", "RankField( over=QueryWindow().partition_by( 'account_id' ).order_by( 'id' ) ) ] ).order_by( '-rank'", "querybuilder.tests.query_tests import QueryTestCase, get_comparison_str class QueryWindowTest(QueryTestCase): def test_query_window(self): query_window =", "expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_partition(self): query = Query().from_table( table=Order, fields=[", "'ORDER BY cume_dist ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "'-margin' ) ) ] ).order_by( 'percent_rank' ) query_str = query.get_sql()", "' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_last_value(self):", "over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'cume_dist' ) query_str =", "'SELECT querybuilder_tests_order.*, ' 'DENSE_RANK() OVER (ORDER BY margin DESC) AS", "FROM ' 'querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) 
def test_row_number(self):", "query = Query().from_table( table=Order, fields=[ '*', PercentRankField( over=QueryWindow().order_by( '-margin' )", "()) <> 0 ' 'THEN ((querybuilder_tests_order.margin - (' 'AVG(querybuilder_tests_order.margin) OVER", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_row_number(self): query = Query().from_table(", "'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_first_value(self): query", "querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_first_value(self): query =", "= Query().from_table( table=Order, fields=[ '*', RowNumberField( over=QueryWindow().order_by( '-margin' ) )", "import QueryTestCase, get_comparison_str class QueryWindowTest(QueryTestCase): def test_query_window(self): query_window = QueryWindow()", "BY percent_rank ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "'OVER (PARTITION BY field_one)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_order(self):", "= query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one, field_two ORDER", "querybuilder_tests_order ' 'ORDER BY ntile ' 'ASC' ) self.assertEqual(query_str, expected_query,", "get_comparison_str(query_str, expected_query)) def test_lag(self): query = Query().from_table( table=Order, fields=[ '*',", "class WindowFunctionTest(QueryTestCase): def test_rank_no_over(self): query = Query().from_table( table=Order, fields=[ RankField()", "LastValueField( 'margin', over=QueryWindow().order_by( 'margin' ) ) ] ) query_str =", "'THEN ((querybuilder_tests_order.margin - (' 'AVG(querybuilder_tests_order.margin) OVER ())) / (STDDEV(querybuilder_tests_order.margin) OVER", "dense_rank ' 'ASC' ) self.assertEqual(query_str, 
expected_query, get_comparison_str(query_str, expected_query)) def test_rank_percent(self):", "= ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1, \\'0\\') OVER (ORDER", "field_two ORDER BY field_one ASC, field_two DESC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "' 'LAG(querybuilder_tests_order.margin, 1, \\'0\\') OVER (ORDER BY margin DESC) AS", "] ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*,", "querybuilder_tests_order.*, ' 'NTILE(2) OVER (ORDER BY margin DESC) AS \"ntile\"", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER", "2) OVER (ORDER BY margin DESC) AS \"margin_nth_value\" ' 'FROM", "def test_query_window_partition(self): query_window = QueryWindow().partition_by('field_one') query_str = query_window.get_sql() expected_query =", ") ] ).order_by( 'cume_dist' ) query_str = query.get_sql() expected_query =", "querybuilder_tests_order.*, ' 'ROW_NUMBER() OVER (ORDER BY margin DESC) AS \"row_number\"", "'*', LeadField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] ) query_str", "= Query().from_table( table=Order, fields=[ '*', LagField( 'margin', default=0, over=QueryWindow().order_by( '-margin'", "\"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over(self): query", "] ).order_by( 'cume_dist' ) query_str = query.get_sql() expected_query = (", "BY rank ' 'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_row_number(self): query = Query().from_table( table=Order,", "' 'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER BY margin DESC) AS \"margin_nth_value\"", "WHEN (STDDEV(querybuilder_tests_order.margin) OVER ()) <> 0 
' 'THEN ((querybuilder_tests_order.margin -", "Query().from_table( table=Order, fields=[ '*', PercentRankField( over=QueryWindow().order_by( '-margin' ) ) ]", "OVER (ORDER BY margin DESC) AS \"dense_rank\" ' 'FROM querybuilder_tests_order", "'field_one' ).partition_by( 'field_two' ).order_by( 'field_one' ).order_by( '-field_two' ) query_str =", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'DENSE_RANK() OVER (ORDER", "'margin' ) ) ] ) query_str = query.get_sql() expected_query =", "'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_percent(self): query =", "\\'0\\') OVER (ORDER BY margin DESC) AS \"margin_lag\" ' 'FROM", "expected_query)) def test_rank_over_order(self): query = Query().from_table( table=Order, fields=[ 'id', RankField(", "' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_ntile(self): query", "QueryWindow() query_str = query_window.get_sql() expected_query = 'OVER ()' self.assertEqual(query_str, expected_query,", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_last_value(self): query = Query().from_table( table=Order,", "'*', LagField( 'margin', default=0, over=QueryWindow().order_by( '-margin' ) ) ] )", "num_buckets=2, over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'ntile' ) query_str", "'dense_rank' ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*,", "get_comparison_str(query_str, expected_query)) def test_query_window_partition(self): query_window = QueryWindow().partition_by('field_one') query_str = query_window.get_sql()", "field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order(self): query_window =", "= Query().from_table( table=Order, fields=[ '*', LagField( 'margin', over=QueryWindow().order_by( '-margin' )", 
"AS \"margin_nth_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "def test_rank_percent(self): query = Query().from_table( table=Order, fields=[ '*', PercentRankField( over=QueryWindow().order_by(", "'NTILE(2) OVER (ORDER BY margin DESC) AS \"ntile\" ' 'FROM", "' '(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER ()) <> 0 ' 'THEN", "default=0, over=QueryWindow().order_by( '-margin' ) ) ] ) query_str = query.get_sql()", "DESC) AS \"row_number\" ' 'FROM querybuilder_tests_order ' 'ORDER BY row_number", "'field_one' ).order_by( '-field_two' ) query_str = query_window.get_sql() expected_query = 'OVER", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'NTILE(2) OVER (ORDER BY", "class QueryWindowTest(QueryTestCase): def test_query_window(self): query_window = QueryWindow() query_str = query_window.get_sql()", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER", "= ( 'SELECT querybuilder_tests_order.id, RANK() OVER (ORDER BY id ASC)", "account_id) AS \"rank\" FROM ' 'querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "QueryWindow().order_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER (ORDER BY field_one", "AS \"ntile\" ' 'FROM querybuilder_tests_order ' 'ORDER BY ntile '", "querybuilder_tests_order ' 'ORDER BY row_number ' 'ASC' ) self.assertEqual(query_str, expected_query,", "test_rank(self): query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id'", "margin DESC) AS \"margin_first_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query,", "expected_query)) class WindowFunctionTest(QueryTestCase): def test_rank_no_over(self): query = Query().from_table( table=Order, fields=[", "table=Order, fields=[ '*', LeadField( 'margin', 
over=QueryWindow().order_by( '-margin' ) ) ]", "get_comparison_str class QueryWindowTest(QueryTestCase): def test_query_window(self): query_window = QueryWindow() query_str =", "= Query().from_table( table=Order, fields=[ RankField() ] ) query_str = query.get_sql()", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin, 2)", "querybuilder_tests_order ' 'ORDER BY dense_rank ' 'ASC' ) self.assertEqual(query_str, expected_query,", "test_last_value(self): query = Query().from_table( table=Order, fields=[ '*', LastValueField( 'margin', over=QueryWindow().order_by(", "OVER ())) / (STDDEV(querybuilder_tests_order.margin) OVER ())) ' 'ELSE 0 '", "= Query().from_table( table=Order, fields=[ '*', FirstValueField( 'margin', over=QueryWindow().order_by( '-margin' )", "= QueryWindow().order_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER (ORDER BY", "field_two DESC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) class WindowFunctionTest(QueryTestCase): def test_rank_no_over(self):", "'-margin' ) ) ] ).order_by( 'dense_rank' ) query_str = query.get_sql()", "over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'dense_rank' ) query_str =", ").order_by( 'percent_rank' ) query_str = query.get_sql() expected_query = ( 'SELECT", "query = Query().from_table( table=Order, fields=[ '*', RowNumberField( over=QueryWindow().order_by( '-margin' )", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1,", "( 'SELECT querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin", "= query_window.get_sql() expected_query = 'OVER (ORDER BY field_one ASC)' self.assertEqual(query_str,", "query.get_sql() expected_query = ( 'SELECT 
querybuilder_tests_order.*, ' 'NTILE(2) OVER (ORDER", "expected_query)) def test_first_value(self): query = Query().from_table( table=Order, fields=[ '*', FirstValueField(", "expected_query, get_comparison_str(query_str, expected_query)) def test_first_value(self): query = Query().from_table( table=Order, fields=[", "Query().from_table( table=Order, fields=[ RankField( over=QueryWindow() ) ] ) query_str =", "querybuilder_tests_order.id, RANK() OVER (ORDER BY id ASC) AS \"rank\" FROM", "= Query().from_table( table=Order, fields=[ '*', NTileField( num_buckets=2, over=QueryWindow().order_by( '-margin' )", "rank ' 'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_dense_rank(self):", ") ) ] ).order_by( 'percent_rank' ) query_str = query.get_sql() expected_query", "over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'ntile' ) query_str =", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_partition(self): query = Query().from_table(", "'ORDER BY ntile ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "'AVG(querybuilder_tests_order.margin) OVER ())) / (STDDEV(querybuilder_tests_order.margin) OVER ())) ' 'ELSE 0", "(STDDEV(querybuilder_tests_order.margin) OVER ())) ' 'ELSE 0 ' 'END) ' 'AS", ") ) ] ).order_by( 'ntile' ) query_str = query.get_sql() expected_query", "query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().order_by( 'id' )", "= ( 'SELECT querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, ' 'RANK() OVER", "' 'ORDER BY cume_dist ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "Query().from_table( table=Order, fields=[ RankField() ] ) query_str = query.get_sql() expected_query", "DESC) AS 
\"margin_first_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin ASC) AS \"margin_last_value\"", "\"margin_lead\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "def test_num_stddev(self): query = Query().from_table( table=Order, fields=[ '*', NumStdDevField( 'margin',", "margin ASC) AS \"margin_last_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query,", "] ).order_by( 'row_number' ) query_str = query.get_sql() expected_query = (", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_nth_value(self): query = Query().from_table( table=Order,", "'field_one' ) query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY", "' 'FROM querybuilder_tests_order ' 'ORDER BY percent_rank ' 'ASC' )", "test_dense_rank(self): query = Query().from_table( table=Order, fields=[ '*', DenseRankField( over=QueryWindow().order_by( '-margin'", "query_window = QueryWindow().partition_by( 'field_one' ).order_by( 'field_one' ) query_str = query_window.get_sql()", ") query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one", "test_rank_no_over(self): query = Query().from_table( table=Order, fields=[ RankField() ] ) query_str", "Query().from_table( table=Order, fields=[ '*', LagField( 'margin', default=0, over=QueryWindow().order_by( '-margin' )", "test_num_stddev(self): query = Query().from_table( table=Order, fields=[ '*', NumStdDevField( 'margin', over=QueryWindow()", ") ] ).order_by( 'percent_rank' ) query_str = query.get_sql() expected_query =", ").order_by( 'field_one' ).order_by( '-field_two' ) query_str = query_window.get_sql() expected_query =", "] ).order_by( 'ntile' ) query_str = query.get_sql() expected_query = 
(", "(ORDER BY margin DESC) AS \"margin_lead\" ' 'FROM querybuilder_tests_order' )", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER", "] ).order_by( '-rank' ) query_str = query.get_sql() expected_query = (", "table=Order, fields=[ '*', RowNumberField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by(", "Query().from_table( table=Order, fields=[ '*', NTileField( num_buckets=2, over=QueryWindow().order_by( '-margin' ) )", "test_lag_default(self): query = Query().from_table( table=Order, fields=[ '*', LagField( 'margin', default=0,", "Query().from_table( table=Order, fields=[ '*', NumStdDevField( 'margin', over=QueryWindow() ) ] ).order_by(", "expected_query)) def test_cume_dist(self): query = Query().from_table( table=Order, fields=[ '*', CumeDistField(", "(ORDER BY margin DESC) AS \"cume_dist\" ' 'FROM querybuilder_tests_order '", "BY margin DESC) AS \"margin_nth_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str,", "querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag_default(self): query =", "Query().from_table( table=Order, fields=[ '*', NthValueField( 'margin', n=2, over=QueryWindow().order_by( '-margin' )", "'-margin_num_stddev' ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*,", "def test_query_window_partition_order(self): query_window = QueryWindow().partition_by( 'field_one' ).order_by( 'field_one' ) query_str", "ORDER BY field_one ASC, field_two DESC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "' 'ROW_NUMBER() OVER (ORDER BY margin DESC) AS \"row_number\" '", "'*', LagField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] ) query_str", "' 'THEN ((querybuilder_tests_order.margin - (' 'AVG(querybuilder_tests_order.margin) OVER ())) / (STDDEV(querybuilder_tests_order.margin)", "= 
Query().from_table( table=Order, fields=[ RankField( over=QueryWindow() ) ] ) query_str", "BY cume_dist ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "= Query().from_table( table=Order, fields=[ '*', DenseRankField( over=QueryWindow().order_by( '-margin' ) )", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_ntile(self): query = Query().from_table(", "'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS \"margin_lead\" '", "NthValueField( 'margin', n=2, over=QueryWindow().order_by( '-margin' ) ) ] ) query_str", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' '(CASE WHEN", "((querybuilder_tests_order.margin - (' 'AVG(querybuilder_tests_order.margin) OVER ())) / (STDDEV(querybuilder_tests_order.margin) OVER ()))", "def test_lag(self): query = Query().from_table( table=Order, fields=[ '*', LagField( 'margin',", "LagField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] ) query_str =", "' 'ORDER BY ntile ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_order(self): query_window = QueryWindow().order_by('field_one') query_str =", "'*', CumeDistField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'cume_dist' )", "expected_query, get_comparison_str(query_str, expected_query)) def test_nth_value(self): query = Query().from_table( table=Order, fields=[", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lead(self): query = Query().from_table( table=Order,", "FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_order(self): query =", "fields=[ '*', LagField( 'margin', default=0, over=QueryWindow().order_by( '-margin' ) ) ]", "= 
query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one ORDER BY", ") ] ) query_str = query.get_sql() expected_query = ( 'SELECT", "ORDER BY id ASC) AS \"rank\" ' 'FROM querybuilder_tests_order '", "expected_query)) def test_lead(self): query = Query().from_table( table=Order, fields=[ '*', LeadField(", "'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin ASC) AS \"margin_last_value\" ' 'FROM", "querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over(self): query = Query().from_table(", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, RANK() OVER (PARTITION BY", "OVER (ORDER BY margin DESC) AS \"margin_lead\" ' 'FROM querybuilder_tests_order'", "query = Query().from_table( table=Order, fields=[ '*', CumeDistField( over=QueryWindow().order_by( '-margin' )", "'FROM querybuilder_tests_order ' 'ORDER BY ntile ' 'ASC' ) self.assertEqual(query_str,", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin)", "margin DESC) AS \"margin_lead\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query,", "table=Order, fields=[ '*', NumStdDevField( 'margin', over=QueryWindow() ) ] ).order_by( '-margin_num_stddev'", "RANK() AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "'querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_row_number(self): query =", "LagField, LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField ) from querybuilder.query import", "'ORDER BY row_number ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "'SELECT querybuilder_tests_order.*, ' 'PERCENT_RANK() OVER (ORDER BY margin DESC) AS", "= 
Query().from_table( table=Order, fields=[ '*', LastValueField( 'margin', over=QueryWindow().order_by( 'margin' )", "expected_query)) def test_lag_default(self): query = Query().from_table( table=Order, fields=[ '*', LagField(", "expected_query = 'OVER (PARTITION BY field_one, field_two ORDER BY field_one", "1) OVER (ORDER BY margin DESC) AS \"margin_lead\" ' 'FROM", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_dense_rank(self): query = Query().from_table( table=Order,", "query = Query().from_table( table=Order, fields=[ '*', NthValueField( 'margin', n=2, over=QueryWindow().order_by(", "expected_query = 'SELECT RANK() AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query,", "querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, RANK() OVER", "expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_order(self): query = Query().from_table( table=Order, fields=[", "BY account_id ORDER BY id ASC) AS \"rank\" ' 'FROM", "' 'CUME_DIST() OVER (ORDER BY margin DESC) AS \"cume_dist\" '", "'LAG(querybuilder_tests_order.margin, 1, \\'0\\') OVER (ORDER BY margin DESC) AS \"margin_lag\"", "QueryWindowTest(QueryTestCase): def test_query_window(self): query_window = QueryWindow() query_str = query_window.get_sql() expected_query", "' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag(self): query", "fields=[ '*', NumStdDevField( 'margin', over=QueryWindow() ) ] ).order_by( '-margin_num_stddev' )", "querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_partition(self): query =", "def test_rank(self): query = Query().from_table( table=Order, fields=[ 'id', RankField( 
over=QueryWindow().partition_by(", "ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order_many(self): query_window = QueryWindow().partition_by(", "'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank(self): query =", "'PERCENT_RANK() OVER (ORDER BY margin DESC) AS \"percent_rank\" ' 'FROM", "QueryTestCase, get_comparison_str class QueryWindowTest(QueryTestCase): def test_query_window(self): query_window = QueryWindow() query_str", "] ) query_str = query.get_sql() expected_query = 'SELECT RANK() AS", "get_comparison_str(query_str, expected_query)) def test_cume_dist(self): query = Query().from_table( table=Order, fields=[ '*',", "( 'SELECT querybuilder_tests_order.*, ' 'NTILE(2) OVER (ORDER BY margin DESC)", "'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1, \\'0\\') OVER (ORDER BY margin", "expected_query)) def test_ntile(self): query = Query().from_table( table=Order, fields=[ '*', NTileField(", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_partition(self): query = Query().from_table( table=Order,", "percent_rank ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_cume_dist(self):", "querybuilder.fields import ( RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField, NTileField, LagField,", "def test_row_number(self): query = Query().from_table( table=Order, fields=[ '*', RowNumberField( over=QueryWindow().order_by(", "table=Order, fields=[ '*', FirstValueField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ]", "querybuilder.query import QueryWindow, Query from querybuilder.tests.models import Order from querybuilder.tests.query_tests", "BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def 
test_query_window_partition_order(self): query_window", "'OVER (PARTITION BY field_one, field_two ORDER BY field_one ASC, field_two", "BY margin DESC) AS \"margin_lag\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str,", "query_str = query_window.get_sql() expected_query = 'OVER (ORDER BY field_one ASC)'", "= ( 'SELECT querybuilder_tests_order.*, ' 'DENSE_RANK() OVER (ORDER BY margin", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order_many(self): query_window = QueryWindow().partition_by( 'field_one'", "= query.get_sql() expected_query = 'SELECT RANK() OVER () AS \"rank\"", "table=Order, fields=[ '*', LastValueField( 'margin', over=QueryWindow().order_by( 'margin' ) ) ]", "= ( 'SELECT querybuilder_tests_order.id, ' 'RANK() OVER (PARTITION BY account_id", "querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1, \\'0\\') OVER (ORDER BY margin DESC)", "over=QueryWindow().partition_by( 'account_id' ) ) ] ) query_str = query.get_sql() expected_query", "expected_query)) def test_rank_over_partition(self): query = Query().from_table( table=Order, fields=[ 'id', RankField(", "'-margin' ) ) ] ).order_by( 'ntile' ) query_str = query.get_sql()", "(PARTITION BY field_one ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'CUME_DIST() OVER", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1) OVER", "table=Order, fields=[ '*', CumeDistField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by(", "1) OVER (ORDER BY margin DESC) AS \"margin_lag\" ' 'FROM", "fields=[ '*', NTileField( num_buckets=2, over=QueryWindow().order_by( '-margin' ) ) ] ).order_by(", "'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS \"margin_lag\" '", "'id', RankField( 
over=QueryWindow().order_by( 'id' ) ) ] ) query_str =", "= 'OVER (PARTITION BY field_one)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag_default(self): query = Query().from_table(", "expected_query = 'OVER (ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "expected_query, get_comparison_str(query_str, expected_query)) def test_lag(self): query = Query().from_table( table=Order, fields=[", "' 'querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_row_number(self): query", "AS \"dense_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY dense_rank '", "fields=[ '*', PercentRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'percent_rank'", "AS \"rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY rank '", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'NTILE(2) OVER", "' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_num_stddev(self):", "get_comparison_str(query_str, expected_query)) def test_rank_over(self): query = Query().from_table( table=Order, fields=[ RankField(", "' 'ELSE 0 ' 'END) ' 'AS \"margin_num_stddev\" ' 'FROM", "( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1, \\'0\\') OVER (ORDER BY", "expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order(self): query_window = QueryWindow().partition_by( 'field_one' ).order_by(", "n=2, over=QueryWindow().order_by( '-margin' ) ) ] ) query_str = query.get_sql()", "margin DESC) AS \"margin_nth_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query,", "RowNumberField, DenseRankField, PercentRankField, 
CumeDistField, NTileField, LagField, LeadField, FirstValueField, LastValueField, NthValueField,", "= 'OVER ()' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition(self): query_window", "ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order(self): query_window = QueryWindow().partition_by(", "= ( 'SELECT querybuilder_tests_order.*, ' 'NTILE(2) OVER (ORDER BY margin", "def test_rank_over_partition(self): query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by(", "fields=[ '*', FirstValueField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] )", ").order_by( 'row_number' ) query_str = query.get_sql() expected_query = ( 'SELECT", "import QueryWindow, Query from querybuilder.tests.models import Order from querybuilder.tests.query_tests import", "query = Query().from_table( table=Order, fields=[ '*', NumStdDevField( 'margin', over=QueryWindow() )", "(ORDER BY margin ASC) AS \"margin_last_value\" ' 'FROM querybuilder_tests_order' )", "' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_nth_value(self):", "querybuilder_tests_order.*, ' 'CUME_DIST() OVER (ORDER BY margin DESC) AS \"cume_dist\"", "= 'OVER (ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER", "def test_nth_value(self): query = Query().from_table( table=Order, fields=[ '*', NthValueField( 'margin',", "querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def 
test_num_stddev(self): query =", "querybuilder_tests_order.id, ' 'RANK() OVER (PARTITION BY account_id ORDER BY id", "querybuilder_tests_order.id, RANK() OVER (PARTITION BY account_id) AS \"rank\" FROM '", "' 'ORDER BY row_number ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "expected_query = ( 'SELECT querybuilder_tests_order.id, RANK() OVER (ORDER BY id", "'OVER (PARTITION BY field_one ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query,", "BY id ASC) AS \"rank\" ' 'FROM querybuilder_tests_order ' 'ORDER", ") ] ) query_str = query.get_sql() expected_query = 'SELECT RANK()", "'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_num_stddev(self): query", "field_one)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_order(self): query_window = QueryWindow().order_by('field_one')", "get_comparison_str(query_str, expected_query)) def test_query_window_order(self): query_window = QueryWindow().order_by('field_one') query_str = query_window.get_sql()", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank(self): query = Query().from_table( table=Order,", "( 'SELECT querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin ASC)", "Query().from_table( table=Order, fields=[ '*', RowNumberField( over=QueryWindow().order_by( '-margin' ) ) ]", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'PERCENT_RANK() OVER (ORDER", "= Query().from_table( table=Order, fields=[ '*', NumStdDevField( 'margin', over=QueryWindow() ) ]", "= ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER BY", "test_rank_over_order(self): query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().order_by( 'id'", "query = 
Query().from_table( table=Order, fields=[ RankField( over=QueryWindow() ) ] )", "' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_first_value(self):", "'*', NTileField( num_buckets=2, over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'ntile'", "' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank(self): query", "( 'SELECT querybuilder_tests_order.id, ' 'RANK() OVER (PARTITION BY account_id ORDER", "'FROM querybuilder_tests_order ' 'ORDER BY percent_rank ' 'ASC' ) self.assertEqual(query_str,", "NTileField( num_buckets=2, over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'ntile' )", "() AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "' 'ORDER BY percent_rank ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "LastValueField, NthValueField, NumStdDevField ) from querybuilder.query import QueryWindow, Query from", "test_ntile(self): query = Query().from_table( table=Order, fields=[ '*', NTileField( num_buckets=2, over=QueryWindow().order_by(", "get_comparison_str(query_str, expected_query)) def test_lag_default(self): query = Query().from_table( table=Order, fields=[ '*',", "query_window.get_sql() expected_query = 'OVER ()' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "'FROM querybuilder_tests_order ' 'ORDER BY row_number ' 'ASC' ) self.assertEqual(query_str,", "Order from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str class QueryWindowTest(QueryTestCase): def test_query_window(self):", ") ) ] ).order_by( '-rank' ) query_str = query.get_sql() expected_query", "'-margin' ) ) ] ).order_by( 'row_number' ) query_str = query.get_sql()", "(PARTITION BY account_id ORDER BY id ASC) AS \"rank\" '", ").order_by( 'id' 
) ) ] ).order_by( '-rank' ) query_str =", "AS \"rank\" FROM ' 'querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", ") query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, '", "'AS \"margin_num_stddev\" ' 'FROM querybuilder_tests_order ' 'ORDER BY margin_num_stddev '", "expected_query, get_comparison_str(query_str, expected_query)) def test_rank_percent(self): query = Query().from_table( table=Order, fields=[", "querybuilder_tests_order.*, ' 'DENSE_RANK() OVER (ORDER BY margin DESC) AS \"dense_rank\"", "\"percent_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY percent_rank ' 'ASC'", "expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition(self): query_window = QueryWindow().partition_by('field_one') query_str =", "'*', FirstValueField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] ) query_str", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'CUME_DIST()", "= 'OVER (PARTITION BY field_one ORDER BY field_one ASC)' self.assertEqual(query_str,", "fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ).order_by( 'id' ) ) ]", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'ROW_NUMBER() OVER (ORDER", ") query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, '", "())) ' 'ELSE 0 ' 'END) ' 'AS \"margin_num_stddev\" '", "query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one)' self.assertEqual(query_str,", "field_one ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "'ORDER BY margin_num_stddev ' 'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin,", 
"query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one, field_two ORDER BY", "margin DESC) AS \"row_number\" ' 'FROM querybuilder_tests_order ' 'ORDER BY", "' 'END) ' 'AS \"margin_num_stddev\" ' 'FROM querybuilder_tests_order ' 'ORDER", "FirstValueField, LastValueField, NthValueField, NumStdDevField ) from querybuilder.query import QueryWindow, Query", "( 'SELECT querybuilder_tests_order.*, ' 'DENSE_RANK() OVER (ORDER BY margin DESC)", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition(self): query_window = QueryWindow().partition_by('field_one') query_str", "from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str class QueryWindowTest(QueryTestCase): def test_query_window(self): query_window", "expected_query = 'OVER (PARTITION BY field_one)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "= query.get_sql() expected_query = 'SELECT RANK() AS \"rank\" FROM querybuilder_tests_order'", "BY margin DESC) AS \"margin_first_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str,", "'SELECT querybuilder_tests_order.id, RANK() OVER (PARTITION BY account_id) AS \"rank\" FROM", "get_comparison_str(query_str, expected_query)) def test_rank_over_partition(self): query = Query().from_table( table=Order, fields=[ 'id',", "' 'FROM querybuilder_tests_order ' 'ORDER BY row_number ' 'ASC' )", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'ROW_NUMBER() OVER", "get_comparison_str(query_str, expected_query)) def test_last_value(self): query = Query().from_table( table=Order, fields=[ '*',", "' 'PERCENT_RANK() OVER (ORDER BY margin DESC) AS \"percent_rank\" '", "DESC) AS \"margin_lead\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "'account_id' ).order_by( 'id' ) ) ] ).order_by( '-rank' ) query_str", "RankField( 
over=QueryWindow().order_by( 'id' ) ) ] ) query_str = query.get_sql()", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag(self): query = Query().from_table(", "expected_query, get_comparison_str(query_str, expected_query)) def test_lead(self): query = Query().from_table( table=Order, fields=[", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'ROW_NUMBER() OVER (ORDER BY", "\"margin_lag\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "(' 'AVG(querybuilder_tests_order.margin) OVER ())) / (STDDEV(querybuilder_tests_order.margin) OVER ())) ' 'ELSE", "BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order_many(self): query_window", "DESC) AS \"margin_lag\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "' 'FROM querybuilder_tests_order ' 'ORDER BY margin_num_stddev ' 'DESC' )", "= Query().from_table( table=Order, fields=[ '*', CumeDistField( over=QueryWindow().order_by( '-margin' ) )", "QueryWindow().partition_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one)'", "' 'AS \"margin_num_stddev\" ' 'FROM querybuilder_tests_order ' 'ORDER BY margin_num_stddev", "= QueryWindow().partition_by( 'field_one' ).order_by( 'field_one' ) query_str = query_window.get_sql() expected_query", "expected_query, get_comparison_str(query_str, expected_query)) class WindowFunctionTest(QueryTestCase): def test_rank_no_over(self): query = Query().from_table(", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_dense_rank(self): query = Query().from_table(", "BY account_id) AS \"rank\" FROM ' 'querybuilder_tests_order' ) self.assertEqual(query_str, expected_query,", "table=Order, fields=[ 
'*', PercentRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by(", ") from querybuilder.query import QueryWindow, Query from querybuilder.tests.models import Order", "NthValueField, NumStdDevField ) from querybuilder.query import QueryWindow, Query from querybuilder.tests.models", "get_comparison_str(query_str, expected_query)) def test_rank_over_order(self): query = Query().from_table( table=Order, fields=[ 'id',", "table=Order, fields=[ 'id', RankField( over=QueryWindow().order_by( 'id' ) ) ] )", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_ntile(self): query = Query().from_table( table=Order,", "get_comparison_str(query_str, expected_query)) def test_first_value(self): query = Query().from_table( table=Order, fields=[ '*',", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin, 1)", "BY margin DESC) AS \"margin_lead\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str,", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_first_value(self): query = Query().from_table( table=Order,", "'*', RowNumberField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'row_number' )", "\"margin_last_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "table=Order, fields=[ '*', NthValueField( 'margin', n=2, over=QueryWindow().order_by( '-margin' ) )", "= Query().from_table( table=Order, fields=[ '*', PercentRankField( over=QueryWindow().order_by( '-margin' ) )", "expected_query, get_comparison_str(query_str, expected_query)) def test_cume_dist(self): query = Query().from_table( table=Order, fields=[", "LagField( 'margin', default=0, over=QueryWindow().order_by( '-margin' ) ) ] ) query_str", "RankField( over=QueryWindow().partition_by( 'account_id' ) ) ] ) query_str = query.get_sql()", 
"fields=[ '*', CumeDistField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'cume_dist'", "( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin)", "fields=[ '*', LeadField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] )", "expected_query)) def test_query_window_order(self): query_window = QueryWindow().order_by('field_one') query_str = query_window.get_sql() expected_query", "from querybuilder.tests.models import Order from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str class", "OVER (ORDER BY margin DESC) AS \"ntile\" ' 'FROM querybuilder_tests_order", "query_window = QueryWindow().partition_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER (PARTITION", "RankField() ] ) query_str = query.get_sql() expected_query = 'SELECT RANK()", "'SELECT querybuilder_tests_order.*, ' 'NTILE(2) OVER (ORDER BY margin DESC) AS", "( 'SELECT querybuilder_tests_order.id, RANK() OVER (PARTITION BY account_id) AS \"rank\"", "(ORDER BY margin DESC) AS \"margin_lag\" ' 'FROM querybuilder_tests_order' )", "ASC) AS \"rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY rank", "\"rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY rank ' 'DESC'", "Query from querybuilder.tests.models import Order from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str", "table=Order, fields=[ '*', DenseRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by(", "def test_first_value(self): query = Query().from_table( table=Order, fields=[ '*', FirstValueField( 'margin',", "querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lead(self): query =", "= 'SELECT RANK() OVER () AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str,", 
"QueryWindow().partition_by( 'field_one' ).partition_by( 'field_two' ).order_by( 'field_one' ).order_by( '-field_two' ) query_str", "'account_id' ) ) ] ) query_str = query.get_sql() expected_query =", "query_window.get_sql() expected_query = 'OVER (ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query,", "'margin', default=0, over=QueryWindow().order_by( '-margin' ) ) ] ) query_str =", "OVER (PARTITION BY account_id ORDER BY id ASC) AS \"rank\"", "' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lead(self):", "<reponame>wesokes/django-query-builder<filename>querybuilder/tests/window_tests.py<gh_stars>100-1000 from querybuilder.fields import ( RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField,", "= ( 'SELECT querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin", "fields=[ '*', DenseRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'dense_rank'", "query = Query().from_table( table=Order, fields=[ '*', LagField( 'margin', over=QueryWindow().order_by( '-margin'", "query = Query().from_table( table=Order, fields=[ '*', DenseRankField( over=QueryWindow().order_by( '-margin' )", "(ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order(self):", "(ORDER BY margin DESC) AS \"dense_rank\" ' 'FROM querybuilder_tests_order '", "( 'SELECT querybuilder_tests_order.*, ' 'PERCENT_RANK() OVER (ORDER BY margin DESC)", "test_rank_over_partition(self): query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id'", "' 'FROM querybuilder_tests_order ' 'ORDER BY dense_rank ' 'ASC' )", "querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_last_value(self): query =", "( 'SELECT 
querybuilder_tests_order.id, RANK() OVER (ORDER BY id ASC) AS", "def test_ntile(self): query = Query().from_table( table=Order, fields=[ '*', NTileField( num_buckets=2,", "( 'SELECT querybuilder_tests_order.*, ' '(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER ()) <>", "= QueryWindow() query_str = query_window.get_sql() expected_query = 'OVER ()' self.assertEqual(query_str,", "query_window = QueryWindow().order_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER (ORDER", "(PARTITION BY field_one)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_order(self): query_window", "\"rank\" FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_partition(self):", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_order(self): query = Query().from_table( table=Order,", "OVER ())) ' 'ELSE 0 ' 'END) ' 'AS \"margin_num_stddev\"", "over=QueryWindow() ) ] ).order_by( '-margin_num_stddev' ) query_str = query.get_sql() expected_query", ").order_by( '-margin_num_stddev' ) query_str = query.get_sql() expected_query = ( 'SELECT", "RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField, NTileField, LagField, LeadField, FirstValueField, LastValueField,", "ASC, field_two DESC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) class WindowFunctionTest(QueryTestCase): def", ").order_by( 'ntile' ) query_str = query.get_sql() expected_query = ( 'SELECT", "def test_rank_over(self): query = Query().from_table( table=Order, fields=[ RankField( over=QueryWindow() )", "over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'percent_rank' ) query_str =", "row_number ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank(self):", 
"Query().from_table( table=Order, fields=[ '*', CumeDistField( over=QueryWindow().order_by( '-margin' ) ) ]", "())) / (STDDEV(querybuilder_tests_order.margin) OVER ())) ' 'ELSE 0 ' 'END)", "= ( 'SELECT querybuilder_tests_order.*, ' '(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER ())", "querybuilder_tests_order.*, ' 'PERCENT_RANK() OVER (ORDER BY margin DESC) AS \"percent_rank\"", "- (' 'AVG(querybuilder_tests_order.margin) OVER ())) / (STDDEV(querybuilder_tests_order.margin) OVER ())) '", "'*', PercentRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'percent_rank' )", "test_query_window_partition_order_many(self): query_window = QueryWindow().partition_by( 'field_one' ).partition_by( 'field_two' ).order_by( 'field_one' ).order_by(", "'ORDER BY dense_rank ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "test_cume_dist(self): query = Query().from_table( table=Order, fields=[ '*', CumeDistField( over=QueryWindow().order_by( '-margin'", "/ (STDDEV(querybuilder_tests_order.margin) OVER ())) ' 'ELSE 0 ' 'END) '", "querybuilder_tests_order ' 'ORDER BY margin_num_stddev ' 'DESC' ) self.assertEqual(query_str, expected_query,", ") query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, RANK()", "'id', RankField( over=QueryWindow().partition_by( 'account_id' ).order_by( 'id' ) ) ] ).order_by(", "def test_query_window_order(self): query_window = QueryWindow().order_by('field_one') query_str = query_window.get_sql() expected_query =", "'percent_rank' ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*,", "'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_cume_dist(self): query =", "'*', NthValueField( 'margin', n=2, over=QueryWindow().order_by( '-margin' ) ) ] )", "Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().order_by( 'id' ) ) ]", ") query_str = 
query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one,", "FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_partition(self): query", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, ' 'RANK()", "' 'RANK() OVER (PARTITION BY account_id ORDER BY id ASC)", "margin DESC) AS \"cume_dist\" ' 'FROM querybuilder_tests_order ' 'ORDER BY", "'margin', over=QueryWindow() ) ] ).order_by( '-margin_num_stddev' ) query_str = query.get_sql()", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'PERCENT_RANK()", "querybuilder_tests_order ' 'ORDER BY percent_rank ' 'ASC' ) self.assertEqual(query_str, expected_query,", "' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin DESC) AS \"margin_first_value\" '", "AS \"percent_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY percent_rank '", "expected_query)) def test_last_value(self): query = Query().from_table( table=Order, fields=[ '*', LastValueField(", "'-rank' ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id,", "field_one, field_two ORDER BY field_one ASC, field_two DESC)' self.assertEqual(query_str, expected_query,", "\"margin_nth_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER", "OVER ()) <> 0 ' 'THEN ((querybuilder_tests_order.margin - (' 'AVG(querybuilder_tests_order.margin)", "'SELECT querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin DESC) AS", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order(self): query_window = QueryWindow().partition_by( 'field_one'", "' 'ASC' ) 
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_cume_dist(self): query", "def test_last_value(self): query = Query().from_table( table=Order, fields=[ '*', LastValueField( 'margin',", "get_comparison_str(query_str, expected_query)) class WindowFunctionTest(QueryTestCase): def test_rank_no_over(self): query = Query().from_table( table=Order,", "DESC) AS \"dense_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY dense_rank", "expected_query)) def test_num_stddev(self): query = Query().from_table( table=Order, fields=[ '*', NumStdDevField(", "margin DESC) AS \"ntile\" ' 'FROM querybuilder_tests_order ' 'ORDER BY", "( 'SELECT querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin DESC)", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY", "'-margin' ) ) ] ) query_str = query.get_sql() expected_query =", ").order_by( '-field_two' ) query_str = query_window.get_sql() expected_query = 'OVER (PARTITION", "get_comparison_str(query_str, expected_query)) def test_rank_percent(self): query = Query().from_table( table=Order, fields=[ '*',", "OVER (ORDER BY margin DESC) AS \"cume_dist\" ' 'FROM querybuilder_tests_order", "expected_query)) def test_nth_value(self): query = Query().from_table( table=Order, fields=[ '*', NthValueField(", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_num_stddev(self): query = Query().from_table( table=Order,", "expected_query)) def test_query_window_partition(self): query_window = QueryWindow().partition_by('field_one') query_str = query_window.get_sql() expected_query", "table=Order, fields=[ '*', LagField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ]", "test_row_number(self): query = Query().from_table( table=Order, fields=[ '*', RowNumberField( over=QueryWindow().order_by( '-margin'", "AS \"cume_dist\" ' 'FROM 
querybuilder_tests_order ' 'ORDER BY cume_dist '", ") ) ] ).order_by( 'cume_dist' ) query_str = query.get_sql() expected_query", "expected_query = ( 'SELECT querybuilder_tests_order.id, RANK() OVER (PARTITION BY account_id)", "AS \"margin_lead\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "expected_query)) def test_query_window_partition_order(self): query_window = QueryWindow().partition_by( 'field_one' ).order_by( 'field_one' )", "BY margin ASC) AS \"margin_last_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str,", "NumStdDevField ) from querybuilder.query import QueryWindow, Query from querybuilder.tests.models import", "] ).order_by( 'percent_rank' ) query_str = query.get_sql() expected_query = (", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'DENSE_RANK()", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY", "expected_query)) def test_lag(self): query = Query().from_table( table=Order, fields=[ '*', LagField(", "= ( 'SELECT querybuilder_tests_order.*, ' 'CUME_DIST() OVER (ORDER BY margin", "\"ntile\" ' 'FROM querybuilder_tests_order ' 'ORDER BY ntile ' 'ASC'", "'SELECT RANK() OVER () AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query,", "get_comparison_str(query_str, expected_query)) def test_lead(self): query = Query().from_table( table=Order, fields=[ '*',", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAST_VALUE(querybuilder_tests_order.margin) OVER", "'ELSE 0 ' 'END) ' 'AS \"margin_num_stddev\" ' 'FROM querybuilder_tests_order", "'-field_two' ) query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY", ") query_str = query.get_sql() expected_query = 'SELECT RANK() AS \"rank\"", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, 
expected_query)) def test_nth_value(self): query = Query().from_table(", "id ASC) AS \"rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY", "RANK() OVER () AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", ") ) ] ) query_str = query.get_sql() expected_query = (", "' 'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS \"margin_lead\"", "= ( 'SELECT querybuilder_tests_order.*, ' 'ROW_NUMBER() OVER (ORDER BY margin", "OVER (ORDER BY margin DESC) AS \"margin_lag\" ' 'FROM querybuilder_tests_order'", "( RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField, NTileField, LagField, LeadField, FirstValueField,", "test_query_window_partition_order(self): query_window = QueryWindow().partition_by( 'field_one' ).order_by( 'field_one' ) query_str =", ") ] ).order_by( '-margin_num_stddev' ) query_str = query.get_sql() expected_query =", "'margin', over=QueryWindow().order_by( '-margin' ) ) ] ) query_str = query.get_sql()", "fields=[ '*', NthValueField( 'margin', n=2, over=QueryWindow().order_by( '-margin' ) ) ]", "(PARTITION BY field_one, field_two ORDER BY field_one ASC, field_two DESC)'", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' '(CASE", "query = Query().from_table( table=Order, fields=[ '*', LastValueField( 'margin', over=QueryWindow().order_by( 'margin'", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1)", "DESC) AS \"cume_dist\" ' 'FROM querybuilder_tests_order ' 'ORDER BY cume_dist", "expected_query)) def test_query_window_partition_order_many(self): query_window = QueryWindow().partition_by( 'field_one' ).partition_by( 'field_two' ).order_by(", "LeadField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] ) query_str =", "Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ) ) ]", "= 
query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id, RANK() OVER (ORDER", "expected_query)) def test_row_number(self): query = Query().from_table( table=Order, fields=[ '*', RowNumberField(", "AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_order(self):", "test_query_window_partition(self): query_window = QueryWindow().partition_by('field_one') query_str = query_window.get_sql() expected_query = 'OVER", "FirstValueField( 'margin', over=QueryWindow().order_by( '-margin' ) ) ] ) query_str =", "test_query_window(self): query_window = QueryWindow() query_str = query_window.get_sql() expected_query = 'OVER", "field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order_many(self): query_window =", "query_str = query_window.get_sql() expected_query = 'OVER ()' self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "DESC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) class WindowFunctionTest(QueryTestCase): def test_rank_no_over(self): query", "test_nth_value(self): query = Query().from_table( table=Order, fields=[ '*', NthValueField( 'margin', n=2,", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag_default(self): query = Query().from_table( table=Order,", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag(self): query = Query().from_table( table=Order,", "QueryWindow().partition_by( 'field_one' ).order_by( 'field_one' ) query_str = query_window.get_sql() expected_query =", "query_str = query.get_sql() expected_query = 'SELECT RANK() AS \"rank\" FROM", "'OVER (ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "'SELECT 
querybuilder_tests_order.*, ' 'ROW_NUMBER() OVER (ORDER BY margin DESC) AS", "RowNumberField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'row_number' ) query_str", "'id' ) ) ] ).order_by( '-rank' ) query_str = query.get_sql()", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1, \\'0\\') OVER", "'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_nth_value(self): query", "fields=[ RankField( over=QueryWindow() ) ] ) query_str = query.get_sql() expected_query", "Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ).order_by( 'id' )", "query = Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ).order_by(", "expected_query, get_comparison_str(query_str, expected_query)) def test_row_number(self): query = Query().from_table( table=Order, fields=[", "\"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over_order(self): query", "expected_query)) def test_rank_percent(self): query = Query().from_table( table=Order, fields=[ '*', PercentRankField(", "' 'FROM querybuilder_tests_order ' 'ORDER BY rank ' 'DESC' )", "expected_query = 'OVER (PARTITION BY field_one ORDER BY field_one ASC)'", "= Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ).order_by( 'id'", "'ntile' ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*,", "AS \"rank\" FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "NTileField, LagField, LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField ) from querybuilder.query", "(ORDER BY margin DESC) AS \"margin_first_value\" ' 'FROM 
querybuilder_tests_order' )", "from querybuilder.query import QueryWindow, Query from querybuilder.tests.models import Order from", "expected_query, get_comparison_str(query_str, expected_query)) def test_ntile(self): query = Query().from_table( table=Order, fields=[", ") ) ] ).order_by( 'dense_rank' ) query_str = query.get_sql() expected_query", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_percent(self): query = Query().from_table(", "test_lead(self): query = Query().from_table( table=Order, fields=[ '*', LeadField( 'margin', over=QueryWindow().order_by(", "()' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition(self): query_window = QueryWindow().partition_by('field_one')", "cume_dist ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_ntile(self):", "= ( 'SELECT querybuilder_tests_order.id, RANK() OVER (PARTITION BY account_id) AS", "BY field_one ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "expected_query, get_comparison_str(query_str, expected_query)) def test_dense_rank(self): query = Query().from_table( table=Order, fields=[", "query_window = QueryWindow().partition_by( 'field_one' ).partition_by( 'field_two' ).order_by( 'field_one' ).order_by( '-field_two'", "Query().from_table( table=Order, fields=[ '*', LeadField( 'margin', over=QueryWindow().order_by( '-margin' ) )", "OVER (ORDER BY margin DESC) AS \"margin_nth_value\" ' 'FROM querybuilder_tests_order'", "ORDER BY field_one ASC)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition_order_many(self):", "'SELECT querybuilder_tests_order.id, ' 'RANK() OVER (PARTITION BY account_id ORDER BY", "' 'DESC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, 
expected_query)) def test_dense_rank(self): query", "BY row_number ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "= ( 'SELECT querybuilder_tests_order.*, ' 'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER BY", "DESC) AS \"percent_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY percent_rank", "BY id ASC) AS \"rank\" FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query,", "QueryWindow, Query from querybuilder.tests.models import Order from querybuilder.tests.query_tests import QueryTestCase,", "\"dense_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY dense_rank ' 'ASC'", "query = Query().from_table( table=Order, fields=[ RankField() ] ) query_str =", "0 ' 'THEN ((querybuilder_tests_order.margin - (' 'AVG(querybuilder_tests_order.margin) OVER ())) /", "expected_query)) def test_rank_over(self): query = Query().from_table( table=Order, fields=[ RankField( over=QueryWindow()", "' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_percent(self): query", "table=Order, fields=[ RankField( over=QueryWindow() ) ] ) query_str = query.get_sql()", "AS \"margin_first_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "margin DESC) AS \"percent_rank\" ' 'FROM querybuilder_tests_order ' 'ORDER BY", "expected_query = 'OVER ()' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_partition(self):", "BY field_one)' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_order(self): query_window =", "query = Query().from_table( table=Order, fields=[ '*', LeadField( 'margin', over=QueryWindow().order_by( '-margin'", "] ).order_by( '-margin_num_stddev' ) query_str = query.get_sql() expected_query = (", "def 
test_query_window_partition_order_many(self): query_window = QueryWindow().partition_by( 'field_one' ).partition_by( 'field_two' ).order_by( 'field_one'", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_last_value(self): query = Query().from_table(", "BY margin DESC) AS \"ntile\" ' 'FROM querybuilder_tests_order ' 'ORDER", "'ORDER BY percent_rank ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "OVER (ORDER BY margin DESC) AS \"margin_first_value\" ' 'FROM querybuilder_tests_order'", "query.get_sql() expected_query = 'SELECT RANK() OVER () AS \"rank\" FROM", "get_comparison_str(query_str, expected_query)) def test_rank(self): query = Query().from_table( table=Order, fields=[ 'id',", "'RANK() OVER (PARTITION BY account_id ORDER BY id ASC) AS", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'ROW_NUMBER()", "AS \"margin_last_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "'*', NumStdDevField( 'margin', over=QueryWindow() ) ] ).order_by( '-margin_num_stddev' ) query_str", "test_rank_over(self): query = Query().from_table( table=Order, fields=[ RankField( over=QueryWindow() ) ]", "'margin', n=2, over=QueryWindow().order_by( '-margin' ) ) ] ) query_str =", "querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_nth_value(self): query =", ").partition_by( 'field_two' ).order_by( 'field_one' ).order_by( '-field_two' ) query_str = query_window.get_sql()", "AS \"row_number\" ' 'FROM querybuilder_tests_order ' 'ORDER BY row_number '", "'SELECT querybuilder_tests_order.id, RANK() OVER (ORDER BY id ASC) AS \"rank\"", "= Query().from_table( table=Order, fields=[ '*', NthValueField( 'margin', n=2, over=QueryWindow().order_by( '-margin'", 
"'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin DESC) AS \"margin_first_value\" ' 'FROM", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_query_window_order(self): query_window = QueryWindow().order_by('field_one') query_str", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'CUME_DIST() OVER (ORDER", "= ( 'SELECT querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER BY", "(ORDER BY margin DESC) AS \"row_number\" ' 'FROM querybuilder_tests_order '", "PercentRankField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'percent_rank' ) query_str", "margin DESC) AS \"margin_lag\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query,", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_first_value(self): query = Query().from_table(", "CumeDistField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'cume_dist' ) query_str", "'(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER ()) <> 0 ' 'THEN ((querybuilder_tests_order.margin", "expected_query, get_comparison_str(query_str, expected_query)) def test_rank(self): query = Query().from_table( table=Order, fields=[", "OVER () AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over(self):", "Query().from_table( table=Order, fields=[ '*', DenseRankField( over=QueryWindow().order_by( '-margin' ) ) ]", "PercentRankField, CumeDistField, NTileField, LagField, LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField )", "OVER (ORDER BY margin ASC) AS \"margin_last_value\" ' 'FROM querybuilder_tests_order'", "( 'SELECT querybuilder_tests_order.*, ' 
'ROW_NUMBER() OVER (ORDER BY margin DESC)", "' 'FROM querybuilder_tests_order ' 'ORDER BY cume_dist ' 'ASC' )", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'NTILE(2)", "over=QueryWindow().partition_by( 'account_id' ).order_by( 'id' ) ) ] ).order_by( '-rank' )", "expected_query, get_comparison_str(query_str, expected_query)) def test_num_stddev(self): query = Query().from_table( table=Order, fields=[", "= 'SELECT RANK() AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str,", "NumStdDevField( 'margin', over=QueryWindow() ) ] ).order_by( '-margin_num_stddev' ) query_str =", "BY ntile ' 'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "= ( 'SELECT querybuilder_tests_order.*, ' 'PERCENT_RANK() OVER (ORDER BY margin", "\"margin_first_value\" ' 'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "= QueryWindow().partition_by( 'field_one' ).partition_by( 'field_two' ).order_by( 'field_one' ).order_by( '-field_two' )", "'SELECT RANK() AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))", "= query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'PERCENT_RANK() OVER", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'DENSE_RANK() OVER (ORDER BY", "expected_query = 'SELECT RANK() OVER () AS \"rank\" FROM querybuilder_tests_order'", "OVER (ORDER BY margin DESC) AS \"percent_rank\" ' 'FROM querybuilder_tests_order", ") ] ).order_by( 'row_number' ) query_str = query.get_sql() expected_query =", "test_lag(self): query = Query().from_table( table=Order, fields=[ '*', LagField( 'margin', over=QueryWindow().order_by(", "OVER (ORDER BY id ASC) AS \"rank\" FROM querybuilder_tests_order' )", "'FROM querybuilder_tests_order' ) 
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_last_value(self): query", "query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'LAG(querybuilder_tests_order.margin, 1, \\'0\\')", "' 'NTILE(2) OVER (ORDER BY margin DESC) AS \"ntile\" '", ") self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_cume_dist(self): query = Query().from_table(", "get_comparison_str(query_str, expected_query)) def test_ntile(self): query = Query().from_table( table=Order, fields=[ '*',", "'CUME_DIST() OVER (ORDER BY margin DESC) AS \"cume_dist\" ' 'FROM", "' 'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS \"margin_lag\"", "expected_query = ( 'SELECT querybuilder_tests_order.*, ' '(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER", "table=Order, fields=[ '*', LagField( 'margin', default=0, over=QueryWindow().order_by( '-margin' ) )", "query.get_sql() expected_query = 'SELECT RANK() AS \"rank\" FROM querybuilder_tests_order' self.assertEqual(query_str,", "'ASC' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_ntile(self): query =", "'*', LastValueField( 'margin', over=QueryWindow().order_by( 'margin' ) ) ] ) query_str", "DESC) AS \"ntile\" ' 'FROM querybuilder_tests_order ' 'ORDER BY ntile", "querybuilder_tests_order ' 'ORDER BY cume_dist ' 'ASC' ) self.assertEqual(query_str, expected_query,", "get_comparison_str(query_str, expected_query)) def test_query_window_partition_order(self): query_window = QueryWindow().partition_by( 'field_one' ).order_by( 'field_one'", "self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_rank_over(self): query = Query().from_table( table=Order,", ") ) ] ).order_by( 'row_number' ) query_str = query.get_sql() expected_query", "OVER (ORDER BY margin DESC) AS \"row_number\" ' 'FROM querybuilder_tests_order", "def 
test_lag_default(self): query = Query().from_table( table=Order, fields=[ '*', LagField( 'margin',", "'margin', over=QueryWindow().order_by( 'margin' ) ) ] ) query_str = query.get_sql()", "= query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one)' self.assertEqual(query_str, expected_query,", "(PARTITION BY account_id) AS \"rank\" FROM ' 'querybuilder_tests_order' ) self.assertEqual(query_str,", "account_id ORDER BY id ASC) AS \"rank\" ' 'FROM querybuilder_tests_order", "def test_dense_rank(self): query = Query().from_table( table=Order, fields=[ '*', DenseRankField( over=QueryWindow().order_by(", "test_first_value(self): query = Query().from_table( table=Order, fields=[ '*', FirstValueField( 'margin', over=QueryWindow().order_by(", "query = Query().from_table( table=Order, fields=[ '*', NTileField( num_buckets=2, over=QueryWindow().order_by( '-margin'", "query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.*, ' 'NTH_VALUE(querybuilder_tests_order.margin,", "'-margin' ) ) ] ).order_by( 'cume_dist' ) query_str = query.get_sql()", "CumeDistField, NTileField, LagField, LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField ) from", "expected_query)) def test_dense_rank(self): query = Query().from_table( table=Order, fields=[ '*', DenseRankField(", "query_window = QueryWindow() query_str = query_window.get_sql() expected_query = 'OVER ()'", "query_str = query_window.get_sql() expected_query = 'OVER (PARTITION BY field_one, field_two", "] ).order_by( 'dense_rank' ) query_str = query.get_sql() expected_query = (", "= Query().from_table( table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ) )", "fields=[ '*', RowNumberField( over=QueryWindow().order_by( '-margin' ) ) ] ).order_by( 'row_number'", "get_comparison_str(query_str, expected_query)) def test_nth_value(self): query = Query().from_table( table=Order, fields=[ '*',", "'field_two' ).order_by( 'field_one' ).order_by( 
'-field_two' ) query_str = query_window.get_sql() expected_query", "(ORDER BY id ASC) AS \"rank\" FROM querybuilder_tests_order' ) self.assertEqual(query_str,", "OVER (PARTITION BY account_id) AS \"rank\" FROM ' 'querybuilder_tests_order' )", "import ( RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField, NTileField, LagField, LeadField,", "(ORDER BY margin DESC) AS \"margin_nth_value\" ' 'FROM querybuilder_tests_order' )", "def test_rank_no_over(self): query = Query().from_table( table=Order, fields=[ RankField() ] )", "get_comparison_str(query_str, expected_query)) def test_query_window_partition_order_many(self): query_window = QueryWindow().partition_by( 'field_one' ).partition_by( 'field_two'", "table=Order, fields=[ '*', NTileField( num_buckets=2, over=QueryWindow().order_by( '-margin' ) ) ]", "WindowFunctionTest(QueryTestCase): def test_rank_no_over(self): query = Query().from_table( table=Order, fields=[ RankField() ]", ").order_by( 'field_one' ) query_str = query_window.get_sql() expected_query = 'OVER (PARTITION", "over=QueryWindow() ) ] ) query_str = query.get_sql() expected_query = 'SELECT", "\"rank\" FROM ' 'querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def", "'FROM querybuilder_tests_order' ) self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query)) def test_lag_default(self): query", "table=Order, fields=[ 'id', RankField( over=QueryWindow().partition_by( 'account_id' ).order_by( 'id' ) )", "] ) query_str = query.get_sql() expected_query = ( 'SELECT querybuilder_tests_order.id,", "(ORDER BY margin DESC) AS \"ntile\" ' 'FROM querybuilder_tests_order '", "Query().from_table( table=Order, fields=[ '*', LastValueField( 'margin', over=QueryWindow().order_by( 'margin' ) )" ]
[ "import numpy h = .25 s = 1 bitmap =", "= 1 bitmap = numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0],", "<reponame>wbprice/ojimoji import numpy h = .25 s = 1 bitmap", "1 bitmap = numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0],", "h = .25 s = 1 bitmap = numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],", "numpy h = .25 s = 1 bitmap = numpy.array([", "[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0],", "[0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0], [0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0], [0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],", "= .25 s = 1 bitmap = numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],", ".25 s = 1 bitmap = numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],", "numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0],", "[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], 
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0], [0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0],", "= numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0],", "[0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0], [0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0], [0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],", "[0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0], [0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0], [0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])", "bitmap = numpy.array([ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0],", "[0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0], [0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0], [0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],", "s = 1 bitmap = numpy.array([ 
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0],", "[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0], [0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0], [0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0], [0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0], [0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0], [0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0], [0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0]," ]
[ "label=loaded[\"label\"] line=int(30000) endline=int(40000) if(len(label)<40000): line=int(len(label)*3./4.) endline=len(label) X=data[0:line] vx=data[line:endline] Y=label[0:line] vy=label[line:endline]", "from sklearn.model_selection import StratifiedKFold from sklearn.model_selection import KFold import scipy.stats", "= timer(None) # timing starts from this point for \"start_time\"", "import numpy as np from sklearn.model_selection import RandomizedSearchCV, GridSearchCV from", "= True, random_state = 1001) random_search = RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb,", "name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt", "# timing starts from this point for \"start_time\" variable random_search.fit(X,", "%d-fold search with %d parameter combinations:' % (folds, param_comb)) print(random_search.best_score_", "%d parameter combinations:' % (folds, param_comb)) print(random_search.best_score_ * 2 -", "divmod((datetime.now() - start_time).total_seconds(), 3600) tmin, tsec = divmod(temp_sec, 60) print('\\n", "start_time).total_seconds(), 3600) tmin, tsec = divmod(temp_sec, 60) print('\\n Time taken:", "* import pandas as pd import argparse from datetime import", "pandas as pd import argparse from datetime import datetime def", "as pd import argparse from datetime import datetime def timer(start_time=None):", "parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio') args=parser.parse_args() import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params =", "random_state=173 ) # Here we go start_time = timer(None) #", "sklearn.metrics import roc_auc_score from sklearn.model_selection import StratifiedKFold from 
sklearn.model_selection import", "%i minutes and %s seconds.' % (thour, tmin, round(tsec, 2)))", "data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"] line=int(30000) endline=int(40000) if(len(label)<40000): line=int(len(label)*3./4.) endline=len(label) X=data[0:line] vx=data[line:endline] Y=label[0:line]", "endline=len(label) X=data[0:line] vx=data[line:endline] Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0] folds = 3 param_comb", "# Here we go start_time = timer(None) # timing starts", "verbose=3, random_state=173 ) # Here we go start_time = timer(None)", "cv=skf.split(X,Y), verbose=3, random_state=173 ) # Here we go start_time =", "and %s seconds.' % (thour, tmin, round(tsec, 2))) parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end", "seconds.' % (thour, tmin, round(tsec, 2))) parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save", "combinations:' % (folds, param_comb)) print(random_search.best_score_ * 2 - 1) #print('\\n", "%i hours %i minutes and %s seconds.' 
% (thour, tmin,", "print('\\n Best normalized gini score for %d-fold search with %d", "tsec = divmod(temp_sec, 60) print('\\n Time taken: %i hours %i", "sts import xgboost as xgb from xiter import * import", "pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on", "pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size')", "range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs')", "from this point for \"start_time\" variable random_search.fit(X, Y) timer(start_time) #print(random_search.predict(X[:10]))", "n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173 ) # Here we go start_time", "if(args.isz==1): if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt))", "tmin, tsec = divmod(temp_sec, 60) print('\\n Time taken: %i hours", "gini score for %d-fold search with %d parameter combinations:' %", "np from sklearn.model_selection import RandomizedSearchCV, GridSearchCV from sklearn.metrics import roc_auc_score", "taken: %i hours %i minutes and %s seconds.' 
% (thour,", "folds = 3 param_comb = 100 skf = KFold(n_splits=folds, shuffle", "args=parser.parse_args() import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params = { 'max_depth':", "import datetime def timer(start_time=None): if not start_time: start_time = datetime.now()", "import KFold import scipy.stats as sts import xgboost as xgb", "import * import pandas as pd import argparse from datetime", "start_time = datetime.now() return start_time elif start_time: thour, temp_sec =", "name on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1')", "from sklearn.model_selection import KFold import scipy.stats as sts import xgboost", "name on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') parser.add_argument(\"--isz\",type=int,default=0,help='0 or z or not')", "for \"start_time\" variable random_search.fit(X, Y) timer(start_time) #print(random_search.predict(X[:10])) #print('\\n All results:')", "3 param_comb = 100 skf = KFold(n_splits=folds, shuffle = True,", "print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt))", "timer(None) # timing starts from this point for \"start_time\" variable", "RandomizedSearchCV, GridSearchCV from sklearn.metrics import roc_auc_score from sklearn.model_selection import StratifiedKFold", "else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"] line=int(30000) endline=int(40000) if(len(label)<40000): 
line=int(len(label)*3./4.)", "else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin", "skf = KFold(n_splits=folds, shuffle = True, random_state = 173) #skf", "point for \"start_time\" variable random_search.fit(X, Y) timer(start_time) #print(random_search.predict(X[:10])) #print('\\n All", "for %d-fold search with %d parameter combinations:' % (folds, param_comb))", "#print('\\n Best hyperparameters:') #print(random_search.best_params_) results = pd.DataFrame(random_search.cv_results_) results.to_csv('xgb/{}-{}.csv'.format(args.save,args.pt), index=False) #random_search.best_estimator_.save_model(\"bdt-{}.dat\".format(args.pt))", "parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample (qq,gg,zq,zg)')", "1) #print('\\n Best hyperparameters:') #print(random_search.best_params_) results = pd.DataFrame(random_search.cv_results_) results.to_csv('xgb/{}-{}.csv'.format(args.save,args.pt), index=False)", "parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network", "we go start_time = timer(None) # timing starts from this", "2.4\") data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"] line=int(30000) endline=int(40000) if(len(label)<40000): line=int(len(label)*3./4.) 
endline=len(label) X=data[0:line] vx=data[line:endline]", "KFold import scipy.stats as sts import xgboost as xgb from", "argparse from datetime import datetime def timer(start_time=None): if not start_time:", "= 3 param_comb = 100 skf = KFold(n_splits=folds, shuffle =", "random_state = 1001) random_search = RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6,", "os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params = { 'max_depth': sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500),", "this point for \"start_time\" variable random_search.fit(X, Y) timer(start_time) #print(random_search.predict(X[:10])) #print('\\n", "Best estimator:') #print(random_search.best_estimator_) print('\\n Best normalized gini score for %d-fold", "'max_depth': sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): if(args.etabin==1):", "- start_time).total_seconds(), 3600) tmin, tsec = divmod(temp_sec, 60) print('\\n Time", "parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num", "elif start_time: thour, temp_sec = divmod((datetime.now() - start_time).total_seconds(), 3600) tmin,", "import StratifiedKFold from sklearn.model_selection import KFold import scipy.stats as sts", "ratio') args=parser.parse_args() import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params = {", "parameter combinations:' % (folds, param_comb)) print(random_search.best_score_ * 2 - 1)", "2))) 
parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on symbols/')", "#skf = StratifiedKFold(n_splits=folds, shuffle = True, random_state = 1001) random_search", "random_search.fit(X, Y) timer(start_time) #print(random_search.predict(X[:10])) #print('\\n All results:') #print(random_search.cv_results_) #print('\\n Best", "= True, random_state = 173) #skf = StratifiedKFold(n_splits=folds, shuffle =", "from datetime import datetime def timer(start_time=None): if not start_time: start_time", ") # Here we go start_time = timer(None) # timing", "start_time: start_time = datetime.now() return start_time elif start_time: thour, temp_sec", "z or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio') args=parser.parse_args()", "(thour, tmin, round(tsec, 2))) parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network", "os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params = { 'max_depth': sts.randint(1,6), 'learning_rate':", "if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\") else: if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else:", "100 skf = KFold(n_splits=folds, shuffle = True, random_state = 173)", "#print(random_search.cv_results_) #print('\\n Best estimator:') 
#print(random_search.best_estimator_) print('\\n Best normalized gini score", "= KFold(n_splits=folds, shuffle = True, random_state = 173) #skf =", "xgb from xiter import * import pandas as pd import", "True, random_state = 173) #skf = StratifiedKFold(n_splits=folds, shuffle = True,", "ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio') args=parser.parse_args() import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params", "variable random_search.fit(X, Y) timer(start_time) #print(random_search.predict(X[:10])) #print('\\n All results:') #print(random_search.cv_results_) #print('\\n", "round(tsec, 2))) parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on", "elif(args.isz==0): if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\") else: if(args.unscale==1):", "as sts import xgboost as xgb from xiter import *", "import scipy.stats as sts import xgboost as xgb from xiter", "pd import argparse from datetime import datetime def timer(start_time=None): if", "temp_sec = divmod((datetime.now() - start_time).total_seconds(), 3600) tmin, tsec = divmod(temp_sec,", "ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample", "parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') 
parser.add_argument(\"--isz\",type=int,default=0,help='0 or z or", "datetime import datetime def timer(start_time=None): if not start_time: start_time =", "else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt))", "param_comb = 100 skf = KFold(n_splits=folds, shuffle = True, random_state", "True, random_state = 1001) random_search = RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss',", "from sklearn.model_selection import RandomizedSearchCV, GridSearchCV from sklearn.metrics import roc_auc_score from", "loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"] line=int(30000) endline=int(40000) if(len(label)<40000):", "KFold(n_splits=folds, shuffle = True, random_state = 173) #skf = StratifiedKFold(n_splits=folds,", "print(\"etabin 1\") else: if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5]", "timer(start_time) #print(random_search.predict(X[:10])) #print('\\n All results:') #print(random_search.cv_results_) #print('\\n Best estimator:') #print(random_search.best_estimator_)", "print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1):", "not start_time: start_time = datetime.now() return start_time elif start_time: 
thour,", "range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu", "parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train", "Best normalized gini score for %d-fold search with %d parameter", "import xgboost as xgb from xiter import * import pandas", "number') parser.add_argument(\"--isz\",type=int,default=0,help='0 or z or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio')", "RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173 ) #", "random_search = RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173", "print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt))", "(folds, param_comb)) print(random_search.best_score_ * 2 - 1) #print('\\n Best hyperparameters:')", "sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): 
if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt))", "os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params = { 'max_depth': sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators':", "* 2 - 1) #print('\\n Best hyperparameters:') #print(random_search.best_params_) results =", "n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173 ) # Here we", "from sklearn.metrics import roc_auc_score from sklearn.model_selection import StratifiedKFold from sklearn.model_selection", "parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') parser.add_argument(\"--isz\",type=int,default=0,help='0", "def timer(start_time=None): if not start_time: start_time = datetime.now() return start_time", "ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio') args=parser.parse_args() import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu)", "loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\") else: if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt))", "go start_time = timer(None) # timing starts from this point", "shuffle = True, random_state = 1001) random_search = RandomizedSearchCV(model, param_distributions=params,", "Time taken: %i hours %i minutes and %s seconds.' 
%", "parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') parser.add_argument(\"--isz\",type=int,default=0,help='0 or z", "shuffle = True, random_state = 173) #skf = StratifiedKFold(n_splits=folds, shuffle", "or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio') args=parser.parse_args() import", "datetime.now() return start_time elif start_time: thour, temp_sec = divmod((datetime.now() -", "} model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1):", "search with %d parameter combinations:' % (folds, param_comb)) print(random_search.best_score_ *", "not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio') args=parser.parse_args() import os", "parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range", "start_time elif start_time: thour, temp_sec = divmod((datetime.now() - start_time).total_seconds(), 3600)", "symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') parser.add_argument(\"--isz\",type=int,default=0,help='0 or z 
or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio')", "if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\") else: if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt))", "#print('\\n Best estimator:') #print(random_search.best_estimator_) print('\\n Best normalized gini score for", "xiter import * import pandas as pd import argparse from", "return start_time elif start_time: thour, temp_sec = divmod((datetime.now() - start_time).total_seconds(),", "= 100 skf = KFold(n_splits=folds, shuffle = True, random_state =", "symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range", "All results:') #print(random_search.cv_results_) #print('\\n Best estimator:') #print(random_search.best_estimator_) print('\\n Best normalized", "datetime def timer(start_time=None): if not start_time: start_time = datetime.now() return", "parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') parser.add_argument(\"--isz\",type=int,default=0,help='0 or z or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end", "loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0):", "parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end 
ratio') args=parser.parse_args() import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size", "numpy as np from sklearn.model_selection import RandomizedSearchCV, GridSearchCV from sklearn.metrics", "sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt)) else:", "3600) tmin, tsec = divmod(temp_sec, 60) print('\\n Time taken: %i", "random_state = 173) #skf = StratifiedKFold(n_splits=folds, shuffle = True, random_state", "vx=data[line:endline] Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0] folds = 3 param_comb = 100", "vy=label[line:endline] Y=np.array(Y)[:,0] folds = 3 param_comb = 100 skf =", "elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1): if(args.unscale==1):", "parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1')", "normalized gini score for %d-fold search with %d parameter combinations:'", "import argparse from datetime import datetime def timer(start_time=None): if not", "print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\") else:", "timing starts from this point for \"start_time\" variable random_search.fit(X, 
Y)", "if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"] line=int(30000) endline=int(40000)", "\"start_time\" variable random_search.fit(X, Y) timer(start_time) #print(random_search.predict(X[:10])) #print('\\n All results:') #print(random_search.cv_results_)", "model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1):", "= 173) #skf = StratifiedKFold(n_splits=folds, shuffle = True, random_state =", "#print(random_search.predict(X[:10])) #print('\\n All results:') #print(random_search.cv_results_) #print('\\n Best estimator:') #print(random_search.best_estimator_) print('\\n", "#print(random_search.best_estimator_) print('\\n Best normalized gini score for %d-fold search with", "epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') parser.add_argument(\"--isz\",type=int,default=0,help='0 or", "or z or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio')", "= datetime.now() return start_time elif start_time: thour, temp_sec = divmod((datetime.now()", "sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) 
print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt))", "Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0] folds = 3 param_comb = 100 skf", "param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173 ) # Here", "60) print('\\n Time taken: %i hours %i minutes and %s", "scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173 ) # Here we go", "% (thour, tmin, round(tsec, 2))) parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name')", "if not start_time: start_time = datetime.now() return start_time elif start_time:", "2 - 1) #print('\\n Best hyperparameters:') #print(random_search.best_params_) results = pd.DataFrame(random_search.cv_results_)", "score for %d-fold search with %d parameter combinations:' % (folds,", "= { 'max_depth': sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\")", "batch_size=args.batch_size params = { 'max_depth': sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101)", "timer(start_time=None): if not start_time: start_time = datetime.now() return start_time elif", "start_time = timer(None) # timing starts from this point for", "StratifiedKFold(n_splits=folds, shuffle = True, random_state = 1001) random_search = RandomizedSearchCV(model,", "as np from sklearn.model_selection import RandomizedSearchCV, GridSearchCV from sklearn.metrics import", "%s seconds.' 
% (thour, tmin, round(tsec, 2))) parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio')", "from xiter import * import pandas as pd import argparse", "#print('\\n All results:') #print(random_search.cv_results_) #print('\\n Best estimator:') #print(random_search.best_estimator_) print('\\n Best", "sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range", "as xgb from xiter import * import pandas as pd", "% (folds, param_comb)) print(random_search.best_score_ * 2 - 1) #print('\\n Best", "1\") else: if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"]", "thour, temp_sec = divmod((datetime.now() - start_time).total_seconds(), 3600) tmin, tsec =", "parser.add_argument(\"--isz\",type=int,default=0,help='0 or z or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end", "parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which", "loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"] line=int(30000) endline=int(40000) if(len(label)<40000): line=int(len(label)*3./4.) 
endline=len(label)", "X=data[0:line] vx=data[line:endline] Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0] folds = 3 param_comb =", "Y=np.array(Y)[:,0] folds = 3 param_comb = 100 skf = KFold(n_splits=folds,", "params = { 'max_depth': sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101) }", "import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(args.gpu) batch_size=args.batch_size params = { 'max_depth': sts.randint(1,6),", "with %d parameter combinations:' % (folds, param_comb)) print(random_search.best_score_ * 2", "print(random_search.best_score_ * 2 - 1) #print('\\n Best hyperparameters:') #print(random_search.best_params_) results", "sklearn.model_selection import StratifiedKFold from sklearn.model_selection import KFold import scipy.stats as", "xgboost as xgb from xiter import * import pandas as", "minutes and %s seconds.' % (thour, tmin, round(tsec, 2))) parser=argparse.ArgumentParser()", "tmin, round(tsec, 2))) parser=argparse.ArgumentParser() parser.add_argument(\"--end\",type=float,default=100000.,help='end ratio') parser.add_argument(\"--save\",type=str,default=\"test_\",help='save name') parser.add_argument(\"--network\",type=str,default=\"rnn\",help='network name", "sklearn.model_selection import RandomizedSearchCV, GridSearchCV from sklearn.metrics import roc_auc_score from sklearn.model_selection", "= divmod((datetime.now() - start_time).total_seconds(), 3600) tmin, tsec = divmod(temp_sec, 60)", "hours %i minutes and %s seconds.' 
% (thour, tmin, round(tsec,", "loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else:", "'n_estimators': sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt))", "Y) timer(start_time) #print(random_search.predict(X[:10])) #print('\\n All results:') #print(random_search.cv_results_) #print('\\n Best estimator:')", "divmod(temp_sec, 60) print('\\n Time taken: %i hours %i minutes and", "if(len(label)<40000): line=int(len(label)*3./4.) endline=len(label) X=data[0:line] vx=data[line:endline] Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0] folds =", "results:') #print(random_search.cv_results_) #print('\\n Best estimator:') #print(random_search.best_estimator_) print('\\n Best normalized gini", "(qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1')", "'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1): if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt))", "scipy.stats as sts import xgboost as xgb from xiter import", "start_time: thour, temp_sec = divmod((datetime.now() - start_time).total_seconds(), 3600) tmin, tsec", "print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5] 
label=loaded[\"label\"] line=int(30000) endline=int(40000) if(len(label)<40000): line=int(len(label)*3./4.) endline=len(label) X=data[0:line]", "import pandas as pd import argparse from datetime import datetime", "if(args.etabin==1): loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt))", "= StratifiedKFold(n_splits=folds, shuffle = True, random_state = 1001) random_search =", "StratifiedKFold from sklearn.model_selection import KFold import scipy.stats as sts import", "parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on symbols/')", "1001) random_search = RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3,", "Here we go start_time = timer(None) # timing starts from", "= divmod(temp_sec, 60) print('\\n Time taken: %i hours %i minutes", "roc_auc_score from sklearn.model_selection import StratifiedKFold from sklearn.model_selection import KFold import", "173) #skf = StratifiedKFold(n_splits=folds, shuffle = True, random_state = 1001)", "on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number') parser.add_argument(\"--isz\",type=int,default=0,help='0 or z or not') parser.add_argument(\"--eta\",type=float,default=0.,help='end", "= RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173 )", 
"loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\")", "print('\\n Time taken: %i hours %i minutes and %s seconds.'", "if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"qqmixed{}pt.npz\".format(args.pt)) print(\"qqmixed{}pt.npz\".format(args.pt)) elif(args.isz==0): if(args.etabin==1): if(args.unscale==1): loaded=np.load(\"unscalemixed{}pteta.npz\".format(args.pt))", "loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\") else: if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\")", "train sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt", "import roc_auc_score from sklearn.model_selection import StratifiedKFold from sklearn.model_selection import KFold", "{ 'max_depth': sts.randint(1,6), 'learning_rate': sts.uniform(0.0010,0.500), 'n_estimators': sts.randint(10,101) } model=xgb.XGBClassifier(objective='binary:logistic',tree_method=\"gpu_hist\") if(args.isz==1):", "range pt~pt*1.1') parser.add_argument(\"--ptmax\",type=float,default=2.,help='pt range pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name", "else: loaded=np.load(\"mixed{}pteta.npz\".format(args.pt)) print(\"etabin 1\") else: if(args.unscale==1): 
loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin", "starts from this point for \"start_time\" variable random_search.fit(X, Y) timer(start_time)", "- 1) #print('\\n Best hyperparameters:') #print(random_search.best_params_) results = pd.DataFrame(random_search.cv_results_) results.to_csv('xgb/{}-{}.csv'.format(args.save,args.pt),", "line=int(30000) endline=int(40000) if(len(label)<40000): line=int(len(label)*3./4.) endline=len(label) X=data[0:line] vx=data[line:endline] Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0]", "on symbols/') parser.add_argument(\"--right\",type=str,default=\"/scratch/yjdata/gluon100_img\",help='which train sample (qq,gg,zq,zg)') parser.add_argument(\"--pt\",type=int,default=200,help='pt range pt~pt*1.1') parser.add_argument(\"--ptmin\",type=float,default=0.,help='pt", "else: if(args.unscale==1): loaded=np.load(\"unscalemixed{}pt.npz\".format(args.pt)) else: loaded=np.load(\"mixed{}pt.npz\".format(args.pt)) print(\"etabin 2.4\") data=loaded[\"bdtset\"][:,:5] label=loaded[\"label\"] line=int(30000)", "param_comb)) print(random_search.best_score_ * 2 - 1) #print('\\n Best hyperparameters:') #print(random_search.best_params_)", "line=int(len(label)*3./4.) endline=len(label) X=data[0:line] vx=data[line:endline] Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0] folds = 3", "endline=int(40000) if(len(label)<40000): line=int(len(label)*3./4.) 
endline=len(label) X=data[0:line] vx=data[line:endline] Y=label[0:line] vy=label[line:endline] Y=np.array(Y)[:,0] folds", "pt~pt*1.1') parser.add_argument(\"--epochs\",type=int,default=10,help='num epochs') parser.add_argument(\"--batch_size\",type=int,default=100000,help='batch_size') parser.add_argument(\"--loss\",type=str,default=\"categorical_crossentropy\",help='network name on symbols/') parser.add_argument(\"--gpu\",type=int,default=0,help='gpu number')", "loaded=np.load(\"zqmixed{}pteta.npz\".format(args.pt)) print(\"zqmixed{}pteta.npz\".format(args.pt)) else: loaded=np.load(\"zqmixed{}pt.npz\".format(args.pt)) print(\"zqmixed{}pt.npz\".format(args.pt)) elif(args.isz==-1): if(args.etabin==1): loaded=np.load(\"qqmixed{}pteta.npz\".format(args.pt)) print(\"qqmixed{}pteta.npz\".format(args.pt)) else:", "estimator:') #print(random_search.best_estimator_) print('\\n Best normalized gini score for %d-fold search", "GridSearchCV from sklearn.metrics import roc_auc_score from sklearn.model_selection import StratifiedKFold from", "parser.add_argument(\"--eta\",type=float,default=0.,help='end ratio') parser.add_argument(\"--etabin\",type=float,default=1,help='end ratio') parser.add_argument(\"--unscale\",type=int,default=0,help='end ratio') args=parser.parse_args() import os os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\"", "sklearn.model_selection import KFold import scipy.stats as sts import xgboost as", "= 1001) random_search = RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y),", "import RandomizedSearchCV, GridSearchCV from sklearn.metrics import roc_auc_score from sklearn.model_selection import" ]
[ "BUY # record action as buy self.exit_price = self.close_price self.reward", "action sequence is just considered hold # (e.g.) \"buy -", "MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis / MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES,", "self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy = 0 self.total_buys_value", "price self.shares_held = 0 # clear the shares_ elif action", "self.n_short}} #return obs, reward, done, {} def get_profit(self): if(self.position ==", "self.current_step += 1 delay_modifier = (self.current_step / MAX_STEPS) # profits", "# Set the current step to a random point within", "= self.df['close'].values self.reward_range = (0, MAX_ACCOUNT_BALANCE) # Actions of the", "buy, 2 sell, 0 hold # single position can be", "= shares_bought * self.entry_price # buy balance commission = round(self.fee", "self.cost_basis = 0 self.total_shares_buy =0 self.total_buys_value=0 self.total_shares_sold = 0 self.total_sales_value", "# position constant FLAT = 0 # no position LONG", "= SELL self.entry_price = self.close_price # Sell amount % of", "6) # the observation include the given period history data", "+ self.reward),2) # calcuate the total balance self.n_short += 1", "amount)//100 *100 self.krw_balance = shares_bought * self.entry_price # buy balance", "/ MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0)", "= {'render.modes': ['human']} def __init__(self, df,show_trade=True): super(StockTradingEnv, self).__init__() # show", "#random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for i in range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation()", "sales value: {self.total_sales_value})') print(f'Net worth: {self.net_worth} (Max net worth: {self.max_net_worth})')", "self.reward += ((self.entry_price - self.exit_price) / self.exit_price + 1) *", 
"self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values", "sell position # action constant HOLD = 0 BUY =", "DATA_HIS_PERIOD = 5 # position constant FLAT = 0 #", ": buy - hold - hold - sell # SHORT", "shares total_possible = int(self.balance / self.close_price) shares_bought = int(total_possible *", "((self.close_Price - self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 - 1 elif(self.position == SHORT):", "self.shares_held = 0 self.entry_price = 0 # [coin + krw_won]", "super(StockTradingEnv, self).__init__() # show the trade info self.show_trade = show_trade", "self.action_space = spaces.Discrete(len(self.actions)) # self.action_space = spaces.Box( # low=np.array([0, 0]),", "- self.fee) ** 2 - 1 # calculate reward #self.krw_balance", "= self.close_price # Sell amount % of shares held total_possible", "self.exit_price = self.close_price self.reward += ((self.exit_price - self.entry_price) / self.entry_price", "#update position to long self.action = BUY # record action", "self.balance + new_portfolio if self.net_worth > self.max_net_worth: self.max_net_worth = self.net_worth", "self.reward_range = (0, MAX_ACCOUNT_BALANCE) # Actions of the format Buy", "shares_bought * self.entry_price # buy balance commission = round(self.fee *", "fee self.shares_held = shares_bought self.balance -= self.krw_balance-commission #self.cost_basis = (prev_cost", "new_portfolio if self.net_worth > self.max_net_worth: self.max_net_worth = self.net_worth if self.shares_held", "amount)//100 *100 self.krw_balance = self.shares_held * self.entry_price # buy balance", "- 1 #self.krw_balance = self.krw_balance * (1.0 + self.reward) self.balance", "1): self.done = True self.reward = self.get_profit() # return reward", "== FLAT: # if previous position was flat 
self.position =", "/ MAX_NUM_SHARES, self.total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0) return obs def", "=0 self.total_buys_value=0 self.total_shares_sold = 0 self.total_sales_value = 0 self.n_long=0 self.n_short=0", "if self.position == FLAT: self.position = SHORT self.action = SELL", "self.action_space = spaces.Box( # low=np.array([0, 0]), high=np.array([3, 1]), dtype=np.float16) #", "MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES, ]) # Append additional data", "/ self.exit_price + 1) * ( 1 - self.fee) **", "self.total_buys_value = 0 self.total_shares_sold = 0 self.total_sales_value = 0 self.n_long", "=0 self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy =0 self.total_buys_value=0", "low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history = [] def _next_observation(self): obs", "INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward =0 self.portfolio =0 self.shares_held =", "-= self.krw_balance-commission elif self.position == LONG: self.position = FLAT self.action", "krw-won self.balance += round(self.krw_balance * (1.0 + self.reward),2) # calcuate", "]) # Append additional data and scale each value to", "self.reward = 0 self.portfolio = 0 self.shares_held = 0 self.cost_basis", "self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step, \"close\"]) # Set the current price to", "0 self.net_worth = self.balance + new_portfolio if self.net_worth > self.max_net_worth:", "pd import numpy as np MAX_ACCOUNT_BALANCE = 2147483647 MAX_NUM_SHARES =", "self.entry_price = 0 # [coin + krw_won] total value evaluated", "in krw won if (self.position == LONG): temp_reward = ((self.close_price", "position SHORT = 2 # sell position # action constant", "MAX_NUM_SHARES = 2147483647 MAX_SHARE_PRICE = 5000 MAX_VOLUME = 1000e8 MAX_AMOUNT", "balance commission = round(self.fee * self.krw_balance,2) # commission fee self.balance", "self.history, 
\"n_trades\": {'long': self.n_long, 'short': self.n_short}} #return obs, reward, done,", "self.krw_balance * (1.0 + self.reward) # evaluate cumulative return in", "self.shares_held / MAX_NUM_SHARES, self.cost_basis / MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.total_sales_value", "reserve # action comes from the agent # 1 buy,", "== SHORT): profit = ((self.entry_price - self.close_Price)/self.close_Price + 1)*(1-self.fee)**2 -", "sell - hold - hold - buy # invalid action", "additional_cost) / (self.shares_held + shares_bought) elif self.position == SHORT: #", "+ temp_reward) else: temp_reward = 0 new_portfolio = 0 self.net_worth", "= self.close_price self.reward += ((self.exit_price - self.entry_price) / self.entry_price +", "buy self.exit_price = self.close_price self.reward += ((self.entry_price - self.exit_price) /", "worth: {self.net_worth} (Max net worth: {self.max_net_worth})') print(f'Profit: {profit}') return profit", "sequence is just considered hold # (e.g.) \"buy - buy\"", "# if previous position was flat self.position = LONG #update", "= FLAT # update position to flat self.action = BUY", "- sell # SHORT : sell - hold - hold", "the environment to an initial state self.action = HOLD self.position", "# current_price = random.uniform( # self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step, \"close\"]) #", "- 1 # calculate reward #self.krw_balance = self.krw_balance * (1.0", "= round(new_portfolio,2) def step(self, action): # Execute one time step", "history data self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for i in", "= 2147483647 MAX_SHARE_PRICE = 5000 MAX_VOLUME = 1000e8 MAX_AMOUNT =", "== 0: self.cost_basis = 0 self.portfolio = round(new_portfolio,2) def step(self,", "/ MAX_STEPS) # profits #reward = self.net_worth - INITIAL_ACCOUNT_BALANCE #reward", "per trade # valid action sequence would be # LONG", "be considred \"buy - hold\" self.action = HOLD #hold 
if", "point within the data frame # self.current_step = random.randint( #", "= LONG #update position to long self.action = BUY #", "cumulative return in krw-won self.balance += round(self.krw_balance * (1.0 +", "elif (self.position == SHORT): temp_reward = ((self.entry_price - self.close_price) /", "=0 self.portfolio =0 self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy", "0: self.cost_basis = 0 self.portfolio = round(new_portfolio,2) def step(self, action):", "{self.shares_held} (Total sold: {self.total_shares_sold})') print(f'Avg cost for held shares: {self.cost_basis}", "self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES, ]) #", "def _take_action(self, action): # Set the current price to a", "invalid action sequence is just considered hold # (e.g.) \"buy", "= 0 self.portfolio = round(new_portfolio,2) def step(self, action): # Execute", "self.position = FLAT self.balance = INITIAL_ACCOUNT_BALANCE self.net_worth = INITIAL_ACCOUNT_BALANCE self.max_net_worth", "0 # [coin + krw_won] total value evaluated in krw", "np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth / MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis", "= random.uniform( # self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step, \"close\"]) # Set the", "= 1 if reward > 0 else -100 if self.net_worth", "obs def _take_action(self, action): # Set the current price to", "self.n_short)) # save the history data self.history.append([ self.action, self.position, self.current_step,", "profit = ((self.close_Price - self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 - 1 elif(self.position", "= np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth / MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES,", "_take_action(self, action): # Set the current price to a random", "single 
position can be opened per trade # valid action", "and scale each value to between 0-1 obs = np.append(obs,[[self.balance", "((self.close_price - self.entry_price) / self.entry_price + 1) * ( 1", "# record action as buy self.exit_price = self.close_price self.reward +=", "in shares total_possible = int(self.balance / self.close_price) shares_bought = int(total_possible", "= INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward =0", "+ 1)*(1-self.fee)**2 - 1 elif(self.position == SHORT): profit = ((self.entry_price", "the last five prices self.observation_space = spaces.Box( low=0, high=1, shape=(DATA_HIS_PERIOD+1,6),", "period history data self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for i", "** 2 - 1 #self.krw_balance = self.krw_balance * (1.0 +", "the environment to the screen profit = self.net_worth - INITIAL_ACCOUNT_BALANCE", "1 delay_modifier = (self.current_step / MAX_STEPS) # profits #reward =", "self.portfolio = 0 self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy", "current price to the last close price self.close_price = self.df.loc[self.current_step,\"close\"]", "* (1.0 + temp_reward) else: temp_reward = 0 new_portfolio =", "= SELL self.exit_price = self.close_price self.reward += ((self.exit_price - self.entry_price)", "if (self.show_trade and self.current_step % 1 == 0): print(\"Tick: {0}/", "price within the time step # current_price = random.uniform( #", "to the last close price self.close_price = self.df.loc[self.current_step,\"close\"] amount =", "LONG): temp_reward = ((self.close_price - self.entry_price) / self.entry_price + 1)", "{0}/ Portfolio (krw-won): {1}, balance: {2}\".format(self.current_step, self.portfolio,self.net_worth)) print(\"Long: {0}/ Short:", "= round(self.fee * self.krw_balance,2) # commission fee self.shares_held = shares_bought", "done = False self.current_step += 1 delay_modifier = (self.current_step /", 
"0 self.n_short = 0 self.history=[] # done = True if", "= self.balance + new_portfolio if self.net_worth > self.max_net_worth: self.max_net_worth =", "total balance self.n_short += 1 # record number of short", "obs, self.net_worth, done, {'portfolio': np.array([self.portfolio]), \"history\": self.history, \"n_trades\": {'long': self.n_long,", "profit def reset(self, new_df=None): # Reset the state of the", "MAX_OPEN_POSITIONS = 5 MAX_STEPS = 20000 MAX_DAY_CHANGE = 1 INITIAL_ACCOUNT_BALANCE", "data frame # self.current_step = random.randint( # 0, len(self.df.loc[:, 'open'].values)", "self.portfolio,self.net_worth)) print(\"Long: {0}/ Short: {1}\".format(self.n_long, self.n_short)) # save the history", "the total balance self.n_short += 1 # record number of", "= 1 SELL = 2 class StockTradingEnv(gym.Env): \"\"\"A stock trading", "considered hold # (e.g.) \"buy - buy\" would be considred", "= 0 return profit def reset(self, new_df=None): # Reset the", "0 self.cost_basis = 0 self.total_shares_buy = 0 self.total_buys_value = 0", "0]), high=np.array([3, 1]), dtype=np.float16) # Prices contains the OHCL values", "dataset to environment if new_df: self.df = new_df # Set", "self.current_step, self.closingPrice, self.portfolio, self.reward)) obs = self._next_observation() if (self.current_step >", "action as buy self.entry_price = self.close_price # Buy amount %", "* self.krw_balance,2) # commission fee self.balance -= self.krw_balance-commission elif self.position", "buy position SHORT = 2 # sell position # action", "show the trade info self.show_trade = show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee =", "Set the current price to the last close price self.close_price", "= DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for i in range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0])", "- hold - buy # invalid action sequence is just", "Sell amount % of shares held total_possible = int(self.balance /", 
"render(self, mode='human', close=False): # Render the environment to the screen", "buy balance commission = round(self.fee * self.krw_balance,2) # commission fee", "# self.current_step = random.randint( # 0, len(self.df.loc[:, 'open'].values) - 6)", "shares_bought) elif self.position == SHORT: # if previous position was", "Execute one time step within the environment self._take_action(action) done =", "the current step to a random point within the data", "SHORT : sell - hold - hold - buy #", "== 0): print(\"Tick: {0}/ Portfolio (krw-won): {1}, balance: {2}\".format(self.current_step, self.portfolio,self.net_worth))", "-100 if self.net_worth <= 0: done = True if self.current_step", "be # LONG : buy - hold - hold -", "1)*(1-self.fee)**2 - 1 elif(self.position == SHORT): profit = ((self.entry_price -", "else: profit = 0 return profit def reset(self, new_df=None): #", "self.balance, self.max_net_worth, self.shares_held, self.portfolio, self.total_shares_buy, self.total_buys_value, self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action, self.current_step,", "INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward =0 self.portfolio", "# clear the shares_ elif action == SELL: if self.position", "spaces.Box( # low=np.array([0, 0]), high=np.array([3, 1]), dtype=np.float16) # Prices contains", "self.df = df self.closeprices = self.df['close'].values self.reward_range = (0, MAX_ACCOUNT_BALANCE)", "= np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE,", "(1.0 + self.reward),2) # calcuate the total balance self.n_short +=", "gym from gym import spaces import pandas as pd import", "this variable, so reserve # action comes from the agent", "record action as buy self.entry_price = self.close_price # Buy amount", "buy self.entry_price = self.close_price 
# Buy amount % of balance", "self.action = BUY # record action as buy self.exit_price =", "self.fee) ** 2 - 1 #self.krw_balance = self.krw_balance * (1.0", "self.history=[] # done = True if (self.show_trade and self.current_step %", "self.total_shares_buy =0 self.total_buys_value=0 self.total_shares_sold = 0 self.total_sales_value = 0 self.n_long=0", "self.krw_balance = self.shares_held * self.entry_price # buy balance commission =", "self.entry_price # buy balance commission = round(self.fee * self.krw_balance,2) #", "1: self.current_step = DATA_HIS_PERIOD # loop training # when loop", "end of the game return obs, self.net_worth, done, {'portfolio': np.array([self.portfolio]),", "# LONG : buy - hold - hold - sell", "action constant HOLD = 0 BUY = 1 SELL =", "0 self.n_long = 0 self.n_short = 0 self.history=[] # done", "+ 1) * ( 1 - self.fee) ** 2 -", "= 0 self.total_shares_buy =0 self.total_buys_value=0 self.total_shares_sold = 0 self.total_sales_value =", "# Sell amount % of shares held total_possible = int(self.balance", "- self.fee) ** 2 - 1 #self.krw_balance = self.krw_balance *", "print(f'Step: {self.current_step}') print(f'Balance: {self.balance}') print(f'Shares held: {self.shares_held} (Total sold: {self.total_shares_sold})')", "self.n_long += 1 self.total_shares_buy += self.shares_held self.total_buys_value += self.shares_held *", "#self.krw_balance = self.krw_balance * (1.0 + self.reward) # evaluate cumulative", "MAX_DAY_CHANGE = 1 INITIAL_ACCOUNT_BALANCE = 10000 DATA_HIS_PERIOD = 5 #", "import gym from gym import spaces import pandas as pd", "krw_won] total value evaluated in krw won if (self.position ==", "( 1 - self.fee) ** 2 - 1 new_portfolio =", "== SHORT): temp_reward = ((self.entry_price - self.close_price) / self.close_price +", "(self.df.shape[0]) - 1): self.done = True self.reward = self.get_profit() #", "from gym import spaces import pandas as pd import numpy", "= 0.0005 # brokage commission self.df = df self.closeprices =", "just considered 
hold # (e.g.) \"buy - buy\" would be", "self.history=[] # pass test dataset to environment if new_df: self.df", "self.krw_balance = 0 self.reward =0 self.portfolio =0 self.shares_held = 0", "print(\"Tick: {0}/ Portfolio (krw-won): {1}, balance: {2}\".format(self.current_step, self.portfolio,self.net_worth)) print(\"Long: {0}/", "self.history.append([ self.action, self.position, self.current_step, self.close_price, self.krw_balance, self.balance, self.max_net_worth, self.shares_held, self.portfolio,", "elif self.position == LONG: self.position = FLAT self.action = SELL", "history data self.history.append([ self.action, self.position, self.current_step, self.close_price, self.krw_balance, self.balance, self.max_net_worth,", "to between 0-1 obs = np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth /", "(krw-won): {1}, balance: {2}\".format(self.current_step, self.portfolio,self.net_worth)) print(\"Long: {0}/ Short: {1}\".format(self.n_long, self.n_short))", "* amount)//100 *100 self.krw_balance = shares_bought * self.entry_price # buy", "0 self.reward = 0 self.portfolio = 0 self.shares_held = 0", "self.position == FLAT: self.position = SHORT self.action = SELL self.entry_price", "action): # Execute one time step within the environment self._take_action(action)", "= 0 self.cost_basis = 0 self.total_shares_buy =0 self.total_buys_value=0 self.total_shares_sold =", "within the environment self._take_action(action) done = False self.current_step += 1", "gym import spaces import pandas as pd import numpy as", "self.entry_price = self.close_price # Sell amount % of shares held", "self.close_price self.shares_held = 0 self.entry_price = 0 # [coin +", "# buy position SHORT = 2 # sell position #", "number of short self.total_shares_sold += self.shares_held self.total_sales_value += self.shares_held *", "for i in range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation() def render(self,", "- hold\" self.action = 
HOLD #hold if action == BUY:", "FLAT: self.position = SHORT self.action = SELL self.entry_price = self.close_price", "BUY = 1 SELL = 2 class StockTradingEnv(gym.Env): \"\"\"A stock", "/ MAX_ACCOUNT_BALANCE, self.max_net_worth / MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis /", "(self.current_step / MAX_STEPS) # profits #reward = self.net_worth - INITIAL_ACCOUNT_BALANCE", "self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 - 1 elif(self.position == SHORT): profit =", "/ (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0) return obs def _take_action(self, action): #", "self.total_sales_value = 0 self.n_long=0 self.n_short=0 self.history=[] # pass test dataset", "the format Buy x%, Sell x%, Hold, etc. self.action_space =", "(self.position == SHORT): temp_reward = ((self.entry_price - self.close_price) / self.close_price", "= 2 # sell position # action constant HOLD =", "#self.krw_balance = self.krw_balance * (1.0 + self.reward) self.balance += round(self.krw_balance*(1.0+self.reward),2)", "= ((self.close_Price - self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 - 1 elif(self.position ==", "'short': self.n_short}} #return obs, reward, done, {} def get_profit(self): if(self.position", "[coin + krw_won] total value evaluated in krw won if", "import pandas as pd import numpy as np MAX_ACCOUNT_BALANCE =", "import random import json import gym from gym import spaces", "at end of the game return obs, self.net_worth, done, {'portfolio':", "spaces.Box( low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history = [] def _next_observation(self):", "import json import gym from gym import spaces import pandas", "= self.krw_balance * (1.0 + self.reward) # evaluate cumulative return", "as np MAX_ACCOUNT_BALANCE = 2147483647 MAX_NUM_SHARES = 2147483647 MAX_SHARE_PRICE =", "so reserve # action comes from the agent # 1", "1 buy, 2 sell, 0 hold # single position can", "between 0-1 obs = np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE, 
self.max_net_worth / MAX_ACCOUNT_BALANCE,", "balance commission = round(self.fee * self.krw_balance,2) # commission fee self.shares_held", "shares_ elif action == SELL: if self.position == FLAT: self.position", "self.position = FLAT self.action = SELL self.exit_price = self.close_price self.reward", "= self.krw_balance * (1.0 + temp_reward) elif (self.position == SHORT):", "= 0 self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy =", "to environment if new_df: self.df = new_df # Set the", "# action constant HOLD = 0 BUY = 1 SELL", "self.shares_held * self.close_price self.shares_held = 0 self.entry_price = 0 #", "if action == BUY: #buy if self.position == FLAT: #", "self.total_shares_buy = 0 self.total_buys_value = 0 self.total_shares_sold = 0 self.total_sales_value", "self.krw_balance * (1.0 + temp_reward) elif (self.position == SHORT): temp_reward", "self.close_Price)/self.close_Price + 1)*(1-self.fee)**2 - 1 else: profit = 0 return", "/ MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES,", "['human']} def __init__(self, df,show_trade=True): super(StockTradingEnv, self).__init__() # show the trade", "'high'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values /", "# record number of short self.total_shares_sold += self.shares_held self.total_sales_value +=", "# invalid action sequence is just considered hold # (e.g.)", "= 0 self.reward =0 self.portfolio =0 self.shares_held = 0 self.cost_basis", "self.n_short = 0 self.history=[] # done = True if (self.show_trade", "self.entry_price = self.close_price # Buy amount % of balance in", "self.total_sales_value]) #self.history.append((self.action, self.current_step, self.closingPrice, 
self.portfolio, self.reward)) obs = self._next_observation() if", "== LONG): profit = ((self.close_Price - self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 -", "self.krw_balance-commission elif self.position == LONG: self.position = FLAT self.action =", "> self.max_net_worth: self.max_net_worth = self.net_worth if self.shares_held == 0: self.cost_basis", "False self.current_step += 1 delay_modifier = (self.current_step / MAX_STEPS) #", "> len(self.df.loc[:, 'open'].values) - 1: self.current_step = DATA_HIS_PERIOD # loop", "# evaluate cumulative return in krw-won self.balance += round(self.krw_balance *", "1 elif(self.position == SHORT): profit = ((self.entry_price - self.close_Price)/self.close_Price +", "np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step,", "((self.entry_price - self.close_price) / self.close_price + 1) * ( 1", "(Total sales value: {self.total_sales_value})') print(f'Net worth: {self.net_worth} (Max net worth:", "history self.action = HOLD self.position = FLAT self.balance = INITIAL_ACCOUNT_BALANCE", "/ (self.shares_held + shares_bought) elif self.position == SHORT: # if", "self.close_price # Buy amount % of balance in shares total_possible", "= ((self.close_price - self.entry_price) / self.entry_price + 1) * (", "initial state self.action = HOLD self.position = FLAT self.balance =", "self.max_net_worth / MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis / MAX_SHARE_PRICE, self.total_shares_sold", "(1.0 + self.reward) # evaluate cumulative return in krw-won self.balance", "metadata = {'render.modes': ['human']} def __init__(self, df,show_trade=True): super(StockTradingEnv, self).__init__() #", "fee self.balance -= self.krw_balance-commission elif self.position == LONG: self.position =", "cost for held 
shares: {self.cost_basis} (Total sales value: {self.total_sales_value})') print(f'Net", "shares: {self.cost_basis} (Total sales value: {self.total_sales_value})') print(f'Net worth: {self.net_worth} (Max", "# Set the current price to the last close price", "has this variable, so reserve # action comes from the", "= (prev_cost + additional_cost) / (self.shares_held + shares_bought) elif self.position", "temp_reward) else: temp_reward = 0 new_portfolio = 0 self.net_worth =", "show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee = 0.0005 # brokage commission self.df =", "\"open\"], self.df.loc[self.current_step, \"close\"]) # Set the current price to the", "environment to the screen profit = self.net_worth - INITIAL_ACCOUNT_BALANCE print('-'*30)", "1 SELL = 2 class StockTradingEnv(gym.Env): \"\"\"A stock trading environment", "an initial state self.action = HOLD self.position = FLAT self.balance", "SELL = 2 class StockTradingEnv(gym.Env): \"\"\"A stock trading environment for", "= self.close_price # Buy amount % of balance in shares", "to the screen profit = self.net_worth - INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step:", "= 1000e8 MAX_AMOUNT = 3e10 MAX_OPEN_POSITIONS = 5 MAX_STEPS =", "total_possible = int(self.balance / self.close_price) self.shares_held = int(total_possible * amount)//100", "= ((self.entry_price - self.close_price) / self.close_price + 1) * (", "((self.entry_price - self.exit_price) / self.exit_price + 1) * ( 1", "+= 1 delay_modifier = (self.current_step / MAX_STEPS) # profits #reward", "= True self.reward = self.get_profit() # return reward at end", "the current price to a random price within the time", "* self.entry_price # buy balance commission = round(self.fee * self.krw_balance,2)", "( 1 - self.fee) ** 2 - 1 # calculate", "if self.net_worth > self.max_net_worth: self.max_net_worth = self.net_worth if self.shares_held ==", "self.position = LONG #update position to long self.action = BUY", "dtype=np.float16) # Prices contains 
the OHCL values for the last", "OHCL values for the last five prices self.observation_space = spaces.Box(", "krw won if (self.position == LONG): temp_reward = ((self.close_price -", "each value to between 0-1 obs = np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE,", "price self.close_price = self.df.loc[self.current_step,\"close\"] amount = 0.5 #the old version", "stock trading environment for OpenAI gym\"\"\" metadata = {'render.modes': ['human']}", "= int(self.balance / self.close_price) self.shares_held = int(total_possible * amount)//100 *100", "0-1 obs = np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth / MAX_ACCOUNT_BALANCE, self.shares_held", "(self.current_step > (self.df.shape[0]) - 1): self.done = True self.reward =", "self.total_buys_value=0 self.total_shares_sold = 0 self.total_sales_value = 0 self.n_long=0 self.n_short=0 self.history=[]", "if previous position was short self.position = FLAT # update", "agent # 1 buy, 2 sell, 0 hold # single", "- self.entry_price) / self.entry_price + 1) * ( 1 -", "self.portfolio = round(new_portfolio,2) def step(self, action): # Execute one time", "self.n_long=0 self.n_short=0 self.history=[] # pass test dataset to environment if", "if(self.position == LONG): profit = ((self.close_Price - self.entry_price)/self.entry_price + 1)*(1-self.fee)**2", "a random price within the time step # current_price =", "if previous position was flat self.position = LONG #update position", "value evaluated in krw won if (self.position == LONG): temp_reward", "print(f'Net worth: {self.net_worth} (Max net worth: {self.max_net_worth})') print(f'Profit: {profit}') return", "* self.krw_balance,2) # commission fee self.shares_held = shares_bought self.balance -=", "self.fee) ** 2 - 1 # calculate reward #self.krw_balance =", "* self.close_price self.entry_price = 0 # clear entry price self.shares_held", "2 sell, 0 hold # single position can be opened", "= ((self.entry_price - self.close_Price)/self.close_Price + 
1)*(1-self.fee)**2 - 1 else: profit", "self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action, self.current_step, self.closingPrice, self.portfolio, self.reward)) obs = self._next_observation()", "short self.total_shares_sold += self.shares_held self.total_sales_value += self.shares_held * self.close_price self.entry_price", "/ self.entry_price + 1) * ( 1 - self.fee) **", "profit = 0 return profit def reset(self, new_df=None): # Reset", "MAX_VOLUME = 1000e8 MAX_AMOUNT = 3e10 MAX_OPEN_POSITIONS = 5 MAX_STEPS", "+= round(self.krw_balance * (1.0 + self.reward),2) # calcuate the total", "numpy as np MAX_ACCOUNT_BALANCE = 2147483647 MAX_NUM_SHARES = 2147483647 MAX_SHARE_PRICE", "environment for OpenAI gym\"\"\" metadata = {'render.modes': ['human']} def __init__(self,", "self.close_price = self.df.loc[self.current_step,\"close\"] amount = 0.5 #the old version has", "len(self.df.loc[:, 'open'].values) - 6) # the observation include the given", "self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation() def render(self, mode='human', close=False): # Render the", "(MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0) return obs def _take_action(self, action): # Set", "self.shares_held * self.close_price self.entry_price = 0 # clear entry price", "1 INITIAL_ACCOUNT_BALANCE = 10000 DATA_HIS_PERIOD = 5 # position constant", "within the time step # current_price = random.uniform( # self.df.loc[self.current_step,", "self.krw_balance * (1.0 + temp_reward) else: temp_reward = 0 new_portfolio", "self.df = new_df # Set the current step to a", "data and scale each value to between 0-1 obs =", "position can be opened per trade # valid action sequence", "import spaces import pandas as pd import numpy as np", "1 - self.fee) ** 2 - 1 new_portfolio = self.krw_balance", "* ( 1 - self.fee) ** 2 - 1 new_portfolio", "df self.closeprices = self.df['close'].values self.reward_range = (0, MAX_ACCOUNT_BALANCE) # Actions", "valid action sequence would be # 
LONG : buy -", "format Buy x%, Sell x%, Hold, etc. self.action_space = spaces.Discrete(len(self.actions))", "self.total_shares_sold / MAX_NUM_SHARES, self.total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0) return obs", "0.5 #the old version has this variable, so reserve #", "balance in shares total_possible = int(self.balance / self.close_price) shares_bought =", "= [] def _next_observation(self): obs = np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values /", "= 0 # [coin + krw_won] total value evaluated in", "self.exit_price + 1) * ( 1 - self.fee) ** 2", "\"history\": self.history, \"n_trades\": {'long': self.n_long, 'short': self.n_short}} #return obs, reward,", "return profit def reset(self, new_df=None): # Reset the state of", "= random.randint( # 0, len(self.df.loc[:, 'open'].values) - 6) # the", "if (self.position == LONG): temp_reward = ((self.close_price - self.entry_price) /", "self.portfolio =0 self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy =0", "/ MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE,", "temp_reward = ((self.entry_price - self.close_price) / self.close_price + 1) *", "position # action constant HOLD = 0 BUY = 1", "sell, 0 hold # single position can be opened per", "MAX_ACCOUNT_BALANCE) # Actions of the format Buy x%, Sell x%,", "value to between 0-1 obs = np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth", "for held shares: {self.cost_basis} (Total sales value: {self.total_sales_value})') print(f'Net worth:", "value: {self.total_sales_value})') print(f'Net worth: {self.net_worth} (Max net worth: {self.max_net_worth})') print(f'Profit:", "contains the OHCL values for the last five prices self.observation_space", "+= self.shares_held * self.close_price self.shares_held = 0 self.entry_price = 0", 
"environment self._take_action(action) done = False self.current_step += 1 delay_modifier =", "LONG : buy - hold - hold - sell #", "self.position, self.current_step, self.close_price, self.krw_balance, self.balance, self.max_net_worth, self.shares_held, self.portfolio, self.total_shares_buy, self.total_buys_value,", "return self._next_observation() def render(self, mode='human', close=False): # Render the environment", "position was short self.position = FLAT # update position to", "0, len(self.df.loc[:, 'open'].values) - 6) # the observation include the", "self.n_short += 1 # record number of short self.total_shares_sold +=", "profit = self.net_worth - INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step: {self.current_step}') print(f'Balance: {self.balance}')", "self.closeprices = self.df['close'].values self.reward_range = (0, MAX_ACCOUNT_BALANCE) # Actions of", "if self.shares_held == 0: self.cost_basis = 0 self.portfolio = round(new_portfolio,2)", "SHORT = 2 # sell position # action constant HOLD", "hold # (e.g.) 
\"buy - buy\" would be considred \"buy", "SHORT): profit = ((self.entry_price - self.close_Price)/self.close_Price + 1)*(1-self.fee)**2 - 1", "done, {} def get_profit(self): if(self.position == LONG): profit = ((self.close_Price", "frame # self.current_step = random.randint( # 0, len(self.df.loc[:, 'open'].values) -", "self.close_price self.reward += ((self.exit_price - self.entry_price) / self.entry_price + 1)", "+ 1) * (1 - self.fee) ** 2 - 1", "action == BUY: #buy if self.position == FLAT: # if", "FLAT self.action = SELL self.exit_price = self.close_price self.reward += ((self.exit_price", "(1.0 + self.reward) self.balance += round(self.krw_balance*(1.0+self.reward),2) self.n_long += 1 self.total_shares_buy", "in krw-won self.balance += round(self.krw_balance * (1.0 + self.reward),2) #", "= SHORT self.action = SELL self.entry_price = self.close_price # Sell", "= round(self.fee * self.krw_balance,2) # commission fee self.balance -= self.krw_balance-commission", "then clear the history self.action = HOLD self.position = FLAT", "#self.cost_basis = (prev_cost + additional_cost) / (self.shares_held + shares_bought) elif", "self.cost_basis = 0 self.portfolio = round(new_portfolio,2) def step(self, action): #", "- self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 - 1 elif(self.position == SHORT): profit", "self.krw_balance,2) # commission fee self.balance -= self.krw_balance-commission elif self.position ==", "low=np.array([0, 0]), high=np.array([3, 1]), dtype=np.float16) # Prices contains the OHCL", "reward #self.krw_balance = self.krw_balance * (1.0 + self.reward) # evaluate", "self.balance -= self.krw_balance-commission elif self.position == LONG: self.position = FLAT", "+ krw_won] total value evaluated in krw won if (self.position", "1 - self.fee) ** 2 - 1 # calculate reward", "= 0 self.n_short = 0 self.history=[] # done = True", "MAX_NUM_SHARES, self.total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0) return obs def _take_action(self,", "done, 
{'portfolio': np.array([self.portfolio]), \"history\": self.history, \"n_trades\": {'long': self.n_long, 'short': self.n_short}}", "self._next_observation() if (self.current_step > (self.df.shape[0]) - 1): self.done = True", "Reset the state of the environment to an initial state", "def render(self, mode='human', close=False): # Render the environment to the", "shares_bought self.balance -= self.krw_balance-commission #self.cost_basis = (prev_cost + additional_cost) /", "temp_reward = ((self.close_price - self.entry_price) / self.entry_price + 1) *", "# brokage commission self.df = df self.closeprices = self.df['close'].values self.reward_range", "# SHORT : sell - hold - hold - buy", "/ MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis / MAX_SHARE_PRICE, self.total_shares_sold /", "1) * ( 1 - self.fee) ** 2 - 1", "# commission fee self.balance -= self.krw_balance-commission elif self.position == LONG:", "= BUY # record action as buy self.entry_price = self.close_price", "# self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation() def render(self, mode='human', close=False): # Render", "# loop training # when loop training, then clear the", "INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward = 0", "LONG = 1 # buy position SHORT = 2 #", "high=np.array([3, 1]), dtype=np.float16) # Prices contains the OHCL values for", "def __init__(self, df,show_trade=True): super(StockTradingEnv, self).__init__() # show the trade info", "self.done = True self.reward = self.get_profit() # return reward at", "(self.show_trade and self.current_step % 1 == 0): print(\"Tick: {0}/ Portfolio", "self.observation_space = spaces.Box( low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history = []", "self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward =0 self.portfolio =0", "the game return obs, self.net_worth, done, {'portfolio': np.array([self.portfolio]), 
\"history\": self.history,", "self.total_buys_value, self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action, self.current_step, self.closingPrice, self.portfolio, self.reward)) obs =", "= 0 # clear entry price self.shares_held = 0 #", "include the given period history data self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1)", "self.total_shares_sold = 0 self.total_sales_value = 0 self.n_long = 0 self.n_short", "return in krw-won self.balance += round(self.krw_balance * (1.0 + self.reward),2)", "print('-'*30) print(f'Step: {self.current_step}') print(f'Balance: {self.balance}') print(f'Shares held: {self.shares_held} (Total sold:", "True self.reward = self.get_profit() # return reward at end of", "would be # LONG : buy - hold - hold", "self.net_worth <= 0: done = True if self.current_step > len(self.df.loc[:,", "commission fee self.shares_held = shares_bought self.balance -= self.krw_balance-commission #self.cost_basis =", "- 1 new_portfolio = self.krw_balance * (1.0 + temp_reward) else:", "def get_profit(self): if(self.position == LONG): profit = ((self.close_Price - self.entry_price)/self.entry_price", "random import json import gym from gym import spaces import", "0 self.total_buys_value = 0 self.total_shares_sold = 0 self.total_sales_value = 0", "observation include the given period history data self.current_step = DATA_HIS_PERIOD", "obs = self._next_observation() if (self.current_step > (self.df.shape[0]) - 1): self.done", "data self.history.append([ self.action, self.position, self.current_step, self.close_price, self.krw_balance, self.balance, self.max_net_worth, self.shares_held,", "def _next_observation(self): obs = np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step,", "MAX_SHARE_PRICE = 5000 MAX_VOLUME = 1000e8 MAX_AMOUNT = 3e10 MAX_OPEN_POSITIONS", "the 
agent # 1 buy, 2 sell, 0 hold #", "(self.position == LONG): temp_reward = ((self.close_price - self.entry_price) / self.entry_price", "one time step within the environment self._take_action(action) done = False", "of the game return obs, self.net_worth, done, {'portfolio': np.array([self.portfolio]), \"history\":", "five prices self.observation_space = spaces.Box( low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history", "((self.entry_price - self.close_Price)/self.close_Price + 1)*(1-self.fee)**2 - 1 else: profit =", "Hold, etc. self.action_space = spaces.Discrete(len(self.actions)) # self.action_space = spaces.Box( #", "#self.history.append((self.action, self.current_step, self.closingPrice, self.portfolio, self.reward)) obs = self._next_observation() if (self.current_step", "0 # no position LONG = 1 # buy position", "# done = True if (self.show_trade and self.current_step % 1", "self.reward = self.get_profit() # return reward at end of the", "0 self.entry_price = 0 # [coin + krw_won] total value", "self.net_worth - INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step: {self.current_step}') print(f'Balance: {self.balance}') print(f'Shares held:", "commission self.df = df self.closeprices = self.df['close'].values self.reward_range = (0,", "1 if reward > 0 else -100 if self.net_worth <=", "considred \"buy - hold\" self.action = HOLD #hold if action", "+= ((self.entry_price - self.exit_price) / self.exit_price + 1) * (", "elif(self.position == SHORT): profit = ((self.entry_price - self.close_Price)/self.close_Price + 1)*(1-self.fee)**2", "flat self.action = BUY # record action as buy self.exit_price", "0.0005 # brokage commission self.df = df self.closeprices = self.df['close'].values", "'close'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES, ]) # Append", "# single position can be opened per trade # valid", "# pass test dataset to environment if new_df: self.df =", "# 
action comes from the agent # 1 buy, 2", "data self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for i in range(DATA_HIS_PERIOD):", "= BUY # record action as buy self.exit_price = self.close_price", "# Reset the state of the environment to an initial", "% 1 == 0): print(\"Tick: {0}/ Portfolio (krw-won): {1}, balance:", "self.df['close'].values self.reward_range = (0, MAX_ACCOUNT_BALANCE) # Actions of the format", "self.fee = 0.0005 # brokage commission self.df = df self.closeprices", "self.total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0) return obs def _take_action(self, action):", "of short self.total_shares_sold += self.shares_held self.total_sales_value += self.shares_held * self.close_price", "random.randint( # 0, len(self.df.loc[:, 'open'].values) - 6) # the observation", "self.total_shares_buy, self.total_buys_value, self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action, self.current_step, self.closingPrice, self.portfolio, self.reward)) obs", "= spaces.Box( low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history = [] def", "profits #reward = self.net_worth - INITIAL_ACCOUNT_BALANCE #reward = 1 if", "constant FLAT = 0 # no position LONG = 1", "dtype=np.float16) self.history = [] def _next_observation(self): obs = np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step,", "INITIAL_ACCOUNT_BALANCE #reward = 1 if reward > 0 else -100", "self.current_step = DATA_HIS_PERIOD # loop training # when loop training,", "self.exit_price) / self.exit_price + 1) * ( 1 - self.fee)", "= spaces.Box( # low=np.array([0, 0]), high=np.array([3, 1]), dtype=np.float16) # Prices", "MAX_AMOUNT = 3e10 MAX_OPEN_POSITIONS = 5 MAX_STEPS = 20000 MAX_DAY_CHANGE", "elif action == SELL: if self.position == FLAT: self.position =", "self.shares_held self.total_buys_value += self.shares_held * self.close_price self.shares_held = 0 self.entry_price", "= 
False self.current_step += 1 delay_modifier = (self.current_step / MAX_STEPS)", "Portfolio (krw-won): {1}, balance: {2}\".format(self.current_step, self.portfolio,self.net_worth)) print(\"Long: {0}/ Short: {1}\".format(self.n_long,", "SHORT: # if previous position was short self.position = FLAT", "FLAT # update position to flat self.action = BUY #", "= 0 self.total_sales_value = 0 self.n_long=0 self.n_short=0 self.history=[] # pass", "(0, MAX_ACCOUNT_BALANCE) # Actions of the format Buy x%, Sell", "record number of short self.total_shares_sold += self.shares_held self.total_sales_value += self.shares_held", "{'long': self.n_long, 'short': self.n_short}} #return obs, reward, done, {} def", "# sell position # action constant HOLD = 0 BUY", "# self.action_space = spaces.Box( # low=np.array([0, 0]), high=np.array([3, 1]), dtype=np.float16)", "= self.net_worth if self.shares_held == 0: self.cost_basis = 0 self.portfolio", "#hold if action == BUY: #buy if self.position == FLAT:", "self.shares_held = int(total_possible * amount)//100 *100 self.krw_balance = self.shares_held *", "training # when loop training, then clear the history self.action", "#the old version has this variable, so reserve # action", "= INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward =", "(1.0 + temp_reward) elif (self.position == SHORT): temp_reward = ((self.entry_price", "self.closingPrice, self.portfolio, self.reward)) obs = self._next_observation() if (self.current_step > (self.df.shape[0])", "MAX_STEPS) # profits #reward = self.net_worth - INITIAL_ACCOUNT_BALANCE #reward =", "0 new_portfolio = 0 self.net_worth = self.balance + new_portfolio if", "# Prices contains the OHCL values for the last five", "'open'].values) - 1: self.current_step = DATA_HIS_PERIOD # loop training #", "# commission fee self.shares_held = shares_bought self.balance -= self.krw_balance-commission #self.cost_basis", "+= self.shares_held self.total_buys_value += 
self.shares_held * self.close_price self.shares_held = 0", "total value evaluated in krw won if (self.position == LONG):", "self.shares_held self.total_sales_value += self.shares_held * self.close_price self.entry_price = 0 #", "= 3e10 MAX_OPEN_POSITIONS = 5 MAX_STEPS = 20000 MAX_DAY_CHANGE =", "self.balance += round(self.krw_balance * (1.0 + self.reward),2) # calcuate the", "self.close_price, self.krw_balance, self.balance, self.max_net_worth, self.shares_held, self.portfolio, self.total_shares_buy, self.total_buys_value, self.total_shares_sold, self.total_sales_value])", "2147483647 MAX_NUM_SHARES = 2147483647 MAX_SHARE_PRICE = 5000 MAX_VOLUME = 1000e8", "== SELL: if self.position == FLAT: self.position = SHORT self.action", "{'portfolio': np.array([self.portfolio]), \"history\": self.history, \"n_trades\": {'long': self.n_long, 'short': self.n_short}} #return", "+ new_portfolio if self.net_worth > self.max_net_worth: self.max_net_worth = self.net_worth if", "'open'].values) - 6) # the observation include the given period", "# Actions of the format Buy x%, Sell x%, Hold,", "# buy balance commission = round(self.fee * self.krw_balance,2) # commission", "# low=np.array([0, 0]), high=np.array([3, 1]), dtype=np.float16) # Prices contains the", "0 self.portfolio = 0 self.shares_held = 0 self.cost_basis = 0", "2 - 1 new_portfolio = self.krw_balance * (1.0 + temp_reward)", "= self.net_worth - INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step: {self.current_step}') print(f'Balance: {self.balance}') print(f'Shares", "5 # position constant FLAT = 0 # no position", "- INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step: {self.current_step}') print(f'Balance: {self.balance}') print(f'Shares held: {self.shares_held}", "self.n_short=0 self.history=[] # pass test dataset to environment if new_df:", "version has this variable, so reserve # action comes from", "action as buy self.exit_price = self.close_price self.reward += ((self.entry_price -", "profit = ((self.entry_price - 
self.close_Price)/self.close_Price + 1)*(1-self.fee)**2 - 1 else:", "self.entry_price) / self.entry_price + 1) * ( 1 - self.fee)", "{self.cost_basis} (Total sales value: {self.total_sales_value})') print(f'Net worth: {self.net_worth} (Max net", "{2}\".format(self.current_step, self.portfolio,self.net_worth)) print(\"Long: {0}/ Short: {1}\".format(self.n_long, self.n_short)) # save the", "self.close_price # Sell amount % of shares held total_possible =", "\"\"\"A stock trading environment for OpenAI gym\"\"\" metadata = {'render.modes':", "elif self.position == SHORT: # if previous position was short", "entry price self.shares_held = 0 # clear the shares_ elif", "MAX_STEPS = 20000 MAX_DAY_CHANGE = 1 INITIAL_ACCOUNT_BALANCE = 10000 DATA_HIS_PERIOD", "self.shares_held = 0 # clear the shares_ elif action ==", "def reset(self, new_df=None): # Reset the state of the environment", "self.reward) self.balance += round(self.krw_balance*(1.0+self.reward),2) self.n_long += 1 self.total_shares_buy += self.shares_held", "the last close price self.close_price = self.df.loc[self.current_step,\"close\"] amount = 0.5", "1 new_portfolio = self.krw_balance * (1.0 + temp_reward) elif (self.position", "the time step # current_price = random.uniform( # self.df.loc[self.current_step, \"open\"],", "trade # valid action sequence would be # LONG :", "as buy self.entry_price = self.close_price # Buy amount % of", "SELL self.exit_price = self.close_price self.reward += ((self.exit_price - self.entry_price) /", "= 0 self.net_worth = self.balance + new_portfolio if self.net_worth >", "(1.0 + temp_reward) else: temp_reward = 0 new_portfolio = 0", "self.close_price) / self.close_price + 1) * ( 1 - self.fee)", "= 2 class StockTradingEnv(gym.Env): \"\"\"A stock trading environment for OpenAI", "= self.shares_held * self.entry_price # buy balance commission = round(self.fee", "self._take_action(action) done = False self.current_step += 1 delay_modifier = (self.current_step", "/ MAX_NUM_SHARES, ]) # 
Append additional data and scale each", "of the format Buy x%, Sell x%, Hold, etc. self.action_space", "= 20000 MAX_DAY_CHANGE = 1 INITIAL_ACCOUNT_BALANCE = 10000 DATA_HIS_PERIOD =", "= df self.closeprices = self.df['close'].values self.reward_range = (0, MAX_ACCOUNT_BALANCE) #", "self.exit_price = self.close_price self.reward += ((self.entry_price - self.exit_price) / self.exit_price", "INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step: {self.current_step}') print(f'Balance: {self.balance}') print(f'Shares held: {self.shares_held} (Total", "/ MAX_NUM_SHARES, self.cost_basis / MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.total_sales_value /", "new_df=None): # Reset the state of the environment to an", "self.shares_held, self.portfolio, self.total_shares_buy, self.total_buys_value, self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action, self.current_step, self.closingPrice, self.portfolio,", "Set the current step to a random point within the", "self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values", "Buy amount % of balance in shares total_possible = int(self.balance", "- buy # invalid action sequence is just considered hold", "record action as buy self.exit_price = self.close_price self.reward += ((self.entry_price", "= self._next_observation() if (self.current_step > (self.df.shape[0]) - 1): self.done =", "the screen profit = self.net_worth - INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step: {self.current_step}')", "buy\" would be considred \"buy - hold\" self.action = HOLD", "+ additional_cost) / (self.shares_held + shares_bought) elif self.position == SHORT:", "x%, Sell x%, Hold, etc. 
self.action_space = spaces.Discrete(len(self.actions)) # self.action_space", "#buy if self.position == FLAT: # if previous position was", "reward > 0 else -100 if self.net_worth <= 0: done", "commission fee self.balance -= self.krw_balance-commission elif self.position == LONG: self.position", "2 - 1 #self.krw_balance = self.krw_balance * (1.0 + self.reward)", "*100 self.krw_balance = shares_bought * self.entry_price # buy balance commission", "self.max_net_worth = self.net_worth if self.shares_held == 0: self.cost_basis = 0", "0 self.total_sales_value = 0 self.n_long=0 self.n_short=0 self.history=[] # pass test", "+= self.shares_held self.total_sales_value += self.shares_held * self.close_price self.entry_price = 0", "% of shares held total_possible = int(self.balance / self.close_price) self.shares_held", "self.balance = INITIAL_ACCOUNT_BALANCE self.net_worth = INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance", "SHORT self.action = SELL self.entry_price = self.close_price # Sell amount", "DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for i in range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return", "{self.total_shares_sold})') print(f'Avg cost for held shares: {self.cost_basis} (Total sales value:", "self.krw_balance = 0 self.reward = 0 self.portfolio = 0 self.shares_held", "0 self.total_shares_buy =0 self.total_buys_value=0 self.total_shares_sold = 0 self.total_sales_value = 0", "# 0, len(self.df.loc[:, 'open'].values) - 6) # the observation include", "self.current_step = random.randint( # 0, len(self.df.loc[:, 'open'].values) - 6) #", "Actions of the format Buy x%, Sell x%, Hold, etc.", "if (self.current_step > (self.df.shape[0]) - 1): self.done = True self.reward", "round(self.krw_balance * (1.0 + self.reward),2) # calcuate the total balance", "shares_bought = int(total_possible * amount)//100 *100 self.krw_balance = shares_bought *", "= 0 self.history=[] # 
done = True if (self.show_trade and", "return reward at end of the game return obs, self.net_worth,", "* self.close_price self.shares_held = 0 self.entry_price = 0 # [coin", "DATA_HIS_PERIOD # loop training # when loop training, then clear", "# when loop training, then clear the history self.action =", "test dataset to environment if new_df: self.df = new_df #", "+ temp_reward) elif (self.position == SHORT): temp_reward = ((self.entry_price -", "old version has this variable, so reserve # action comes", "self.position == FLAT: # if previous position was flat self.position", "self.current_step, self.close_price, self.krw_balance, self.balance, self.max_net_worth, self.shares_held, self.portfolio, self.total_shares_buy, self.total_buys_value, self.total_shares_sold,", "reward at end of the game return obs, self.net_worth, done,", "{'render.modes': ['human']} def __init__(self, df,show_trade=True): super(StockTradingEnv, self).__init__() # show the", "_next_observation(self): obs = np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values", "obs = np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth / MAX_ACCOUNT_BALANCE, self.shares_held /", ": sell - hold - hold - buy # invalid", "+ 1)*(1-self.fee)**2 - 1 else: profit = 0 return profit", "= self.krw_balance * (1.0 + self.reward) self.balance += round(self.krw_balance*(1.0+self.reward),2) self.n_long", "done = True if self.current_step > len(self.df.loc[:, 'open'].values) - 1:", "* (1.0 + self.reward),2) # calcuate the total balance self.n_short", "amount % of balance in shares total_possible = int(self.balance /", "current price to a random price within the time step", "= self.net_worth - INITIAL_ACCOUNT_BALANCE #reward = 1 if reward >", "self.balance += round(self.krw_balance*(1.0+self.reward),2) self.n_long += 1 self.total_shares_buy += self.shares_held 
self.total_buys_value", "+= self.shares_held * self.close_price self.entry_price = 0 # clear entry", "self.shares_held == 0: self.cost_basis = 0 self.portfolio = round(new_portfolio,2) def", "#reward = 1 if reward > 0 else -100 if", "range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation() def render(self, mode='human', close=False): #", "from the agent # 1 buy, 2 sell, 0 hold", "Render the environment to the screen profit = self.net_worth -", "action == SELL: if self.position == FLAT: self.position = SHORT", "# Set the current price to a random price within", "# update position to flat self.action = BUY # record", "self.position = FLAT # update position to flat self.action =", "# profits #reward = self.net_worth - INITIAL_ACCOUNT_BALANCE #reward = 1", "as buy self.exit_price = self.close_price self.reward += ((self.entry_price - self.exit_price)", "can be opened per trade # valid action sequence would", "self.net_worth - INITIAL_ACCOUNT_BALANCE #reward = 1 if reward > 0", "of the environment to an initial state self.action = HOLD", "== LONG: self.position = FLAT self.action = SELL self.exit_price =", "SHORT): temp_reward = ((self.entry_price - self.close_price) / self.close_price + 1)", "int(total_possible * amount)//100 *100 self.krw_balance = shares_bought * self.entry_price #", "= 5000 MAX_VOLUME = 1000e8 MAX_AMOUNT = 3e10 MAX_OPEN_POSITIONS =", "== BUY: #buy if self.position == FLAT: # if previous", "MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step,", "= INITIAL_ACCOUNT_BALANCE self.net_worth = INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance =", "obs = np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / 
MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values /", "self.n_long = 0 self.n_short = 0 self.history=[] # done =", "else -100 if self.net_worth <= 0: done = True if", "save the history data self.history.append([ self.action, self.position, self.current_step, self.close_price, self.krw_balance,", "step to a random point within the data frame #", "# 1 buy, 2 sell, 0 hold # single position", "MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step,", "def step(self, action): # Execute one time step within the", "self.total_shares_sold += self.shares_held self.total_sales_value += self.shares_held * self.close_price self.entry_price =", "shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history = [] def _next_observation(self): obs = np.array([", "comes from the agent # 1 buy, 2 sell, 0", "*100 self.krw_balance = self.shares_held * self.entry_price # buy balance commission", "= INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward = 0 self.portfolio =", "obs, reward, done, {} def get_profit(self): if(self.position == LONG): profit", "\"buy - hold\" self.action = HOLD #hold if action ==", "was flat self.position = LONG #update position to long self.action", "= 0 self.total_sales_value = 0 self.n_long = 0 self.n_short =", "environment to an initial state self.action = HOLD self.position =", "'volume'].values / MAX_NUM_SHARES, ]) # Append additional data and scale", "self.max_net_worth: self.max_net_worth = self.net_worth if self.shares_held == 0: self.cost_basis =", "(self.shares_held + shares_bought) elif self.position == SHORT: # if previous", "np MAX_ACCOUNT_BALANCE = 2147483647 MAX_NUM_SHARES = 2147483647 MAX_SHARE_PRICE = 5000", "self.history = [] def _next_observation(self): obs = np.array([ 
self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values", "clear the shares_ elif action == SELL: if self.position ==", "== FLAT: self.position = SHORT self.action = SELL self.entry_price =", "round(self.krw_balance*(1.0+self.reward),2) self.n_long += 1 self.total_shares_buy += self.shares_held self.total_buys_value += self.shares_held", "(Total sold: {self.total_shares_sold})') print(f'Avg cost for held shares: {self.cost_basis} (Total", "close price self.close_price = self.df.loc[self.current_step,\"close\"] amount = 0.5 #the old", "current_price = random.uniform( # self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step, \"close\"]) # Set", "* (1.0 + self.reward) # evaluate cumulative return in krw-won", "print(f'Avg cost for held shares: {self.cost_basis} (Total sales value: {self.total_sales_value})')", "step(self, action): # Execute one time step within the environment", "- 1 new_portfolio = self.krw_balance * (1.0 + temp_reward) elif", "hold - hold - buy # invalid action sequence is", "self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee = 0.0005 # brokage commission self.df = df", "{1}\".format(self.n_long, self.n_short)) # save the history data self.history.append([ self.action, self.position,", "= 10000 DATA_HIS_PERIOD = 5 # position constant FLAT =", "close=False): # Render the environment to the screen profit =", "of balance in shares total_possible = int(self.balance / self.close_price) shares_bought", "action): # Set the current price to a random price", "buy - hold - hold - sell # SHORT :", "trading environment for OpenAI gym\"\"\" metadata = {'render.modes': ['human']} def", "self._next_observation() def render(self, mode='human', close=False): # Render the environment to", "long self.action = BUY # record action as buy self.entry_price", "* (1.0 + self.reward) self.balance += round(self.krw_balance*(1.0+self.reward),2) self.n_long += 1", "1000e8 MAX_AMOUNT = 3e10 MAX_OPEN_POSITIONS = 5 MAX_STEPS = 20000", 
"SELL self.entry_price = self.close_price # Sell amount % of shares", "the history data self.history.append([ self.action, self.position, self.current_step, self.close_price, self.krw_balance, self.balance,", "self.max_net_worth, self.shares_held, self.portfolio, self.total_shares_buy, self.total_buys_value, self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action, self.current_step, self.closingPrice,", "5000 MAX_VOLUME = 1000e8 MAX_AMOUNT = 3e10 MAX_OPEN_POSITIONS = 5", "1 else: profit = 0 return profit def reset(self, new_df=None):", "to an initial state self.action = HOLD self.position = FLAT", "given period history data self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for", "self.reward += ((self.exit_price - self.entry_price) / self.entry_price + 1) *", "# calculate reward #self.krw_balance = self.krw_balance * (1.0 + self.reward)", "0 # clear entry price self.shares_held = 0 # clear", "# valid action sequence would be # LONG : buy", "= 2147483647 MAX_NUM_SHARES = 2147483647 MAX_SHARE_PRICE = 5000 MAX_VOLUME =", "= HOLD #hold if action == BUY: #buy if self.position", "+= ((self.exit_price - self.entry_price) / self.entry_price + 1) * (1", "done = True if (self.show_trade and self.current_step % 1 ==", "self.krw_balance, self.balance, self.max_net_worth, self.shares_held, self.portfolio, self.total_shares_buy, self.total_buys_value, self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action,", "{} def get_profit(self): if(self.position == LONG): profit = ((self.close_Price -", "self.reward),2) # calcuate the total balance self.n_short += 1 #", "2 # sell position # action constant HOLD = 0", "self.action = SELL self.exit_price = self.close_price self.reward += ((self.exit_price -", "clear the history self.action = HOLD self.position = FLAT self.balance", "self.close_price self.entry_price = 0 # clear entry price self.shares_held =", "to long self.action = 
BUY # record action as buy", "# save the history data self.history.append([ self.action, self.position, self.current_step, self.close_price,", "= int(total_possible * amount)//100 *100 self.krw_balance = self.shares_held * self.entry_price", "Set the current price to a random price within the", "self.cost_basis / MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.total_sales_value / (MAX_NUM_SHARES *", "else: temp_reward = 0 new_portfolio = 0 self.net_worth = self.balance", "# Execute one time step within the environment self._take_action(action) done", "StockTradingEnv(gym.Env): \"\"\"A stock trading environment for OpenAI gym\"\"\" metadata =", "((self.exit_price - self.entry_price) / self.entry_price + 1) * (1 -", "new_portfolio = 0 self.net_worth = self.balance + new_portfolio if self.net_worth", "calculate reward #self.krw_balance = self.krw_balance * (1.0 + self.reward) #", "# Buy amount % of balance in shares total_possible =", "time step # current_price = random.uniform( # self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step,", "self.total_shares_buy += self.shares_held self.total_buys_value += self.shares_held * self.close_price self.shares_held =", "self.net_worth = INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward", "MAX_SHARE_PRICE)]],axis=0) return obs def _take_action(self, action): # Set the current", "1 # calculate reward #self.krw_balance = self.krw_balance * (1.0 +", "/ MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES, ]) # Append additional", "game return obs, self.net_worth, done, {'portfolio': np.array([self.portfolio]), \"history\": self.history, \"n_trades\":", "self.action = SELL self.entry_price = self.close_price # Sell amount %", "position constant FLAT = 0 # no position LONG =", "the OHCL values for the last five prices self.observation_space =", "= (self.current_step / MAX_STEPS) # profits 
#reward = self.net_worth -", "HOLD self.position = FLAT self.balance = INITIAL_ACCOUNT_BALANCE self.net_worth = INITIAL_ACCOUNT_BALANCE", "buy # invalid action sequence is just considered hold #", "additional data and scale each value to between 0-1 obs", "previous position was short self.position = FLAT # update position", "self.position = SHORT self.action = SELL self.entry_price = self.close_price #", "#return obs, reward, done, {} def get_profit(self): if(self.position == LONG):", "the trade info self.show_trade = show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee = 0.0005", "as pd import numpy as np MAX_ACCOUNT_BALANCE = 2147483647 MAX_NUM_SHARES", "a random point within the data frame # self.current_step =", "scale each value to between 0-1 obs = np.append(obs,[[self.balance /", "mode='human', close=False): # Render the environment to the screen profit", "= 0 # clear the shares_ elif action == SELL:", "* ( 1 - self.fee) ** 2 - 1 #", "if self.current_step > len(self.df.loc[:, 'open'].values) - 1: self.current_step = DATA_HIS_PERIOD", "# return reward at end of the game return obs,", "constant HOLD = 0 BUY = 1 SELL = 2", "1]), dtype=np.float16) # Prices contains the OHCL values for the", "self.krw_balance = shares_bought * self.entry_price # buy balance commission =", "= 0.5 #the old version has this variable, so reserve", "= self.krw_balance * (1.0 + temp_reward) else: temp_reward = 0", "self.net_worth > self.max_net_worth: self.max_net_worth = self.net_worth if self.shares_held == 0:", "print(f'Shares held: {self.shares_held} (Total sold: {self.total_shares_sold})') print(f'Avg cost for held", "* MAX_SHARE_PRICE)]],axis=0) return obs def _take_action(self, action): # Set the", "0 self.total_shares_sold = 0 self.total_sales_value = 0 self.n_long = 0", "self.net_worth if self.shares_held == 0: self.cost_basis = 0 self.portfolio =", "1 # buy position SHORT = 2 # sell position", "was short self.position = FLAT # update position to flat", "amount % of 
shares held total_possible = int(self.balance / self.close_price)", "self.close_price) self.shares_held = int(total_possible * amount)//100 *100 self.krw_balance = self.shares_held", "round(new_portfolio,2) def step(self, action): # Execute one time step within", "LONG #update position to long self.action = BUY # record", "to a random price within the time step # current_price", "self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values", "Short: {1}\".format(self.n_long, self.n_short)) # save the history data self.history.append([ self.action,", "= 0 self.n_long=0 self.n_short=0 self.history=[] # pass test dataset to", "0 self.total_shares_buy = 0 self.total_buys_value = 0 self.total_shares_sold = 0", "action comes from the agent # 1 buy, 2 sell,", "prices self.observation_space = spaces.Box( low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history =", "sequence would be # LONG : buy - hold -", "self.reward) # evaluate cumulative return in krw-won self.balance += round(self.krw_balance", "self.balance -= self.krw_balance-commission #self.cost_basis = (prev_cost + additional_cost) / (self.shares_held", "hold # single position can be opened per trade #", "0 BUY = 1 SELL = 2 class StockTradingEnv(gym.Env): \"\"\"A", "MAX_ACCOUNT_BALANCE = 2147483647 MAX_NUM_SHARES = 2147483647 MAX_SHARE_PRICE = 5000 MAX_VOLUME", "MAX_ACCOUNT_BALANCE, self.max_net_worth / MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis / MAX_SHARE_PRICE,", "import numpy as np MAX_ACCOUNT_BALANCE = 2147483647 MAX_NUM_SHARES = 2147483647", "- hold - hold - sell # SHORT : sell", "# calcuate the total balance self.n_short += 1 # record", "** 2 - 1 new_portfolio = self.krw_balance * (1.0 +", "self.cost_basis = 0 self.total_shares_buy = 0 
self.total_buys_value = 0 self.total_shares_sold", "0 self.history=[] # done = True if (self.show_trade and self.current_step", "and self.current_step % 1 == 0): print(\"Tick: {0}/ Portfolio (krw-won):", "screen profit = self.net_worth - INITIAL_ACCOUNT_BALANCE print('-'*30) print(f'Step: {self.current_step}') print(f'Balance:", "{self.current_step}') print(f'Balance: {self.balance}') print(f'Shares held: {self.shares_held} (Total sold: {self.total_shares_sold})') print(f'Avg", "print(f'Balance: {self.balance}') print(f'Shares held: {self.shares_held} (Total sold: {self.total_shares_sold})') print(f'Avg cost", "# no position LONG = 1 # buy position SHORT", "self.total_shares_sold = 0 self.total_sales_value = 0 self.n_long=0 self.n_short=0 self.history=[] #", "last five prices self.observation_space = spaces.Box( low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16)", "1 # record number of short self.total_shares_sold += self.shares_held self.total_sales_value", "training, then clear the history self.action = HOLD self.position =", "(prev_cost + additional_cost) / (self.shares_held + shares_bought) elif self.position ==", "Sell x%, Hold, etc. self.action_space = spaces.Discrete(len(self.actions)) # self.action_space =", "+= 1 self.total_shares_buy += self.shares_held self.total_buys_value += self.shares_held * self.close_price", "0 self.reward =0 self.portfolio =0 self.shares_held = 0 self.cost_basis =", "- INITIAL_ACCOUNT_BALANCE #reward = 1 if reward > 0 else", "for OpenAI gym\"\"\" metadata = {'render.modes': ['human']} def __init__(self, df,show_trade=True):", "= int(total_possible * amount)//100 *100 self.krw_balance = shares_bought * self.entry_price", "step within the environment self._take_action(action) done = False self.current_step +=", "/ self.close_price) self.shares_held = int(total_possible * amount)//100 *100 self.krw_balance =", "is just considered hold # (e.g.) 
\"buy - buy\" would", "% of balance in shares total_possible = int(self.balance / self.close_price)", "class StockTradingEnv(gym.Env): \"\"\"A stock trading environment for OpenAI gym\"\"\" metadata", "- hold - hold - buy # invalid action sequence", "held total_possible = int(self.balance / self.close_price) self.shares_held = int(total_possible *", "/ self.entry_price + 1) * (1 - self.fee) ** 2", "= 1 # buy position SHORT = 2 # sell", "== LONG): temp_reward = ((self.close_price - self.entry_price) / self.entry_price +", "would be considred \"buy - hold\" self.action = HOLD #hold", "flat self.position = LONG #update position to long self.action =", "+ shares_bought) elif self.position == SHORT: # if previous position", "if self.net_worth <= 0: done = True if self.current_step >", "1)*(1-self.fee)**2 - 1 else: profit = 0 return profit def", "- hold - sell # SHORT : sell - hold", "self.entry_price = 0 # clear entry price self.shares_held = 0", "temp_reward = 0 new_portfolio = 0 self.net_worth = self.balance +", "if reward > 0 else -100 if self.net_worth <= 0:", "# the observation include the given period history data self.current_step", "MAX_NUM_SHARES, self.cost_basis / MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.total_sales_value / (MAX_NUM_SHARES", "evaluate cumulative return in krw-won self.balance += round(self.krw_balance * (1.0", "update position to flat self.action = BUY # record action", "reset(self, new_df=None): # Reset the state of the environment to", "return obs def _take_action(self, action): # Set the current price", "0 hold # single position can be opened per trade", "the state of the environment to an initial state self.action", "state of the environment to an initial state self.action =", "self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) # for i in range(DATA_HIS_PERIOD): #", "INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward = 0 self.portfolio = 0", "environment if 
new_df: self.df = new_df # Set the current", "hold - buy # invalid action sequence is just considered", "/ self.close_price) shares_bought = int(total_possible * amount)//100 *100 self.krw_balance =", "spaces.Discrete(len(self.actions)) # self.action_space = spaces.Box( # low=np.array([0, 0]), high=np.array([3, 1]),", "= True if self.current_step > len(self.df.loc[:, 'open'].values) - 1: self.current_step", "self.reward =0 self.portfolio =0 self.shares_held = 0 self.cost_basis = 0", "* (1 - self.fee) ** 2 - 1 #self.krw_balance =", "1 self.total_shares_buy += self.shares_held self.total_buys_value += self.shares_held * self.close_price self.shares_held", "commission = round(self.fee * self.krw_balance,2) # commission fee self.shares_held =", "self.shares_held * self.entry_price # buy balance commission = round(self.fee *", "balance self.n_short += 1 # record number of short self.total_shares_sold", "# (e.g.) \"buy - buy\" would be considred \"buy -", "[] def _next_observation(self): obs = np.array([ self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / MAX_SHARE_PRICE,", "step # current_price = random.uniform( # self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step, \"close\"])", "# clear entry price self.shares_held = 0 # clear the", "held: {self.shares_held} (Total sold: {self.total_shares_sold})') print(f'Avg cost for held shares:", "<= 0: done = True if self.current_step > len(self.df.loc[:, 'open'].values)", "random price within the time step # current_price = random.uniform(", "self.total_buys_value += self.shares_held * self.close_price self.shares_held = 0 self.entry_price =", "get_profit(self): if(self.position == LONG): profit = ((self.close_Price - self.entry_price)/self.entry_price +", "= 0 self.cost_basis = 0 self.total_shares_buy = 0 self.total_buys_value =", "= shares_bought self.balance -= self.krw_balance-commission #self.cost_basis = (prev_cost + additional_cost)", "= 0 self.total_shares_sold = 0 
self.total_sales_value = 0 self.n_long =", "no position LONG = 1 # buy position SHORT =", "0: done = True if self.current_step > len(self.df.loc[:, 'open'].values) -", "#reward = self.net_worth - INITIAL_ACCOUNT_BALANCE #reward = 1 if reward", "2 class StockTradingEnv(gym.Env): \"\"\"A stock trading environment for OpenAI gym\"\"\"", "2 - 1 # calculate reward #self.krw_balance = self.krw_balance *", "self.current_step > len(self.df.loc[:, 'open'].values) - 1: self.current_step = DATA_HIS_PERIOD #", "etc. self.action_space = spaces.Discrete(len(self.actions)) # self.action_space = spaces.Box( # low=np.array([0,", "calcuate the total balance self.n_short += 1 # record number", "commission = round(self.fee * self.krw_balance,2) # commission fee self.balance -=", "action sequence would be # LONG : buy - hold", "self.close_price) shares_bought = int(total_possible * amount)//100 *100 self.krw_balance = shares_bought", "> 0 else -100 if self.net_worth <= 0: done =", "pandas as pd import numpy as np MAX_ACCOUNT_BALANCE = 2147483647", "in range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation() def render(self, mode='human', close=False):", "= 0 BUY = 1 SELL = 2 class StockTradingEnv(gym.Env):", "hold - sell # SHORT : sell - hold -", "the observation include the given period history data self.current_step =", "# show the trade info self.show_trade = show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee", "self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES, ]) # Append additional data and", "20000 MAX_DAY_CHANGE = 1 INITIAL_ACCOUNT_BALANCE = 10000 DATA_HIS_PERIOD = 5", "{1}, balance: {2}\".format(self.current_step, self.portfolio,self.net_worth)) print(\"Long: {0}/ Short: {1}\".format(self.n_long, self.n_short)) #", "if new_df: self.df = new_df # Set the current step", "delay_modifier = (self.current_step / MAX_STEPS) # profits #reward = self.net_worth", "0 
self.cost_basis = 0 self.total_shares_buy =0 self.total_buys_value=0 self.total_shares_sold = 0", "temp_reward) elif (self.position == SHORT): temp_reward = ((self.entry_price - self.close_price)", "self.krw_balance-commission #self.cost_basis = (prev_cost + additional_cost) / (self.shares_held + shares_bought)", "self.position == LONG: self.position = FLAT self.action = SELL self.exit_price", "loop training # when loop training, then clear the history", "# if previous position was short self.position = FLAT #", "pass test dataset to environment if new_df: self.df = new_df", "HOLD = 0 BUY = 1 SELL = 2 class", "- self.close_price) / self.close_price + 1) * ( 1 -", "2147483647 MAX_SHARE_PRICE = 5000 MAX_VOLUME = 1000e8 MAX_AMOUNT = 3e10", "Prices contains the OHCL values for the last five prices", "# [coin + krw_won] total value evaluated in krw won", "- 1: self.current_step = DATA_HIS_PERIOD # loop training # when", "to flat self.action = BUY # record action as buy", "= HOLD self.position = FLAT self.balance = INITIAL_ACCOUNT_BALANCE self.net_worth =", "values for the last five prices self.observation_space = spaces.Box( low=0,", "- 1 else: profit = 0 return profit def reset(self,", "> (self.df.shape[0]) - 1): self.done = True self.reward = self.get_profit()", "= self.close_price self.reward += ((self.entry_price - self.exit_price) / self.exit_price +", "0 self.portfolio = round(new_portfolio,2) def step(self, action): # Execute one", "= FLAT self.balance = INITIAL_ACCOUNT_BALANCE self.net_worth = INITIAL_ACCOUNT_BALANCE self.max_net_worth =", "- 6) # the observation include the given period history", "0 self.total_sales_value = 0 self.n_long = 0 self.n_short = 0", "OpenAI gym\"\"\" metadata = {'render.modes': ['human']} def __init__(self, df,show_trade=True): super(StockTradingEnv,", "amount = 0.5 #the old version has this variable, so", "self.n_long, 'short': self.n_short}} #return obs, reward, done, {} def get_profit(self):", "LONG): profit = ((self.close_Price - 
self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 - 1", "{self.balance}') print(f'Shares held: {self.shares_held} (Total sold: {self.total_shares_sold})') print(f'Avg cost for", "(1 - self.fee) ** 2 - 1 #self.krw_balance = self.krw_balance", "/ MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE,", "the given period history data self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1) #", "round(self.fee * self.krw_balance,2) # commission fee self.balance -= self.krw_balance-commission elif", "previous position was flat self.position = LONG #update position to", "hold - hold - sell # SHORT : sell -", "last close price self.close_price = self.df.loc[self.current_step,\"close\"] amount = 0.5 #the", "of shares held total_possible = int(self.balance / self.close_price) self.shares_held =", "new_portfolio = self.krw_balance * (1.0 + temp_reward) elif (self.position ==", "print(\"Long: {0}/ Short: {1}\".format(self.n_long, self.n_short)) # save the history data", "the data frame # self.current_step = random.randint( # 0, len(self.df.loc[:,", "FLAT = 0 # no position LONG = 1 #", "the current price to the last close price self.close_price =", "new_portfolio = self.krw_balance * (1.0 + temp_reward) else: temp_reward =", "self.action, self.position, self.current_step, self.close_price, self.krw_balance, self.balance, self.max_net_worth, self.shares_held, self.portfolio, self.total_shares_buy,", "= 0 self.portfolio = 0 self.shares_held = 0 self.cost_basis =", "state self.action = HOLD self.position = FLAT self.balance = INITIAL_ACCOUNT_BALANCE", "new_df: self.df = new_df # Set the current step to", "# record action as buy self.entry_price = self.close_price # Buy", "self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy =0 self.total_buys_value=0 
self.total_shares_sold", "brokage commission self.df = df self.closeprices = self.df['close'].values self.reward_range =", "info self.show_trade = show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee = 0.0005 # brokage", "loop training, then clear the history self.action = HOLD self.position", "{self.total_sales_value})') print(f'Net worth: {self.net_worth} (Max net worth: {self.max_net_worth})') print(f'Profit: {profit}')", "won if (self.position == LONG): temp_reward = ((self.close_price - self.entry_price)", "new_df # Set the current step to a random point", "0 return profit def reset(self, new_df=None): # Reset the state", "MAX_NUM_SHARES, ]) # Append additional data and scale each value", "self.df.loc[self.current_step,\"close\"] amount = 0.5 #the old version has this variable,", "position to long self.action = BUY # record action as", "int(self.balance / self.close_price) self.shares_held = int(total_possible * amount)//100 *100 self.krw_balance", "= 1 INITIAL_ACCOUNT_BALANCE = 10000 DATA_HIS_PERIOD = 5 # position", "= 0 self.total_shares_buy = 0 self.total_buys_value = 0 self.total_shares_sold =", "= INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward =0 self.portfolio =0 self.shares_held", "current step to a random point within the data frame", "self.net_worth = self.balance + new_portfolio if self.net_worth > self.max_net_worth: self.max_net_worth", "10000 DATA_HIS_PERIOD = 5 # position constant FLAT = 0", "random.uniform( # self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step, \"close\"]) # Set the current", "= new_df # Set the current step to a random", "round(self.fee * self.krw_balance,2) # commission fee self.shares_held = shares_bought self.balance", "BUY: #buy if self.position == FLAT: # if previous position", "self.close_price self.reward += ((self.entry_price - self.exit_price) / self.exit_price + 1)", "# Append additional data and scale each value to between", "'open'].values / MAX_SHARE_PRICE, 
self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values /", "= spaces.Discrete(len(self.actions)) # self.action_space = spaces.Box( # low=np.array([0, 0]), high=np.array([3,", "trade info self.show_trade = show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee = 0.0005 #", "int(self.balance / self.close_price) shares_bought = int(total_possible * amount)//100 *100 self.krw_balance", "self.total_sales_value += self.shares_held * self.close_price self.entry_price = 0 # clear", "+ self.reward) self.balance += round(self.krw_balance*(1.0+self.reward),2) self.n_long += 1 self.total_shares_buy +=", "MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES, ])", "\"close\"]) # Set the current price to the last close", "opened per trade # valid action sequence would be #", "- self.exit_price) / self.exit_price + 1) * ( 1 -", "= 0 # no position LONG = 1 # buy", "== SHORT: # if previous position was short self.position =", "0 else -100 if self.net_worth <= 0: done = True", "- 1): self.done = True self.reward = self.get_profit() # return", "__init__(self, df,show_trade=True): super(StockTradingEnv, self).__init__() # show the trade info self.show_trade", "self.current_step % 1 == 0): print(\"Tick: {0}/ Portfolio (krw-won): {1},", "True if (self.show_trade and self.current_step % 1 == 0): print(\"Tick:", "shares held total_possible = int(self.balance / self.close_price) self.shares_held = int(total_possible", "self.fee) ** 2 - 1 new_portfolio = self.krw_balance * (1.0", "= 0 new_portfolio = 0 self.net_worth = self.balance + new_portfolio", "self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0 self.reward = 0 self.portfolio", "= True if (self.show_trade and self.current_step % 1 == 
0):", "* (1.0 + temp_reward) elif (self.position == SHORT): temp_reward =", "= int(self.balance / self.close_price) shares_bought = int(total_possible * amount)//100 *100", "Append additional data and scale each value to between 0-1", "{0}/ Short: {1}\".format(self.n_long, self.n_short)) # save the history data self.history.append([", "FLAT self.balance = INITIAL_ACCOUNT_BALANCE self.net_worth = INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE", "self.net_worth, done, {'portfolio': np.array([self.portfolio]), \"history\": self.history, \"n_trades\": {'long': self.n_long, 'short':", "self.action = BUY # record action as buy self.entry_price =", "self.df.loc[self.current_step, \"close\"]) # Set the current price to the last", "sold: {self.total_shares_sold})') print(f'Avg cost for held shares: {self.cost_basis} (Total sales", "high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16) self.history = [] def _next_observation(self): obs =", "self.krw_balance * (1.0 + self.reward) self.balance += round(self.krw_balance*(1.0+self.reward),2) self.n_long +=", "short self.position = FLAT # update position to flat self.action", "FLAT: # if previous position was flat self.position = LONG", "price to the last close price self.close_price = self.df.loc[self.current_step,\"close\"] amount", "json import gym from gym import spaces import pandas as", "self).__init__() # show the trade info self.show_trade = show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"]", "1) * (1 - self.fee) ** 2 - 1 #self.krw_balance", "self.shares_held = shares_bought self.balance -= self.krw_balance-commission #self.cost_basis = (prev_cost +", "0 self.n_long=0 self.n_short=0 self.history=[] # pass test dataset to environment", "= self.get_profit() # return reward at end of the game", "self.entry_price) / self.entry_price + 1) * (1 - self.fee) **", "# for i in range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation() def", "= show_trade 
self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee = 0.0005 # brokage commission self.df", "the shares_ elif action == SELL: if self.position == FLAT:", "self.total_sales_value = 0 self.n_long = 0 self.n_short = 0 self.history=[]", "= FLAT self.action = SELL self.exit_price = self.close_price self.reward +=", "evaluated in krw won if (self.position == LONG): temp_reward =", "self.show_trade = show_trade self.actions=[\"FLAT\",\"LONG\",\"SHORT\"] self.fee = 0.0005 # brokage commission", "= self.df.loc[self.current_step,\"close\"] amount = 0.5 #the old version has this", "position LONG = 1 # buy position SHORT = 2", "for the last five prices self.observation_space = spaces.Box( low=0, high=1,", "0 self.shares_held = 0 self.cost_basis = 0 self.total_shares_buy = 0", "self.portfolio, self.reward)) obs = self._next_observation() if (self.current_step > (self.df.shape[0]) -", "self.position == SHORT: # if previous position was short self.position", "len(self.df.loc[:, 'open'].values) - 1: self.current_step = DATA_HIS_PERIOD # loop training", "LONG: self.position = FLAT self.action = SELL self.exit_price = self.close_price", "time step within the environment self._take_action(action) done = False self.current_step", "self.close_price + 1) * ( 1 - self.fee) ** 2", "total_possible = int(self.balance / self.close_price) shares_bought = int(total_possible * amount)//100", "= 0 self.total_buys_value = 0 self.total_shares_sold = 0 self.total_sales_value =", "(e.g.) 
\"buy - buy\" would be considred \"buy - hold\"", "self.entry_price + 1) * (1 - self.fee) ** 2 -", "self.entry_price + 1) * ( 1 - self.fee) ** 2", "- 1 elif(self.position == SHORT): profit = ((self.entry_price - self.close_Price)/self.close_Price", "BUY # record action as buy self.entry_price = self.close_price #", "the history self.action = HOLD self.position = FLAT self.balance =", "5 MAX_STEPS = 20000 MAX_DAY_CHANGE = 1 INITIAL_ACCOUNT_BALANCE = 10000", "- self.entry_price) / self.entry_price + 1) * (1 - self.fee)", "held shares: {self.cost_basis} (Total sales value: {self.total_sales_value})') print(f'Net worth: {self.net_worth}", "gym\"\"\" metadata = {'render.modes': ['human']} def __init__(self, df,show_trade=True): super(StockTradingEnv, self).__init__()", "True if self.current_step > len(self.df.loc[:, 'open'].values) - 1: self.current_step =", "= 0 self.n_long = 0 self.n_short = 0 self.history=[] #", "- self.close_Price)/self.close_Price + 1)*(1-self.fee)**2 - 1 else: profit = 0", "i in range(DATA_HIS_PERIOD): # self.history.append([0.0,0.0,0.0,0.0,0.0,0.0]) return self._next_observation() def render(self, mode='human',", "self.reward)) obs = self._next_observation() if (self.current_step > (self.df.shape[0]) - 1):", "INITIAL_ACCOUNT_BALANCE = 10000 DATA_HIS_PERIOD = 5 # position constant FLAT", "* amount)//100 *100 self.krw_balance = self.shares_held * self.entry_price # buy", "self.portfolio, self.total_shares_buy, self.total_buys_value, self.total_shares_sold, self.total_sales_value]) #self.history.append((self.action, self.current_step, self.closingPrice, self.portfolio, self.reward))", "= 5 MAX_STEPS = 20000 MAX_DAY_CHANGE = 1 INITIAL_ACCOUNT_BALANCE =", "position to flat self.action = BUY # record action as", "SELL: if self.position == FLAT: self.position = SHORT self.action =", "+ self.reward) # evaluate cumulative return in krw-won self.balance +=", "0): print(\"Tick: {0}/ Portfolio (krw-won): {1}, balance: {2}\".format(self.current_step, 
self.portfolio,self.net_worth)) print(\"Long:", "np.array([self.portfolio]), \"history\": self.history, \"n_trades\": {'long': self.n_long, 'short': self.n_short}} #return obs,", "within the data frame # self.current_step = random.randint( # 0,", "sell # SHORT : sell - hold - hold -", "be opened per trade # valid action sequence would be", "self.action = HOLD #hold if action == BUY: #buy if", "\"buy - buy\" would be considred \"buy - hold\" self.action", "- self.fee) ** 2 - 1 new_portfolio = self.krw_balance *", "1 new_portfolio = self.krw_balance * (1.0 + temp_reward) else: temp_reward", "0 # clear the shares_ elif action == SELL: if", "/ self.close_price + 1) * ( 1 - self.fee) **", "INITIAL_ACCOUNT_BALANCE self.net_worth = INITIAL_ACCOUNT_BALANCE self.max_net_worth = INITIAL_ACCOUNT_BALANCE self.krw_balance = 0", "3e10 MAX_OPEN_POSITIONS = 5 MAX_STEPS = 20000 MAX_DAY_CHANGE = 1", "** 2 - 1 # calculate reward #self.krw_balance = self.krw_balance", "= 5 # position constant FLAT = 0 # no", "+= 1 # record number of short self.total_shares_sold += self.shares_held", "-= self.krw_balance-commission #self.cost_basis = (prev_cost + additional_cost) / (self.shares_held +", "variable, so reserve # action comes from the agent #", "self.krw_balance,2) # commission fee self.shares_held = shares_bought self.balance -= self.krw_balance-commission", "clear entry price self.shares_held = 0 # clear the shares_", "self.action = HOLD self.position = FLAT self.balance = INITIAL_ACCOUNT_BALANCE self.net_worth", "\"n_trades\": {'long': self.n_long, 'short': self.n_short}} #return obs, reward, done, {}", "Buy x%, Sell x%, Hold, etc. 
self.action_space = spaces.Discrete(len(self.actions)) #", "position was flat self.position = LONG #update position to long", "the environment self._take_action(action) done = False self.current_step += 1 delay_modifier", "= DATA_HIS_PERIOD # loop training # when loop training, then", "= 0 self.reward = 0 self.portfolio = 0 self.shares_held =", "random point within the data frame # self.current_step = random.randint(", "= (0, MAX_ACCOUNT_BALANCE) # Actions of the format Buy x%,", "return obs, self.net_worth, done, {'portfolio': np.array([self.portfolio]), \"history\": self.history, \"n_trades\": {'long':", "int(total_possible * amount)//100 *100 self.krw_balance = self.shares_held * self.entry_price #", "df,show_trade=True): super(StockTradingEnv, self).__init__() # show the trade info self.show_trade =", "hold\" self.action = HOLD #hold if action == BUY: #buy", "# self.df.loc[self.current_step, \"open\"], self.df.loc[self.current_step, \"close\"]) # Set the current price", "HOLD #hold if action == BUY: #buy if self.position ==", "balance: {2}\".format(self.current_step, self.portfolio,self.net_worth)) print(\"Long: {0}/ Short: {1}\".format(self.n_long, self.n_short)) # save", "1 == 0): print(\"Tick: {0}/ Portfolio (krw-won): {1}, balance: {2}\".format(self.current_step,", "= 0 self.entry_price = 0 # [coin + krw_won] total", "# Render the environment to the screen profit = self.net_worth", "spaces import pandas as pd import numpy as np MAX_ACCOUNT_BALANCE", "self.get_profit() # return reward at end of the game return", "+= round(self.krw_balance*(1.0+self.reward),2) self.n_long += 1 self.total_shares_buy += self.shares_held self.total_buys_value +=", "price to a random price within the time step #", "1 #self.krw_balance = self.krw_balance * (1.0 + self.reward) self.balance +=", "to a random point within the data frame # self.current_step", "x%, Hold, etc. 
self.action_space = spaces.Discrete(len(self.actions)) # self.action_space = spaces.Box(", "reward, done, {} def get_profit(self): if(self.position == LONG): profit =", "if self.position == FLAT: # if previous position was flat", "- buy\" would be considred \"buy - hold\" self.action =", "'low'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE, self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values /", "MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0) return", "when loop training, then clear the history self.action = HOLD" ]
[ "= state_ids[:2] features = [] for state_id in tqdm(state_ids): geojson", "state_id in state_ids: data_files.append( ( state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\",", "state_ids[-1] else \"\")) with open(\"public/data/pop.json\", \"a\") as f: f.write(\"]}\") def", "population JSON heatmaps...\") # compute_json_heatmaps(state_ids) print(\"Aggregating JSON files into one...\")", "f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\" if state_id != state_ids[-1] else \"\")) with", "process_map def main(): start = time.time() # print(\"Downloading files...\") #", "file) if os.path.isfile(file_path) and pathlib.Path(file_path).suffix == \".txt\": state_ids.append(file[file.index(\"BG\") + 2", "with open(\"public/data/pop.json\", \"a\") as f: f.write(\"]}\") def create_json_for_state(args): return aggregate(*args,", "# print(\"Unzipping shapefiles...\") # unzip_files() state_ids = [] for file", "features = [] for state_id in tqdm(state_ids): geojson = None", "f: geojson = json.load(f) with open(\"public/data/pop.json\", \"a\") as f: f.write(json.dumps(geojson[\"features\"])[1:-1]", "from download import DOWNLOAD_PATH, download_files, unzip_files from tqdm.contrib.concurrent import process_map", "f\"tl_2020_{state_id}_bg.shp\"), ) ) process_map(create_json_for_state, data_files, max_workers=4) def aggegrate_json_files(state_ids): with open(\"public/data/pop.json\",", "def aggegrate_json_files(state_ids): with open(\"public/data/pop.json\", \"w\") as f: f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\":", "open(\"public/data/pop.json\", \"w\") as f: f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\": [\"\"\") # state_ids", "# compute_json_heatmaps(state_ids) print(\"Aggregating JSON files into one...\") aggegrate_json_files(state_ids) end =", "(\",\" if state_id != state_ids[-1] else \"\")) with open(\"public/data/pop.json\", \"a\")", 
"os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"), ) ) process_map(create_json_for_state, data_files, max_workers=4)", "for file in os.listdir(DOWNLOAD_PATH): file_path = os.path.join(DOWNLOAD_PATH, file) if os.path.isfile(file_path)", "# print(\"Downloading files...\") # download_files() # print(\"Unzipping shapefiles...\") # unzip_files()", "from tqdm.contrib.concurrent import process_map def main(): start = time.time() #", "else \"\")) with open(\"public/data/pop.json\", \"a\") as f: f.write(\"]}\") def create_json_for_state(args):", "os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"), ) ) process_map(create_json_for_state, data_files, max_workers=4) def aggegrate_json_files(state_ids):", "== \".txt\": state_ids.append(file[file.index(\"BG\") + 2 : file.index(\".\")]) # print(\"Computing population", "# unzip_files() state_ids = [] for file in os.listdir(DOWNLOAD_PATH): file_path", "time.time() print(f\"Done in {(end - start):0.2f}s\") def compute_json_heatmaps(state_ids): data_files =", "files...\") # download_files() # print(\"Unzipping shapefiles...\") # unzip_files() state_ids =", "for state_id in state_ids: data_files.append( ( state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH,", "= time.time() print(f\"Done in {(end - start):0.2f}s\") def compute_json_heatmaps(state_ids): data_files", ") process_map(create_json_for_state, data_files, max_workers=4) def aggegrate_json_files(state_ids): with open(\"public/data/pop.json\", \"w\") as", "tqdm.contrib.concurrent import process_map def main(): start = time.time() # print(\"Downloading", "as f: f.write(\"]}\") def create_json_for_state(args): return aggregate(*args, hide_output=True) if __name__", "time from tqdm import tqdm from aggregator import aggregate from", "# print(\"Computing population JSON heatmaps...\") # 
compute_json_heatmaps(state_ids) print(\"Aggregating JSON files", "compute_json_heatmaps(state_ids) print(\"Aggregating JSON files into one...\") aggegrate_json_files(state_ids) end = time.time()", "tqdm import tqdm from aggregator import aggregate from download import", "for state_id in tqdm(state_ids): geojson = None with open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\"))", "aggregate from download import DOWNLOAD_PATH, download_files, unzip_files from tqdm.contrib.concurrent import", "in {(end - start):0.2f}s\") def compute_json_heatmaps(state_ids): data_files = [] for", "heatmaps...\") # compute_json_heatmaps(state_ids) print(\"Aggregating JSON files into one...\") aggegrate_json_files(state_ids) end", "open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\")) as f: geojson = json.load(f) with open(\"public/data/pop.json\", \"a\")", "pathlib import time from tqdm import tqdm from aggregator import", "\"FeatureCollection\", \"features\": [\"\"\") # state_ids = state_ids[:2] features = []", "f: f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\" if state_id != state_ids[-1] else \"\"))", "print(\"Computing population JSON heatmaps...\") # compute_json_heatmaps(state_ids) print(\"Aggregating JSON files into", "state_id in tqdm(state_ids): geojson = None with open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\")) as", "os.path.join(DOWNLOAD_PATH, file) if os.path.isfile(file_path) and pathlib.Path(file_path).suffix == \".txt\": state_ids.append(file[file.index(\"BG\") +", "= os.path.join(DOWNLOAD_PATH, file) if os.path.isfile(file_path) and pathlib.Path(file_path).suffix == \".txt\": state_ids.append(file[file.index(\"BG\")", "as f: geojson = json.load(f) with open(\"public/data/pop.json\", \"a\") as f:", "\"w\") as f: f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\": [\"\"\") # state_ids =", "one...\") aggegrate_json_files(state_ids) end = time.time() print(f\"Done in {(end - start):0.2f}s\")", "with open(\"public/data/pop.json\", \"w\") as f: 
f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\": [\"\"\") #", "aggegrate_json_files(state_ids) end = time.time() print(f\"Done in {(end - start):0.2f}s\") def", "state_ids[:2] features = [] for state_id in tqdm(state_ids): geojson =", "with open(\"public/data/pop.json\", \"a\") as f: f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\" if state_id", "time.time() # print(\"Downloading files...\") # download_files() # print(\"Unzipping shapefiles...\") #", "json.load(f) with open(\"public/data/pop.json\", \"a\") as f: f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\" if", "data_files, max_workers=4) def aggegrate_json_files(state_ids): with open(\"public/data/pop.json\", \"w\") as f: f.write(\"\"\"{\"type\":", "in os.listdir(DOWNLOAD_PATH): file_path = os.path.join(DOWNLOAD_PATH, file) if os.path.isfile(file_path) and pathlib.Path(file_path).suffix", "\"\")) with open(\"public/data/pop.json\", \"a\") as f: f.write(\"]}\") def create_json_for_state(args): return", ": file.index(\".\")]) # print(\"Computing population JSON heatmaps...\") # compute_json_heatmaps(state_ids) print(\"Aggregating", "import time from tqdm import tqdm from aggregator import aggregate", "unzip_files from tqdm.contrib.concurrent import process_map def main(): start = time.time()", "unzip_files() state_ids = [] for file in os.listdir(DOWNLOAD_PATH): file_path =", "os.listdir(DOWNLOAD_PATH): file_path = os.path.join(DOWNLOAD_PATH, file) if os.path.isfile(file_path) and pathlib.Path(file_path).suffix ==", "with open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\")) as f: geojson = json.load(f) with open(\"public/data/pop.json\",", "state_ids = [] for file in os.listdir(DOWNLOAD_PATH): file_path = os.path.join(DOWNLOAD_PATH,", "state_ids: data_files.append( ( state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"), )", "f\"tl_2020_{state_id}_bg\", 
f\"tl_2020_{state_id}_bg.shp\"), ) ) process_map(create_json_for_state, data_files, max_workers=4) def aggegrate_json_files(state_ids): with", "tqdm from aggregator import aggregate from download import DOWNLOAD_PATH, download_files,", "!= state_ids[-1] else \"\")) with open(\"public/data/pop.json\", \"a\") as f: f.write(\"]}\")", "DOWNLOAD_PATH, download_files, unzip_files from tqdm.contrib.concurrent import process_map def main(): start", "def main(): start = time.time() # print(\"Downloading files...\") # download_files()", "JSON heatmaps...\") # compute_json_heatmaps(state_ids) print(\"Aggregating JSON files into one...\") aggegrate_json_files(state_ids)", "compute_json_heatmaps(state_ids): data_files = [] for state_id in state_ids: data_files.append( (", "os import pathlib import time from tqdm import tqdm from", "f.write(\"]}\") def create_json_for_state(args): return aggregate(*args, hide_output=True) if __name__ == \"__main__\":", "+ 2 : file.index(\".\")]) # print(\"Computing population JSON heatmaps...\") #", "[\"\"\") # state_ids = state_ids[:2] features = [] for state_id", "main(): start = time.time() # print(\"Downloading files...\") # download_files() #", "file_path = os.path.join(DOWNLOAD_PATH, file) if os.path.isfile(file_path) and pathlib.Path(file_path).suffix == \".txt\":", "= [] for state_id in tqdm(state_ids): geojson = None with", "geojson = None with open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\")) as f: geojson =", "f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\": [\"\"\") # state_ids = state_ids[:2] features =", "2 : file.index(\".\")]) # print(\"Computing population JSON heatmaps...\") # compute_json_heatmaps(state_ids)", "\".txt\": state_ids.append(file[file.index(\"BG\") + 2 : file.index(\".\")]) # print(\"Computing population JSON", "download_files, unzip_files from tqdm.contrib.concurrent import process_map def main(): start =", "state_id != state_ids[-1] else \"\")) with open(\"public/data/pop.json\", \"a\") as 
f:", "pathlib.Path(file_path).suffix == \".txt\": state_ids.append(file[file.index(\"BG\") + 2 : file.index(\".\")]) # print(\"Computing", "state_ids = state_ids[:2] features = [] for state_id in tqdm(state_ids):", "print(\"Aggregating JSON files into one...\") aggegrate_json_files(state_ids) end = time.time() print(f\"Done", "data_files.append( ( state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"), ) )", "if os.path.isfile(file_path) and pathlib.Path(file_path).suffix == \".txt\": state_ids.append(file[file.index(\"BG\") + 2 :", "[] for state_id in tqdm(state_ids): geojson = None with open(os.path.join(DOWNLOAD_PATH,", "shapefiles...\") # unzip_files() state_ids = [] for file in os.listdir(DOWNLOAD_PATH):", "download import DOWNLOAD_PATH, download_files, unzip_files from tqdm.contrib.concurrent import process_map def", "aggegrate_json_files(state_ids): with open(\"public/data/pop.json\", \"w\") as f: f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\": [\"\"\")", "download_files() # print(\"Unzipping shapefiles...\") # unzip_files() state_ids = [] for", "{(end - start):0.2f}s\") def compute_json_heatmaps(state_ids): data_files = [] for state_id", "= [] for file in os.listdir(DOWNLOAD_PATH): file_path = os.path.join(DOWNLOAD_PATH, file)", "in tqdm(state_ids): geojson = None with open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\")) as f:", "import tqdm from aggregator import aggregate from download import DOWNLOAD_PATH,", "print(\"Unzipping shapefiles...\") # unzip_files() state_ids = [] for file in", "def create_json_for_state(args): return aggregate(*args, hide_output=True) if __name__ == \"__main__\": main()", "as f: f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\": [\"\"\") # state_ids = state_ids[:2]", "and pathlib.Path(file_path).suffix == \".txt\": state_ids.append(file[file.index(\"BG\") + 2 : file.index(\".\")]) #", 
"import aggregate from download import DOWNLOAD_PATH, download_files, unzip_files from tqdm.contrib.concurrent", "file in os.listdir(DOWNLOAD_PATH): file_path = os.path.join(DOWNLOAD_PATH, file) if os.path.isfile(file_path) and", "None with open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\")) as f: geojson = json.load(f) with", "open(\"public/data/pop.json\", \"a\") as f: f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\" if state_id !=", "files into one...\") aggegrate_json_files(state_ids) end = time.time() print(f\"Done in {(end", "f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"), ) ) process_map(create_json_for_state, data_files, max_workers=4) def", "state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"), ) ) process_map(create_json_for_state, data_files,", "open(\"public/data/pop.json\", \"a\") as f: f.write(\"]}\") def create_json_for_state(args): return aggregate(*args, hide_output=True)", "f: f.write(\"\"\"{\"type\": \"FeatureCollection\", \"features\": [\"\"\") # state_ids = state_ids[:2] features", "in state_ids: data_files.append( ( state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"),", "import os import pathlib import time from tqdm import tqdm", "= time.time() # print(\"Downloading files...\") # download_files() # print(\"Unzipping shapefiles...\")", "# download_files() # print(\"Unzipping shapefiles...\") # unzip_files() state_ids = []", "# state_ids = state_ids[:2] features = [] for state_id in", "( state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"), os.path.join(DOWNLOAD_PATH, f\"tl_2020_{state_id}_bg\", f\"tl_2020_{state_id}_bg.shp\"), ) ) process_map(create_json_for_state,", "\"a\") as f: 
f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\" if state_id != state_ids[-1]", "data_files = [] for state_id in state_ids: data_files.append( ( state_id,", ") ) process_map(create_json_for_state, data_files, max_workers=4) def aggegrate_json_files(state_ids): with open(\"public/data/pop.json\", \"w\")", "from tqdm import tqdm from aggregator import aggregate from download", "os.path.isfile(file_path) and pathlib.Path(file_path).suffix == \".txt\": state_ids.append(file[file.index(\"BG\") + 2 : file.index(\".\")])", "state_ids.append(file[file.index(\"BG\") + 2 : file.index(\".\")]) # print(\"Computing population JSON heatmaps...\")", "+ (\",\" if state_id != state_ids[-1] else \"\")) with open(\"public/data/pop.json\",", "end = time.time() print(f\"Done in {(end - start):0.2f}s\") def compute_json_heatmaps(state_ids):", "import DOWNLOAD_PATH, download_files, unzip_files from tqdm.contrib.concurrent import process_map def main():", "max_workers=4) def aggegrate_json_files(state_ids): with open(\"public/data/pop.json\", \"w\") as f: f.write(\"\"\"{\"type\": \"FeatureCollection\",", "JSON files into one...\") aggegrate_json_files(state_ids) end = time.time() print(f\"Done in", "[] for file in os.listdir(DOWNLOAD_PATH): file_path = os.path.join(DOWNLOAD_PATH, file) if", "- start):0.2f}s\") def compute_json_heatmaps(state_ids): data_files = [] for state_id in", "geojson = json.load(f) with open(\"public/data/pop.json\", \"a\") as f: f.write(json.dumps(geojson[\"features\"])[1:-1] +", "print(\"Downloading files...\") # download_files() # print(\"Unzipping shapefiles...\") # unzip_files() state_ids", "print(f\"Done in {(end - start):0.2f}s\") def compute_json_heatmaps(state_ids): data_files = []", "start = time.time() # print(\"Downloading files...\") # download_files() # print(\"Unzipping", "start):0.2f}s\") def compute_json_heatmaps(state_ids): data_files = [] for state_id in state_ids:", "import process_map def main(): start = time.time() # print(\"Downloading 
files...\")", "file.index(\".\")]) # print(\"Computing population JSON heatmaps...\") # compute_json_heatmaps(state_ids) print(\"Aggregating JSON", "import json import os import pathlib import time from tqdm", "= None with open(os.path.join(DOWNLOAD_PATH, f\"{state_id}.json\")) as f: geojson = json.load(f)", "if state_id != state_ids[-1] else \"\")) with open(\"public/data/pop.json\", \"a\") as", "import pathlib import time from tqdm import tqdm from aggregator", "into one...\") aggegrate_json_files(state_ids) end = time.time() print(f\"Done in {(end -", "def compute_json_heatmaps(state_ids): data_files = [] for state_id in state_ids: data_files.append(", "as f: f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\" if state_id != state_ids[-1] else", "f\"{state_id}.json\")) as f: geojson = json.load(f) with open(\"public/data/pop.json\", \"a\") as", "aggregator import aggregate from download import DOWNLOAD_PATH, download_files, unzip_files from", "process_map(create_json_for_state, data_files, max_workers=4) def aggegrate_json_files(state_ids): with open(\"public/data/pop.json\", \"w\") as f:", "\"a\") as f: f.write(\"]}\") def create_json_for_state(args): return aggregate(*args, hide_output=True) if", "from aggregator import aggregate from download import DOWNLOAD_PATH, download_files, unzip_files", "f: f.write(\"]}\") def create_json_for_state(args): return aggregate(*args, hide_output=True) if __name__ ==", "json import os import pathlib import time from tqdm import", "\"features\": [\"\"\") # state_ids = state_ids[:2] features = [] for", "= json.load(f) with open(\"public/data/pop.json\", \"a\") as f: f.write(json.dumps(geojson[\"features\"])[1:-1] + (\",\"", "[] for state_id in state_ids: data_files.append( ( state_id, os.path.join(DOWNLOAD_PATH, f\"CenPop2020_Mean_BG{state_id}.txt\"),", "= [] for state_id in state_ids: data_files.append( ( state_id, os.path.join(DOWNLOAD_PATH,", "tqdm(state_ids): geojson = None with open(os.path.join(DOWNLOAD_PATH, 
f\"{state_id}.json\")) as f: geojson" ]
[ "np.array(X) Y = np.array(Y) X_train, X_test, y_train, y_test = model_selection.train_test_split(X,", "= [] Y = [] for index, classlabel in enumerate(classes):", "= [\"car\", \"bycycle\", \"motorcycle\", \"pedestrian\"] num_class = len(classes) image_size =", "np.asarray(image) / 255 X.append(data) Y.append(index) X = np.array(X) Y =", "= np.array(Y) X_train, X_test, y_train, y_test = model_selection.train_test_split(X, Y) xy", "import model_selection classes = [\"car\", \"bycycle\", \"motorcycle\", \"pedestrian\"] num_class =", "photos_dir = \"./\" + classlabel files = glob.glob(photos_dir + \"/*.jpg\")", "\"./\" + classlabel files = glob.glob(photos_dir + \"/*.jpg\") for i,", "[] for index, classlabel in enumerate(classes): photos_dir = \"./\" +", "= np.asarray(image) / 255 X.append(data) Y.append(index) X = np.array(X) Y", "Y = [] for index, classlabel in enumerate(classes): photos_dir =", "/ 255 X.append(data) Y.append(index) X = np.array(X) Y = np.array(Y)", "\"bycycle\", \"motorcycle\", \"pedestrian\"] num_class = len(classes) image_size = 50 #", "# 画像の読み込み X = [] Y = [] for index,", "numpy as np from sklearn import model_selection classes = [\"car\",", "PIL import Image import os, glob import numpy as np", "data = np.asarray(image) / 255 X.append(data) Y.append(index) X = np.array(X)", "as np from sklearn import model_selection classes = [\"car\", \"bycycle\",", "= image.resize((image_size, image_size)) data = np.asarray(image) / 255 X.append(data) Y.append(index)", "+ \"/*.jpg\") for i, file in enumerate(files): if i >=237:", "len(classes) image_size = 50 # 画像の読み込み X = [] Y", "image_size)) data = np.asarray(image) / 255 X.append(data) Y.append(index) X =", "画像の読み込み X = [] Y = [] for index, classlabel", "y_train, y_test = model_selection.train_test_split(X, Y) xy = (X_train, X_test, y_train,", "image = image.convert(\"RGB\") image = image.resize((image_size, image_size)) data = np.asarray(image)", "files = glob.glob(photos_dir + \"/*.jpg\") for i, file in 
enumerate(files):", "Image import os, glob import numpy as np from sklearn", "\"motorcycle\", \"pedestrian\"] num_class = len(classes) image_size = 50 # 画像の読み込み", "num_class = len(classes) image_size = 50 # 画像の読み込み X =", "image.resize((image_size, image_size)) data = np.asarray(image) / 255 X.append(data) Y.append(index) X", "X = np.array(X) Y = np.array(Y) X_train, X_test, y_train, y_test", "import numpy as np from sklearn import model_selection classes =", "= np.array(X) Y = np.array(Y) X_train, X_test, y_train, y_test =", "Y.append(index) X = np.array(X) Y = np.array(Y) X_train, X_test, y_train,", "= model_selection.train_test_split(X, Y) xy = (X_train, X_test, y_train, y_test) np.save(\"./vehicle.npy\",", "= image.convert(\"RGB\") image = image.resize((image_size, image_size)) data = np.asarray(image) /", "50 # 画像の読み込み X = [] Y = [] for", "classes = [\"car\", \"bycycle\", \"motorcycle\", \"pedestrian\"] num_class = len(classes) image_size", "= Image.open(file) image = image.convert(\"RGB\") image = image.resize((image_size, image_size)) data", "X_test, y_train, y_test = model_selection.train_test_split(X, Y) xy = (X_train, X_test,", "[] Y = [] for index, classlabel in enumerate(classes): photos_dir", "X_train, X_test, y_train, y_test = model_selection.train_test_split(X, Y) xy = (X_train,", "os, glob import numpy as np from sklearn import model_selection", "for i, file in enumerate(files): if i >=237: break image", "image_size = 50 # 画像の読み込み X = [] Y =", "[\"car\", \"bycycle\", \"motorcycle\", \"pedestrian\"] num_class = len(classes) image_size = 50", "X = [] Y = [] for index, classlabel in", "from sklearn import model_selection classes = [\"car\", \"bycycle\", \"motorcycle\", \"pedestrian\"]", "= len(classes) image_size = 50 # 画像の読み込み X = []", "image.convert(\"RGB\") image = image.resize((image_size, image_size)) data = np.asarray(image) / 255", "= 50 # 画像の読み込み X = [] Y = []", "+ classlabel files = glob.glob(photos_dir + \"/*.jpg\") for i, file", "file in 
enumerate(files): if i >=237: break image = Image.open(file)", "from PIL import Image import os, glob import numpy as", "in enumerate(files): if i >=237: break image = Image.open(file) image", "classlabel files = glob.glob(photos_dir + \"/*.jpg\") for i, file in", "= glob.glob(photos_dir + \"/*.jpg\") for i, file in enumerate(files): if", "model_selection classes = [\"car\", \"bycycle\", \"motorcycle\", \"pedestrian\"] num_class = len(classes)", "glob.glob(photos_dir + \"/*.jpg\") for i, file in enumerate(files): if i", "if i >=237: break image = Image.open(file) image = image.convert(\"RGB\")", "enumerate(classes): photos_dir = \"./\" + classlabel files = glob.glob(photos_dir +", "X.append(data) Y.append(index) X = np.array(X) Y = np.array(Y) X_train, X_test,", "sklearn import model_selection classes = [\"car\", \"bycycle\", \"motorcycle\", \"pedestrian\"] num_class", "import os, glob import numpy as np from sklearn import", "= [] for index, classlabel in enumerate(classes): photos_dir = \"./\"", "import Image import os, glob import numpy as np from", "glob import numpy as np from sklearn import model_selection classes", ">=237: break image = Image.open(file) image = image.convert(\"RGB\") image =", "break image = Image.open(file) image = image.convert(\"RGB\") image = image.resize((image_size,", "image = Image.open(file) image = image.convert(\"RGB\") image = image.resize((image_size, image_size))", "image = image.resize((image_size, image_size)) data = np.asarray(image) / 255 X.append(data)", "model_selection.train_test_split(X, Y) xy = (X_train, X_test, y_train, y_test) np.save(\"./vehicle.npy\", xy)", "255 X.append(data) Y.append(index) X = np.array(X) Y = np.array(Y) X_train,", "Image.open(file) image = image.convert(\"RGB\") image = image.resize((image_size, image_size)) data =", "i, file in enumerate(files): if i >=237: break image =", "np from sklearn import model_selection classes = [\"car\", \"bycycle\", \"motorcycle\",", "enumerate(files): if i >=237: 
break image = Image.open(file) image =", "index, classlabel in enumerate(classes): photos_dir = \"./\" + classlabel files", "= \"./\" + classlabel files = glob.glob(photos_dir + \"/*.jpg\") for", "for index, classlabel in enumerate(classes): photos_dir = \"./\" + classlabel", "np.array(Y) X_train, X_test, y_train, y_test = model_selection.train_test_split(X, Y) xy =", "i >=237: break image = Image.open(file) image = image.convert(\"RGB\") image", "Y = np.array(Y) X_train, X_test, y_train, y_test = model_selection.train_test_split(X, Y)", "\"/*.jpg\") for i, file in enumerate(files): if i >=237: break", "in enumerate(classes): photos_dir = \"./\" + classlabel files = glob.glob(photos_dir", "\"pedestrian\"] num_class = len(classes) image_size = 50 # 画像の読み込み X", "y_test = model_selection.train_test_split(X, Y) xy = (X_train, X_test, y_train, y_test)", "classlabel in enumerate(classes): photos_dir = \"./\" + classlabel files =" ]
[ "{number_quests_available} quests available\") random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10) random_quest = quest_scenarios.get_random_scenario(1) campaign", "= waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have {number_waypoint_scenario} waypoint available\") number_quests_available = quest_scenarios.get_number_of_quest_scenarios()", "__name__ == \"__main__\": number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have {number_waypoint_scenario} waypoint", "log.info(f\"We have {number_waypoint_scenario} waypoint available\") number_quests_available = quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have", "from services import waypoint_scenarios, quest_scenarios from services.build_campaign import Campaign from", "= quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have {number_quests_available} quests available\") random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10)", "waypoint_scenarios, quest_scenarios from services.build_campaign import Campaign from log_setup import log", "number_quests_available = quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have {number_quests_available} quests available\") random_waypoint_scenario =", "quest_scenarios from services.build_campaign import Campaign from log_setup import log if", "from services.build_campaign import Campaign from log_setup import log if __name__", "log.info(f\"We have {number_quests_available} quests available\") random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10) random_quest =", "= waypoint_scenarios.get_random_scenario(10) random_quest = quest_scenarios.get_random_scenario(1) campaign = Campaign() campaign.build_campaign( waypoint_list=random_waypoint_scenario,", "Campaign from log_setup import log if __name__ == \"__main__\": number_waypoint_scenario", "<reponame>immortel32/Sword_Sorcery_Story_Generator from services import 
waypoint_scenarios, quest_scenarios from services.build_campaign import Campaign", "if __name__ == \"__main__\": number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have {number_waypoint_scenario}", "random_quest = quest_scenarios.get_random_scenario(1) campaign = Campaign() campaign.build_campaign( waypoint_list=random_waypoint_scenario, quest_list=random_quest )", "have {number_quests_available} quests available\") random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10) random_quest = quest_scenarios.get_random_scenario(1)", "available\") number_quests_available = quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have {number_quests_available} quests available\") random_waypoint_scenario", "random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10) random_quest = quest_scenarios.get_random_scenario(1) campaign = Campaign() campaign.build_campaign(", "from log_setup import log if __name__ == \"__main__\": number_waypoint_scenario =", "import waypoint_scenarios, quest_scenarios from services.build_campaign import Campaign from log_setup import", "number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have {number_waypoint_scenario} waypoint available\") number_quests_available =", "waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have {number_waypoint_scenario} waypoint available\") number_quests_available = quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We", "import Campaign from log_setup import log if __name__ == \"__main__\":", "services import waypoint_scenarios, quest_scenarios from services.build_campaign import Campaign from log_setup", "available\") random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10) random_quest = quest_scenarios.get_random_scenario(1) campaign = Campaign()", "import log if __name__ == \"__main__\": number_waypoint_scenario = 
waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We", "waypoint_scenarios.get_random_scenario(10) random_quest = quest_scenarios.get_random_scenario(1) campaign = Campaign() campaign.build_campaign( waypoint_list=random_waypoint_scenario, quest_list=random_quest", "waypoint available\") number_quests_available = quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have {number_quests_available} quests available\")", "have {number_waypoint_scenario} waypoint available\") number_quests_available = quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have {number_quests_available}", "services.build_campaign import Campaign from log_setup import log if __name__ ==", "== \"__main__\": number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have {number_waypoint_scenario} waypoint available\")", "log_setup import log if __name__ == \"__main__\": number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios()", "quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have {number_quests_available} quests available\") random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10) random_quest", "quests available\") random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10) random_quest = quest_scenarios.get_random_scenario(1) campaign =", "\"__main__\": number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have {number_waypoint_scenario} waypoint available\") number_quests_available", "{number_waypoint_scenario} waypoint available\") number_quests_available = quest_scenarios.get_number_of_quest_scenarios() log.info(f\"We have {number_quests_available} quests", "log if __name__ == \"__main__\": number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios() log.info(f\"We have" ]
[ "\"pkgs\") info_dir = os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file = os.path.join(info_dir, \"has_prefix\")", "assert os.path.isfile(has_prefix_file) with open(has_prefix_file) as f: assert \"test2\" not in", "= os.path.join(os.environ[\"ROOT\"], \"pkgs\") info_dir = os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file =", "= os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file = os.path.join(info_dir, \"has_prefix\") print(info_dir) assert", "os.path.join(os.environ[\"ROOT\"], \"pkgs\") info_dir = os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file = os.path.join(info_dir,", "\"info\") has_prefix_file = os.path.join(info_dir, \"has_prefix\") print(info_dir) assert os.path.isfile(has_prefix_file) with open(has_prefix_file)", "import os pkgs = os.path.join(os.environ[\"ROOT\"], \"pkgs\") info_dir = os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\",", "info_dir = os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file = os.path.join(info_dir, \"has_prefix\") print(info_dir)", "\"has_prefix\") print(info_dir) assert os.path.isfile(has_prefix_file) with open(has_prefix_file) as f: assert \"test2\"", "= os.path.join(info_dir, \"has_prefix\") print(info_dir) assert os.path.isfile(has_prefix_file) with open(has_prefix_file) as f:", "os pkgs = os.path.join(os.environ[\"ROOT\"], \"pkgs\") info_dir = os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\")", "\"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file = os.path.join(info_dir, \"has_prefix\") print(info_dir) assert os.path.isfile(has_prefix_file) with", "has_prefix_file = os.path.join(info_dir, \"has_prefix\") print(info_dir) assert os.path.isfile(has_prefix_file) with open(has_prefix_file) as", 
"os.path.join(info_dir, \"has_prefix\") print(info_dir) assert os.path.isfile(has_prefix_file) with open(has_prefix_file) as f: assert", "pkgs = os.path.join(os.environ[\"ROOT\"], \"pkgs\") info_dir = os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file", "os.path.isfile(has_prefix_file) with open(has_prefix_file) as f: assert \"test2\" not in f.read()", "<filename>tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py import os pkgs = os.path.join(os.environ[\"ROOT\"], \"pkgs\") info_dir = os.path.join(pkgs,", "os.path.join(pkgs, \"conda-build-test-ignore-some-prefix-files-1.0-0\", \"info\") has_prefix_file = os.path.join(info_dir, \"has_prefix\") print(info_dir) assert os.path.isfile(has_prefix_file)", "print(info_dir) assert os.path.isfile(has_prefix_file) with open(has_prefix_file) as f: assert \"test2\" not" ]
[ "for epoch in range(args.epoch_num): print(epoch) for sample in Data.train_iter: model.train()", "preds += p labels += l report = classification_report(preds, labels)", "import namedtuple import torch import torch.nn as nn import torch.nn.functional", "score = torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1) score = F.softmax(score/T,1) loss_kv =", "dim=1) score = F.softmax(score/T,1) loss_kv = criterion_kv(output, score.to(device)) * T", "sample in Data.valid_iter: output = model(sample.text.permute(1, 0).to(device)) p = output.argmax(1).cpu().tolist()", "from model import CNN from torchtext import data, vocab from", "F from sklearn.metrics import classification_report from torch.optim import Adam from", "import CNN from torchtext import data, vocab from args import", "range(args.epoch_num): print(epoch) for sample in Data.train_iter: model.train() optimizer.zero_grad() output =", "import torch.nn as nn import torch.nn.functional as F from sklearn.metrics", "classification_report from torch.optim import Adam from tqdm import tqdm from", "torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1) score = F.softmax(score/T,1) loss_kv = criterion_kv(output, score.to(device))", "* T * T loss = alpha * loss_f +", "for sample in Data.train_iter: model.train() optimizer.zero_grad() output = model(sample.text.permute(1, 0).to(device))", "= model(sample.text.permute(1, 0).to(device)) loss_f = criterion(output, sample.label.to(device)) output = F.log_softmax(output/T,", "epoch in range(args.epoch_num): print(epoch) for sample in Data.train_iter: model.train() optimizer.zero_grad()", "lr=args.lr) criterion = FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv = nn.KLDivLoss().to(device) alpha =", "else: raise ValueError(\"wrong class num\") device = torch.device(\"cuda:%d\" % args.cuda)", "+= l report = classification_report(preds, labels) print(report) torch.save(model, 
os.path.join(args.save_dir, args.save_config", "0).to(device)) loss_f = criterion(output, sample.label.to(device)) output = F.log_softmax(output/T, 1) score", "import Adam from tqdm import tqdm from data import DataIteratorDistill", "collections import namedtuple import torch import torch.nn as nn import", "sample.pred1.unsqueeze(1).to(device)), dim=1) score = F.softmax(score/T,1) loss_kv = criterion_kv(output, score.to(device)) *", "import data, vocab from args import get_args, print_args from config", "1) score = torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1) score = F.softmax(score/T,1) loss_kv", "= torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1) score = F.softmax(score/T,1) loss_kv = criterion_kv(output,", "<filename>distill.py import os from collections import namedtuple import torch import", "= output.argmax(1).cpu().tolist() l = sample.label.tolist() preds += p labels +=", "loss_f = criterion(output, sample.label.to(device)) output = F.log_softmax(output/T, 1) score =", "namedtuple import torch import torch.nn as nn import torch.nn.functional as", "print_args from config import ConfigBinaryClassification from config import ConfigBinaryClassificationDistill from", "= Adam(model.parameters(), lr=args.lr) criterion = FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv = nn.KLDivLoss().to(device)", "torch.optim import Adam from tqdm import tqdm from data import", "get_args, print_args from config import ConfigBinaryClassification from config import ConfigBinaryClassificationDistill", "import ConfigBinaryClassification from config import ConfigBinaryClassificationDistill from config import ConfigTripleClassification", "alpha * loss_f + (1 - alpha) * loss_kv #print(loss_f.item(),", "= nn.KLDivLoss().to(device) alpha = 0.2 T = 2 for epoch", "sample.label.tolist() preds += p labels += l report = classification_report(preds,", 
"FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv = nn.KLDivLoss().to(device) alpha = 0.2 T =", "= criterion_kv(output, score.to(device)) * T * T loss = alpha", "args.class_num == 3: cfg = ConfigTripleClassification() else: raise ValueError(\"wrong class", "0.2 T = 2 for epoch in range(args.epoch_num): print(epoch) for", "= [] labels = [] for sample in Data.valid_iter: output", "0).to(device)) p = output.argmax(1).cpu().tolist() l = sample.label.tolist() preds += p", "= model(sample.text.permute(1, 0).to(device)) p = output.argmax(1).cpu().tolist() l = sample.label.tolist() preds", "data, vocab from args import get_args, print_args from config import", "print(epoch) for sample in Data.train_iter: model.train() optimizer.zero_grad() output = model(sample.text.permute(1,", "from config import ConfigTripleClassification if __name__ == \"__main__\": args =", "DataIteratorDistill from loss import FocalLoss from model import CNN from", "== \"__main__\": args = get_args() print_args(args) if args.class_num == 2:", "ValueError(\"wrong class num\") device = torch.device(\"cuda:%d\" % args.cuda) Data =", "= get_args() print_args(args) if args.class_num == 2: cfg = ConfigBinaryClassificationDistill()", "from torch.optim import Adam from tqdm import tqdm from data", "== 3: cfg = ConfigTripleClassification() else: raise ValueError(\"wrong class num\")", "T loss = alpha * loss_f + (1 - alpha)", "= alpha * loss_f + (1 - alpha) * loss_kv", "import ConfigBinaryClassificationDistill from config import ConfigTripleClassification if __name__ == \"__main__\":", "args = get_args() print_args(args) if args.class_num == 2: cfg =", "output = model(sample.text.permute(1, 0).to(device)) loss_f = criterion(output, sample.label.to(device)) output =", "with torch.no_grad(): model.eval() preds = [] labels = [] for", "criterion(output, sample.label.to(device)) output = F.log_softmax(output/T, 1) score = torch.cat((sample.pred0.unsqueeze(1).to(device), 
sample.pred1.unsqueeze(1).to(device)),", "from sklearn.metrics import classification_report from torch.optim import Adam from tqdm", "= DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model = torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer = Adam(model.parameters(),", "vocab from args import get_args, print_args from config import ConfigBinaryClassification", "from loss import FocalLoss from model import CNN from torchtext", "args import get_args, print_args from config import ConfigBinaryClassification from config", "2: cfg = ConfigBinaryClassificationDistill() elif args.class_num == 3: cfg =", "#print(loss_f.item(), loss_kv.item()) loss.backward() optimizer.step() with torch.no_grad(): model.eval() preds = []", "elif args.class_num == 3: cfg = ConfigTripleClassification() else: raise ValueError(\"wrong", "data import DataIteratorDistill from loss import FocalLoss from model import", "config import ConfigBinaryClassification from config import ConfigBinaryClassificationDistill from config import", "args.class_num == 2: cfg = ConfigBinaryClassificationDistill() elif args.class_num == 3:", "torch.device(\"cuda:%d\" % args.cuda) Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model = torch.load(\"checkpoints/CNN-29\",", "import FocalLoss from model import CNN from torchtext import data,", "import get_args, print_args from config import ConfigBinaryClassification from config import", "torchtext import data, vocab from args import get_args, print_args from", "preds = [] labels = [] for sample in Data.valid_iter:", "__name__ == \"__main__\": args = get_args() print_args(args) if args.class_num ==", "from config import ConfigBinaryClassification from config import ConfigBinaryClassificationDistill from config", "if args.class_num == 2: cfg = ConfigBinaryClassificationDistill() elif args.class_num ==", "model import CNN from torchtext import data, vocab from args", "ConfigTripleClassification() else: raise 
ValueError(\"wrong class num\") device = torch.device(\"cuda:%d\" %", "FocalLoss from model import CNN from torchtext import data, vocab", "= ConfigTripleClassification() else: raise ValueError(\"wrong class num\") device = torch.device(\"cuda:%d\"", "loss import FocalLoss from model import CNN from torchtext import", "get_args() print_args(args) if args.class_num == 2: cfg = ConfigBinaryClassificationDistill() elif", "= 0.2 T = 2 for epoch in range(args.epoch_num): print(epoch)", "ConfigTripleClassification if __name__ == \"__main__\": args = get_args() print_args(args) if", "= torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer = Adam(model.parameters(), lr=args.lr) criterion = FocalLoss(classes=args.class_num,", "l report = classification_report(preds, labels) print(report) torch.save(model, os.path.join(args.save_dir, args.save_config +", "Adam from tqdm import tqdm from data import DataIteratorDistill from", "[] for sample in Data.valid_iter: output = model(sample.text.permute(1, 0).to(device)) p", "2 for epoch in range(args.epoch_num): print(epoch) for sample in Data.train_iter:", "from config import ConfigBinaryClassificationDistill from config import ConfigTripleClassification if __name__", "criterion_kv(output, score.to(device)) * T * T loss = alpha *", "import DataIteratorDistill from loss import FocalLoss from model import CNN", "labels += l report = classification_report(preds, labels) print(report) torch.save(model, os.path.join(args.save_dir,", "sklearn.metrics import classification_report from torch.optim import Adam from tqdm import", "+ (1 - alpha) * loss_kv #print(loss_f.item(), loss_kv.item()) loss.backward() optimizer.step()", "train_batchsize=args.batch_size) model = torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer = Adam(model.parameters(), lr=args.lr) criterion", "os from collections import namedtuple import torch import torch.nn as", "Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model 
= torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer =", "Adam(model.parameters(), lr=args.lr) criterion = FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv = nn.KLDivLoss().to(device) alpha", "if __name__ == \"__main__\": args = get_args() print_args(args) if args.class_num", "* loss_kv #print(loss_f.item(), loss_kv.item()) loss.backward() optimizer.step() with torch.no_grad(): model.eval() preds", "loss.backward() optimizer.step() with torch.no_grad(): model.eval() preds = [] labels =", "num\") device = torch.device(\"cuda:%d\" % args.cuda) Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size)", "import tqdm from data import DataIteratorDistill from loss import FocalLoss", "config import ConfigTripleClassification if __name__ == \"__main__\": args = get_args()", "optimizer.zero_grad() output = model(sample.text.permute(1, 0).to(device)) loss_f = criterion(output, sample.label.to(device)) output", "output.argmax(1).cpu().tolist() l = sample.label.tolist() preds += p labels += l", "ConfigBinaryClassification from config import ConfigBinaryClassificationDistill from config import ConfigTripleClassification if", "score.to(device)) * T * T loss = alpha * loss_f", "= 2 for epoch in range(args.epoch_num): print(epoch) for sample in", "torch.no_grad(): model.eval() preds = [] labels = [] for sample", "torch import torch.nn as nn import torch.nn.functional as F from", "import torch import torch.nn as nn import torch.nn.functional as F", "device = torch.device(\"cuda:%d\" % args.cuda) Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model", "model.eval() preds = [] labels = [] for sample in", "import classification_report from torch.optim import Adam from tqdm import tqdm", "in Data.train_iter: model.train() optimizer.zero_grad() output = model(sample.text.permute(1, 0).to(device)) loss_f =", "report = classification_report(preds, labels) print(report) torch.save(model, 
os.path.join(args.save_dir, args.save_config + str(epoch)))", "import torch.nn.functional as F from sklearn.metrics import classification_report from torch.optim", "% args.cuda) Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model = torch.load(\"checkpoints/CNN-29\", map_location=device)", "loss_kv #print(loss_f.item(), loss_kv.item()) loss.backward() optimizer.step() with torch.no_grad(): model.eval() preds =", "\"__main__\": args = get_args() print_args(args) if args.class_num == 2: cfg", "= sample.label.tolist() preds += p labels += l report =", "tqdm from data import DataIteratorDistill from loss import FocalLoss from", "model = torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer = Adam(model.parameters(), lr=args.lr) criterion =", "l = sample.label.tolist() preds += p labels += l report", "= [] for sample in Data.valid_iter: output = model(sample.text.permute(1, 0).to(device))", "optimizer = Adam(model.parameters(), lr=args.lr) criterion = FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv =", "= torch.device(\"cuda:%d\" % args.cuda) Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model =", "as F from sklearn.metrics import classification_report from torch.optim import Adam", "= F.softmax(score/T,1) loss_kv = criterion_kv(output, score.to(device)) * T * T", "F.log_softmax(output/T, 1) score = torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1) score = F.softmax(score/T,1)", "model(sample.text.permute(1, 0).to(device)) loss_f = criterion(output, sample.label.to(device)) output = F.log_softmax(output/T, 1)", "T * T loss = alpha * loss_f + (1", "as nn import torch.nn.functional as F from sklearn.metrics import classification_report", "from args import get_args, print_args from config import ConfigBinaryClassification from", "nn.KLDivLoss().to(device) alpha = 0.2 T = 2 for epoch in", "from tqdm import tqdm from data import 
DataIteratorDistill from loss", "for sample in Data.valid_iter: output = model(sample.text.permute(1, 0).to(device)) p =", "= ConfigBinaryClassificationDistill() elif args.class_num == 3: cfg = ConfigTripleClassification() else:", "+= p labels += l report = classification_report(preds, labels) print(report)", "import os from collections import namedtuple import torch import torch.nn", "ConfigBinaryClassificationDistill from config import ConfigTripleClassification if __name__ == \"__main__\": args", "* T loss = alpha * loss_f + (1 -", "sample.label.to(device)) output = F.log_softmax(output/T, 1) score = torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1)", "in Data.valid_iter: output = model(sample.text.permute(1, 0).to(device)) p = output.argmax(1).cpu().tolist() l", "alpha = 0.2 T = 2 for epoch in range(args.epoch_num):", "import ConfigTripleClassification if __name__ == \"__main__\": args = get_args() print_args(args)", "config import ConfigBinaryClassificationDistill from config import ConfigTripleClassification if __name__ ==", "sample in Data.train_iter: model.train() optimizer.zero_grad() output = model(sample.text.permute(1, 0).to(device)) loss_f", "* loss_f + (1 - alpha) * loss_kv #print(loss_f.item(), loss_kv.item())", "from collections import namedtuple import torch import torch.nn as nn", "from torchtext import data, vocab from args import get_args, print_args", "cfg = ConfigBinaryClassificationDistill() elif args.class_num == 3: cfg = ConfigTripleClassification()", "criterion = FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv = nn.KLDivLoss().to(device) alpha = 0.2", "= FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv = nn.KLDivLoss().to(device) alpha = 0.2 T", "alpha) * loss_kv #print(loss_f.item(), loss_kv.item()) loss.backward() optimizer.step() with torch.no_grad(): model.eval()", "nn import torch.nn.functional as F from sklearn.metrics import 
classification_report from", "cfg = ConfigTripleClassification() else: raise ValueError(\"wrong class num\") device =", "T = 2 for epoch in range(args.epoch_num): print(epoch) for sample", "loss_kv = criterion_kv(output, score.to(device)) * T * T loss =", "p = output.argmax(1).cpu().tolist() l = sample.label.tolist() preds += p labels", "= criterion(output, sample.label.to(device)) output = F.log_softmax(output/T, 1) score = torch.cat((sample.pred0.unsqueeze(1).to(device),", "output = F.log_softmax(output/T, 1) score = torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1) score", "criterion_kv = nn.KLDivLoss().to(device) alpha = 0.2 T = 2 for", "torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer = Adam(model.parameters(), lr=args.lr) criterion = FocalLoss(classes=args.class_num, device=device).to(device)", "output = model(sample.text.permute(1, 0).to(device)) p = output.argmax(1).cpu().tolist() l = sample.label.tolist()", "score = F.softmax(score/T,1) loss_kv = criterion_kv(output, score.to(device)) * T *", "(1 - alpha) * loss_kv #print(loss_f.item(), loss_kv.item()) loss.backward() optimizer.step() with", "loss = alpha * loss_f + (1 - alpha) *", "labels = [] for sample in Data.valid_iter: output = model(sample.text.permute(1,", "- alpha) * loss_kv #print(loss_f.item(), loss_kv.item()) loss.backward() optimizer.step() with torch.no_grad():", "torch.nn.functional as F from sklearn.metrics import classification_report from torch.optim import", "F.softmax(score/T,1) loss_kv = criterion_kv(output, score.to(device)) * T * T loss", "optimizer.step() with torch.no_grad(): model.eval() preds = [] labels = []", "map_location=device) optimizer = Adam(model.parameters(), lr=args.lr) criterion = FocalLoss(classes=args.class_num, device=device).to(device) criterion_kv", "loss_f + (1 - alpha) * loss_kv #print(loss_f.item(), loss_kv.item()) loss.backward()", "loss_kv.item()) loss.backward() optimizer.step() with 
torch.no_grad(): model.eval() preds = [] labels", "class num\") device = torch.device(\"cuda:%d\" % args.cuda) Data = DataIteratorDistill(config=cfg,", "model.train() optimizer.zero_grad() output = model(sample.text.permute(1, 0).to(device)) loss_f = criterion(output, sample.label.to(device))", "from data import DataIteratorDistill from loss import FocalLoss from model", "raise ValueError(\"wrong class num\") device = torch.device(\"cuda:%d\" % args.cuda) Data", "[] labels = [] for sample in Data.valid_iter: output =", "tqdm import tqdm from data import DataIteratorDistill from loss import", "== 2: cfg = ConfigBinaryClassificationDistill() elif args.class_num == 3: cfg", "3: cfg = ConfigTripleClassification() else: raise ValueError(\"wrong class num\") device", "p labels += l report = classification_report(preds, labels) print(report) torch.save(model,", "ConfigBinaryClassificationDistill() elif args.class_num == 3: cfg = ConfigTripleClassification() else: raise", "Data.train_iter: model.train() optimizer.zero_grad() output = model(sample.text.permute(1, 0).to(device)) loss_f = criterion(output,", "device=device).to(device) criterion_kv = nn.KLDivLoss().to(device) alpha = 0.2 T = 2", "= F.log_softmax(output/T, 1) score = torch.cat((sample.pred0.unsqueeze(1).to(device), sample.pred1.unsqueeze(1).to(device)), dim=1) score =", "DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model = torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer = Adam(model.parameters(), lr=args.lr)", "model(sample.text.permute(1, 0).to(device)) p = output.argmax(1).cpu().tolist() l = sample.label.tolist() preds +=", "in range(args.epoch_num): print(epoch) for sample in Data.train_iter: model.train() optimizer.zero_grad() output", "CNN from torchtext import data, vocab from args import get_args,", "args.cuda) Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size) model = torch.load(\"checkpoints/CNN-29\", map_location=device) optimizer", "torch.nn 
as nn import torch.nn.functional as F from sklearn.metrics import", "print_args(args) if args.class_num == 2: cfg = ConfigBinaryClassificationDistill() elif args.class_num", "Data.valid_iter: output = model(sample.text.permute(1, 0).to(device)) p = output.argmax(1).cpu().tolist() l =" ]
[ "used to endorse or promote products # derived from this", "newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing attributes failed.\") def testMultipleOriginalAttributes(self): self._runTest_( '<html", "USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "enhancements please let me know! \"\"\" Unit test cases. \"\"\"", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "simpleTALES if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig() class TALAttributesTestCases(unittest.TestCase): def setUp(self):", "semi-colons, failed.\" ) def testMultipleAttributesEscaped(self): self._runTest_( '<html old=\"still &quot; here\"", "new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once failed.\")", "failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>', \"\"\"<html href=\"5\"", "in the # documentation and/or other materials provided with the", "href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\") def testMultipleAttributes(self): self._runTest_( '<html", "from this software without specific prior written permission. # #", "# # Redistribution and use in source and binary forms,", "THE POSSIBILITY OF SUCH DAMAGE. # # If you make", "failed.\") def testMultipleOriginalAttributes(self): self._runTest_( '<html one=\"Value One\" two=\"Value two\" three=\"Value", "two\" three=\"Value three\" tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value three\" one=\"Value", "test cases. 
\"\"\" from __future__ import unicode_literals import unittest import", "be used to endorse or promote products # derived from", "testAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html href='Does \"this\" work?'", "# -*- coding: iso-8859-1 -*- # Copyright (c) 2016, <NAME>", "in binary form must reproduce the above copyright # notice,", "new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once, with", "existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing attributes", "without specific prior written permission. # # THIS SOFTWARE IS", "-*- # Copyright (c) 2016, <NAME> <<EMAIL>> # All rights", "old='still \" here' href=\"owlfish.com\">Hello</html>''', \"Setting multiple attributes at once, with", "def testMultipleAttributesSpace(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default ;", "\"Defaulting of attribute 'href' failed.\") def testMultipleAttributes(self): self._runTest_( '<html old=\"still", "attribute 'href' failed.\") def testAnotherDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href anotherdefault/inhere\"", "IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT", "file.getvalue() self.assertEqual( realResult, result, \"%s - \\npassed in: %s \\ngot", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #", "\"Escaping of new attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\"", "please let me know! \"\"\" Unit test cases. 
\"\"\" from", "= simpleTALES.Context() self.context.addGlobal('test', 'testing') self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does \"this\" work?\"\"\")", "rights reserved. # See LICENSE.txt # Copyright (c) 2004 <NAME>", "list of conditions and the following disclaimer in the #", "import unittest import os import io import logging import logging.config", "def testAnotherDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\"", "# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once, with spaces", "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "unicode_literals import unittest import os import io import logging import", "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT", "'href' failed.\") def testDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href default\" href=\"owlfish.com\">Hello</html>',", "self.context.addGlobal('anotherdefault', { 'inhere': simpleTALES.DEFAULTVALUE }) def _runTest_(self, txt, result, errMsg=\"Error\"):", "source and binary forms, with or without # modification, are", "following conditions # are met: # 1. Redistributions of source", "make any bug fixes or feature enhancements please let me", "distribution. # 3. The name of the author may not", "class string: Semi-colon;;test;new test \" href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\" new=\"testing\" old='still", "('<html HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>' #~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML Attributes", "class=\"test\">Hello</html>', \"Removal of attribute 'href' failed.\") def testDefaultAttribute(self): self._runTest_( '<html", "# are met: # 1. 
Redistributions of source code must", "href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\" new=\"testing\" old='still \" here' href=\"owlfish.com\">Hello</html>''', \"Setting multiple", "work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_(", "three=\"Value three\" tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value three\" one=\"Value One\"", "nothing;new test\" href=\"owlfish.com\">Hello</html>', '<html new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING", "# documentation and/or other materials provided with the distribution. #", "tal:attributes=\"href nothing\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>', \"Removal of attribute 'href' failed.\")", "'<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\") def testMultipleAttributes(self):", "once failed.\") def testMultipleAttributesSpace(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href", "OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "testMultipleAttributesEscaped(self): self._runTest_( '<html old=\"still &quot; here\" class=\"test\" tal:attributes=\"href default ;", "old=\"still &quot; here\" class=\"test\" tal:attributes=\"href default ; class string: Semi-colon;;test;new", "disclaimer. # 2. 
Redistributions in binary form must reproduce the", "\"Addition of attribute 'link' failed.\") def testRemovingAnAttribute(self): self._runTest_( '<html class=\"test\"", "io.StringIO() template.expand(self.context, file) realResult = file.getvalue() self.assertEqual( realResult, result, \"%s", "PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR", "and binary forms, with or without # modification, are permitted", "\"this\" work?') self.context.addGlobal('anotherdefault', { 'inhere': simpleTALES.DEFAULTVALUE }) def _runTest_(self, txt,", "tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new", "DAMAGE. # # If you make any bug fixes or", "self._runTest_( '<html one=\"Value One\" two=\"Value two\" three=\"Value three\" tal:attributes=\"four attrs/three\"", "attributes failed.\") #~ def testAttributeCase (self): #~ self._runTest_ ('<html HREF=\"Testing\"", "testMultipleAttributes(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default;class nothing;new test\"", "ANY WAY OUT OF THE USE OF # THIS SOFTWARE,", "form must reproduce the above copyright # notice, this list", "or feature enhancements please let me know! \"\"\" Unit test", "# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT #", "{ 'inhere': simpleTALES.DEFAULTVALUE }) def _runTest_(self, txt, result, errMsg=\"Error\"): template", "WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN", "BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT", "from __future__ import unicode_literals import unittest import os import io", "tal:attributes=\"href default;class nothing;new test\" href=\"owlfish.com\">Hello</html>', '<html new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>',", "this software without specific prior written permission. 
# # THIS", "INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF", "TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "four=\"Value three\" one=\"Value One\" two=\"Value two\" three=\"Value three\">Value One</html>\"\"\", \"Accessing", "PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHOR", "simpletal import simpleTAL, simpleTALES if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig() class", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE", "\"this\" work?\"\"\") self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote', 'Does \"this\" work?') self.context.addGlobal('anotherdefault', {", "\"\"\"<html four=\"Value three\" one=\"Value One\" two=\"Value two\" three=\"Value three\">Value One</html>\"\"\",", "derived from this software without specific prior written permission. #", "notice, this list of conditions and the following disclaimer. #", "unittest import os import io import logging import logging.config from", "reproduce the above copyright # notice, this list of conditions", "software without specific prior written permission. 
# # THIS SOFTWARE", "from simpletal import simpleTAL, simpleTALES if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig()", "must retain the above copyright # notice, this list of", "'<html tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition of attribute", "href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>', \"Removal of attribute 'href' failed.\") def testDefaultAttribute(self):", "work?') self.context.addGlobal('anotherdefault', { 'inhere': simpleTALES.DEFAULTVALUE }) def _runTest_(self, txt, result,", "test\">Hello</html>' #~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML Attributes not treated as", "self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does \"this\" work?\"\"\") self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote', 'Does", ") def testAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html href='Does", "at once, with spaces between semi-colons, failed.\" ) def testAttributeEscaping(self):", "class TALAttributesTestCases(unittest.TestCase): def setUp(self): self.context = simpleTALES.Context() self.context.addGlobal('test', 'testing') self.context.addGlobal('link',", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF", "def testDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href default\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\"", "One\" two=\"Value two\" three=\"Value three\">Value One</html>\"\"\", \"Accessing multiple existing attributes", "def testMultipleAttributesEscaped(self): self._runTest_( '<html old=\"still &quot; here\" class=\"test\" tal:attributes=\"href default", ",\"\"\"<html 
href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML Attributes not treated as case insensitive.\")", "DISCLAIMED. # IN NO EVENT SHALL THE AUTHOR BE LIABLE", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE", "def testRemovingAnAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href nothing\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>',", "of source code must retain the above copyright # notice,", "# If you make any bug fixes or feature enhancements", "self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing", "iso-8859-1 -*- # Copyright (c) 2016, <NAME> <<EMAIL>> # All", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED", "(INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS", "tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing attributes failed.\")", "author may not be used to endorse or promote products", "``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING,", "bug fixes or feature enhancements please let me know! \"\"\"", "tal:attributes=\"href number\">Hello</html>', \"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\")", "LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION)", "SUCH DAMAGE. # # If you make any bug fixes", "of conditions and the following disclaimer. # 2. 
Redistributions in", "with spaces between semi-colons, failed.\" ) def testMultipleAttributesEscaped(self): self._runTest_( '<html", "default;class nothing;new test\" href=\"owlfish.com\">Hello</html>', '<html new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting", "logging.config from simpletal import simpleTAL, simpleTALES if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else:", "\\nexpected %s\\n\\nTemplate: %s\" % (errMsg, txt, realResult, result, template)) def", "'<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\") def testAnotherDefaultAttribute(self):", "attributes failed.\") def testAmpersandEscapeInAttributes(self): self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>',", "os import io import logging import logging.config from simpletal import", "or without # modification, are permitted provided that the following", "Redistribution and use in source and binary forms, with or", "the following disclaimer in the # documentation and/or other materials", "work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testOriginalAttributes(self): self._runTest_(", "% (errMsg, txt, realResult, result, template)) def testAddingAnAttribute(self): self._runTest_( '<html", "to endorse or promote products # derived from this software", "existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html href='Does \"this\" work?' 
existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of", "EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "retain the above copyright # notice, this list of conditions", "HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER", "failed.\") def testMultipleAttributes(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default;class", "errMsg=\"Error\"): template = simpleTAL.compileHTMLTemplate(txt) file = io.StringIO() template.expand(self.context, file) realResult", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing", "rights reserved. # # Redistribution and use in source and", "OUT OF THE USE OF # THIS SOFTWARE, EVEN IF", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR", "binary form must reproduce the above copyright # notice, this", "# derived from this software without specific prior written permission.", "class=\"test\" tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute", "failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html href='Does", "'inhere': simpleTALES.DEFAULTVALUE }) def _runTest_(self, txt, result, errMsg=\"Error\"): template =", "&quot; here\" class=\"test\" tal:attributes=\"href default ; class string: Semi-colon;;test;new test", "_runTest_(self, txt, result, errMsg=\"Error\"): template = simpleTAL.compileHTMLTemplate(txt) file = io.StringIO()", "class=\"test\" tal:attributes=\"href default ; class string: Semi-colon;;test;new test \" href=\"owlfish.com\">Hello</html>',", "me 
know! \"\"\" Unit test cases. \"\"\" from __future__ import", "= simpleTAL.compileHTMLTemplate(txt) file = io.StringIO() template.expand(self.context, file) realResult = file.getvalue()", "disclaimer in the # documentation and/or other materials provided with", "'link' failed.\") def testRemovingAnAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href nothing\" href=\"owlfish.com\">Hello</html>',", "import logging.config from simpletal import simpleTAL, simpleTALES if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\")", "# See LICENSE.txt # Copyright (c) 2004 <NAME> (http://www.owlfish.com/) #", "testAnotherDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>',", "# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "\\ngot back %s \\nexpected %s\\n\\nTemplate: %s\" % (errMsg, txt, realResult,", "class=\"test\" tal:attributes=\"href nothing\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>', \"Removal of attribute 'href'", "above copyright # notice, this list of conditions and the", "TALAttributesTestCases(unittest.TestCase): def setUp(self): self.context = simpleTALES.Context() self.context.addGlobal('test', 'testing') self.context.addGlobal('link', 'www.owlfish.com')", "new=\"testing\" old='still \" here' href=\"owlfish.com\">Hello</html>''', \"Setting multiple attributes at once,", "\"Setting multiple attributes at once failed.\") def testMultipleAttributesSpace(self): self._runTest_( '<html", "; class string: Semi-colon;;test;new test \" href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\" new=\"testing\"", "failed.\" ) def testAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html", "class=\"test\" tal:attributes=\"href 
default\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute", "'<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping", "ARISING IN ANY WAY OUT OF THE USE OF #", "#~ ,\"HTML Attributes not treated as case insensitive.\") if __name__", "the author may not be used to endorse or promote", "with the distribution. # 3. The name of the author", "def testAddingAnAttribute(self): self._runTest_( '<html tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>',", "in source and binary forms, with or without # modification,", "self.context.addGlobal('test', 'testing') self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does \"this\" work?\"\"\") self.context.addGlobal('number', 5)", "new attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href uniQuote\">Hello</html>',", "one=\"Value One\" two=\"Value two\" three=\"Value three\">Value One</html>\"\"\", \"Accessing multiple existing", "may not be used to endorse or promote products #", "'<html old=\"still &quot; here\" class=\"test\" tal:attributes=\"href default ; class string:", "let me know! \"\"\" Unit test cases. \"\"\" from __future__", "'''<html class=\"Semi-colon;test\" new=\"testing\" old='still \" here' href=\"owlfish.com\">Hello</html>''', \"Setting multiple attributes", "must reproduce the above copyright # notice, this list of", "are met: # 1. 
Redistributions of source code must retain", "THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A", "three\" tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value three\" one=\"Value One\" two=\"Value", "default\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\")", "'<html class=\"Hello there\" new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes", "between semi-colons, failed.\" ) def testAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href", "'<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping", "template.expand(self.context, file) realResult = file.getvalue() self.assertEqual( realResult, result, \"%s -", "this list of conditions and the following disclaimer. # 2.", "template = simpleTAL.compileHTMLTemplate(txt) file = io.StringIO() template.expand(self.context, file) realResult =", "attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing attributes failed.\") def", "\"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing attributes failed.\") def testMultipleOriginalAttributes(self): self._runTest_(", "One</html>\"\"\", \"Accessing multiple existing attributes failed.\") def testAmpersandEscapeInAttributes(self): self._runTest_( '<html", "# 1. 
Redistributions of source code must retain the above", "\"Accessing existing attributes failed.\") def testMultipleOriginalAttributes(self): self._runTest_( '<html one=\"Value One\"", "OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "use in source and binary forms, with or without #", "between semi-colons, failed.\" ) def testMultipleAttributesEscaped(self): self._runTest_( '<html old=\"still &quot;", "not treated as case insensitive.\") if __name__ == '__main__': unittest.main()", "other materials provided with the distribution. # 3. The name", "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once failed.\") def testMultipleAttributesSpace(self): self._runTest_(", "realResult, result, \"%s - \\npassed in: %s \\ngot back %s", "simpleTAL.compileHTMLTemplate(txt) file = io.StringIO() template.expand(self.context, file) realResult = file.getvalue() self.assertEqual(", "once, with spaces between semi-colons, failed.\" ) def testMultipleAttributesEscaped(self): self._runTest_(", "\" href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\" new=\"testing\" old='still \" here' href=\"owlfish.com\">Hello</html>''', \"Setting", "realResult, result, template)) def testAddingAnAttribute(self): self._runTest_( '<html tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>',", "self.context = simpleTALES.Context() self.context.addGlobal('test', 'testing') self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does \"this\"", "self.context.addGlobal('uniQuote', 'Does \"this\" work?') self.context.addGlobal('anotherdefault', { 'inhere': simpleTALES.DEFAULTVALUE }) def", "old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once failed.\") def", "self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default;class nothing;new test\" 
href=\"owlfish.com\">Hello</html>',", "import unicode_literals import unittest import os import io import logging", "the # documentation and/or other materials provided with the distribution.", "existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing attributes", "href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition of attribute 'link' failed.\") def", "'href' failed.\") def testMultipleAttributes(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href", "# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # #", "IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR", "'<html new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once", "txt, result, errMsg=\"Error\"): template = simpleTAL.compileHTMLTemplate(txt) file = io.StringIO() template.expand(self.context,", "txt, realResult, result, template)) def testAddingAnAttribute(self): self._runTest_( '<html tal:attributes=\"link link\"", "logging.basicConfig() class TALAttributesTestCases(unittest.TestCase): def setUp(self): self.context = simpleTALES.Context() self.context.addGlobal('test', 'testing')", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT", "(c) 2016, <NAME> <<EMAIL>> # All rights reserved. # See", "<NAME> (http://www.owlfish.com/) # All rights reserved. # # Redistribution and", "failed.\") def testDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href default\" href=\"owlfish.com\">Hello</html>', '<html", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # If", "'<html one=\"Value One\" two=\"Value two\" three=\"Value three\" tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>',", "ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHOR BE", "of attribute 'href' failed.\") def testAnotherDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href", "tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing attributes failed.\") def testMultipleOriginalAttributes(self):", "# modification, are permitted provided that the following conditions #", "attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>', \"\"\"<html", "SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, #", "attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing attributes failed.\") #~", "href=\"owlfish.com\">Hello</html>', \"Addition of attribute 'link' failed.\") def testRemovingAnAttribute(self): self._runTest_( '<html", "back %s \\nexpected %s\\n\\nTemplate: %s\" % (errMsg, txt, realResult, result,", "ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", "def testMultipleOriginalAttributes(self): self._runTest_( '<html one=\"Value One\" two=\"Value two\" three=\"Value three\"", "IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS", "link\" href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition of attribute 'link' failed.\")", "three\" one=\"Value One\" two=\"Value two\" three=\"Value three\">Value One</html>\"\"\", \"Accessing multiple", "with or without # modification, are permitted provided that the", "def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" 
tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html href='Does \"this\"", "failed.\") def testAmpersandEscapeInAttributes(self): self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html", "materials provided with the distribution. # 3. The name of", "that the following conditions # are met: # 1. Redistributions", "2016, <NAME> <<EMAIL>> # All rights reserved. # See LICENSE.txt", "at once failed.\") def testMultipleAttributesSpace(self): self._runTest_( '<html old=\"still here\" class=\"test\"", "FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO", "of new attributes failed.\") def testOriginalAttributes(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt", "attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value three\" one=\"Value One\" two=\"Value two\" three=\"Value", "# # If you make any bug fixes or feature", "failed.\") def testRemovingAnAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href nothing\" href=\"owlfish.com\">Hello</html>', '<html", "products # derived from this software without specific prior written", "test\" href=\"owlfish.com\">Hello</html>', '<html new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes", "testMultipleOriginalAttributes(self): self._runTest_( '<html one=\"Value One\" two=\"Value two\" three=\"Value three\" tal:attributes=\"four", "io import logging import logging.config from simpletal import simpleTAL, simpleTALES", "NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,", "\"%s - \\npassed in: %s \\ngot back %s \\nexpected %s\\n\\nTemplate:", "'<html class=\"test\" tal:attributes=\"href default\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of", "# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR", "result, \"%s - \\npassed in: 
%s \\ngot back %s \\nexpected", "- \\npassed in: %s \\ngot back %s \\nexpected %s\\n\\nTemplate: %s\"", "If you make any bug fixes or feature enhancements please", "of the author may not be used to endorse or", "GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS;", "uniQuote\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes", "3. The name of the author may not be used", "anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\")", "ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "prior written permission. # # THIS SOFTWARE IS PROVIDED BY", "provided that the following conditions # are met: # 1.", "}) def _runTest_(self, txt, result, errMsg=\"Error\"): template = simpleTAL.compileHTMLTemplate(txt) file", "; class string:Hello there; new test\" href=\"owlfish.com\">Hello</html>', '<html class=\"Hello there\"", "here\" class=\"test\" tal:attributes=\"href default ; class string: Semi-colon;;test;new test \"", "class string:Hello there; new test\" href=\"owlfish.com\">Hello</html>', '<html class=\"Hello there\" new=\"testing\"", "'testing') self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does \"this\" work?\"\"\") self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote',", "at once, with spaces between semi-colons, failed.\" ) def testMultipleAttributesEscaped(self):", "(self): #~ self._runTest_ ('<html HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>' #~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\"", "OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "new attributes failed.\") def testOriginalAttributes(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\"", "self._runTest_( '<html class=\"test\" tal:attributes=\"href 
nothing\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>', \"Removal of", "documentation and/or other materials provided with the distribution. # 3.", "A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL", "DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE", "CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE)", "tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href'", "and use in source and binary forms, with or without", "AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO,", "existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>', \"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes", "self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html href='Does \"this\" work?' 
existingatt='\"Testing\"'>Hello</html>\"\"\",", "THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF", "logging import logging.config from simpletal import simpleTAL, simpleTALES if (os.path.exists(\"logging.ini\")):", "def _runTest_(self, txt, result, errMsg=\"Error\"): template = simpleTAL.compileHTMLTemplate(txt) file =", "existing attributes failed.\") def testAmpersandEscapeInAttributes(self): self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\"", "'Does \"this\" work?') self.context.addGlobal('anotherdefault', { 'inhere': simpleTALES.DEFAULTVALUE }) def _runTest_(self,", "OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR", "(errMsg, txt, realResult, result, template)) def testAddingAnAttribute(self): self._runTest_( '<html tal:attributes=\"link", "existing attributes failed.\") def testMultipleOriginalAttributes(self): self._runTest_( '<html one=\"Value One\" two=\"Value", "POSSIBILITY OF SUCH DAMAGE. # # If you make any", "promote products # derived from this software without specific prior", "test \" href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\" new=\"testing\" old='still \" here' href=\"owlfish.com\">Hello</html>''',", "tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value three\" one=\"Value One\" two=\"Value two\"", "multiple attributes at once failed.\") def testMultipleAttributesSpace(self): self._runTest_( '<html old=\"still", "self.assertEqual( realResult, result, \"%s - \\npassed in: %s \\ngot back", "INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does \"this\" work?\"\"\") self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote', 'Does \"this\"", "and/or other materials provided with the distribution. # 3. 
The", "setUp(self): self.context = simpleTALES.Context() self.context.addGlobal('test', 'testing') self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does", "fixes or feature enhancements please let me know! \"\"\" Unit", "testAddingAnAttribute(self): self._runTest_( '<html tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition", "class=\"test\" tal:attributes=\"href default;class nothing;new test\" href=\"owlfish.com\">Hello</html>', '<html new=\"testing\" old=\"still here\"", "forms, with or without # modification, are permitted provided that", "binary forms, with or without # modification, are permitted provided", "%s\" % (errMsg, txt, realResult, result, template)) def testAddingAnAttribute(self): self._runTest_(", "-*- coding: iso-8859-1 -*- # Copyright (c) 2016, <NAME> <<EMAIL>>", "BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR #", "testDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href default\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>',", ") def testMultipleAttributesEscaped(self): self._runTest_( '<html old=\"still &quot; here\" class=\"test\" tal:attributes=\"href", "copyright # notice, this list of conditions and the following", "#~ self._runTest_ ('<html HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>' #~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\" #~", "IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "specific prior written permission. 
# # THIS SOFTWARE IS PROVIDED", "5) self.context.addGlobal('uniQuote', 'Does \"this\" work?') self.context.addGlobal('anotherdefault', { 'inhere': simpleTALES.DEFAULTVALUE })", "(os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig() class TALAttributesTestCases(unittest.TestCase): def setUp(self): self.context =", "\"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testNumberAttributeEscaping(self):", "OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA,", "# # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS", "'href' failed.\") def testAnotherDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>',", "href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_(", "LICENSE.txt # Copyright (c) 2004 <NAME> (http://www.owlfish.com/) # All rights", "the following conditions # are met: # 1. Redistributions of", "one=\"Value One\" two=\"Value two\" three=\"Value three\" tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html", "work?\"\"\") self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote', 'Does \"this\" work?') self.context.addGlobal('anotherdefault', { 'inhere':", "class=\"Semi-colon;test\" new=\"testing\" old='still \" here' href=\"owlfish.com\">Hello</html>''', \"Setting multiple attributes at", "existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testOriginalAttributes(self): self._runTest_( '<html", "provided with the distribution. # 3. 
The name of the", "def testAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>', \"\"\"<html href='Does \"this\"", "class=\"Hello there\" new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at", "self._runTest_ ('<html HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>' #~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML", "template)) def testAddingAnAttribute(self): self._runTest_( '<html tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\"", "the above copyright # notice, this list of conditions and", "self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote', 'Does \"this\" work?') self.context.addGlobal('anotherdefault', { 'inhere': simpleTALES.DEFAULTVALUE", "STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING", "\"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing attributes failed.\") #~ def testAttributeCase", "%s \\ngot back %s \\nexpected %s\\n\\nTemplate: %s\" % (errMsg, txt,", "DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "simpleTAL, simpleTALES if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig() class TALAttributesTestCases(unittest.TestCase): def", "testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html href='Does \"this\" work?'", "of new attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href", "class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\") def testMultipleAttributes(self): self._runTest_(", "failed.\") def 
testMultipleAttributesSpace(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default", "# All rights reserved. # # Redistribution and use in", "__future__ import unicode_literals import unittest import os import io import", "# Copyright (c) 2016, <NAME> <<EMAIL>> # All rights reserved.", "testAttributeCase (self): #~ self._runTest_ ('<html HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>' #~ ,\"\"\"<html", "'<html class=\"test\" tal:attributes=\"href nothing\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>', \"Removal of attribute", "OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED", "existing attributes failed.\") #~ def testAttributeCase (self): #~ self._runTest_ ('<html", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "# Redistribution and use in source and binary forms, with", "WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "\"\"\" Unit test cases. \"\"\" from __future__ import unicode_literals import", "or promote products # derived from this software without specific", "import logging import logging.config from simpletal import simpleTAL, simpleTALES if", "multiple existing attributes failed.\") def testAmpersandEscapeInAttributes(self): self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt", "code must retain the above copyright # notice, this list", "tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing attributes failed.\") #~ def", "# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED", "self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\",", "the distribution. # 3. 
The name of the author may", "THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED", "existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing attributes failed.\") #~ def testAttributeCase (self): #~", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY", "here' href=\"owlfish.com\">Hello</html>''', \"Setting multiple attributes at once, with spaces between", "def testAmpersandEscapeInAttributes(self): self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\"", "two=\"Value two\" three=\"Value three\">Value One</html>\"\"\", \"Accessing multiple existing attributes failed.\")", "All rights reserved. # See LICENSE.txt # Copyright (c) 2004", "\"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\")", "href=\"owlfish.com\">Hello</html>''', \"Setting multiple attributes at once, with spaces between semi-colons,", "existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of", "feature enhancements please let me know! 
\"\"\" Unit test cases.", "def testAttributeCase (self): #~ self._runTest_ ('<html HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>' #~", "SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS", "with spaces between semi-colons, failed.\" ) def testAttributeEscaping(self): self._runTest_( '<html", "existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html", "conditions and the following disclaimer in the # documentation and/or", "string: Semi-colon;;test;new test \" href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\" new=\"testing\" old='still \"", "testOriginalAttributes(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\",", "INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY,", "existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing attributes failed.\") def testMultipleOriginalAttributes(self): self._runTest_( '<html one=\"Value", "two\" three=\"Value three\">Value One</html>\"\"\", \"Accessing multiple existing attributes failed.\") def", "reserved. # See LICENSE.txt # Copyright (c) 2004 <NAME> (http://www.owlfish.com/)", "here\" class=\"test\" tal:attributes=\"href default;class nothing;new test\" href=\"owlfish.com\">Hello</html>', '<html new=\"testing\" old=\"still", "SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
#", "attributes failed.\") def testMultipleOriginalAttributes(self): self._runTest_( '<html one=\"Value One\" two=\"Value two\"", "import io import logging import logging.config from simpletal import simpleTAL,", "coding: iso-8859-1 -*- # Copyright (c) 2016, <NAME> <<EMAIL>> #", "of attribute 'href' failed.\") def testDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href", "(c) 2004 <NAME> (http://www.owlfish.com/) # All rights reserved. # #", "attributes at once, with spaces between semi-colons, failed.\" ) def", "# notice, this list of conditions and the following disclaimer.", "'<html class=\"test\">Hello</html>', \"Removal of attribute 'href' failed.\") def testDefaultAttribute(self): self._runTest_(", "LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR", "import os import io import logging import logging.config from simpletal", "here\" class=\"test\" tal:attributes=\"href default ; class string:Hello there; new test\"", "old=\"still here\" class=\"test\" tal:attributes=\"href default;class nothing;new test\" href=\"owlfish.com\">Hello</html>', '<html new=\"testing\"", "href=\"owlfish.com\">Hello</html>', '<html new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "of conditions and the following disclaimer in the # documentation", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "needsQuoting\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes", "multiple attributes at once, with spaces between semi-colons, failed.\" )", "# All rights reserved. 
# See LICENSE.txt # Copyright (c)", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, #", "'<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition of attribute 'link' failed.\") def testRemovingAnAttribute(self):", "# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS''", "THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND", "'<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>', \"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new", "href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML Attributes not treated as case insensitive.\") if", "link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition of attribute 'link' failed.\") def testRemovingAnAttribute(self): self._runTest_(", "simpleTALES.Context() self.context.addGlobal('test', 'testing') self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting', \"\"\"Does \"this\" work?\"\"\") self.context.addGlobal('number',", "file = io.StringIO() template.expand(self.context, file) realResult = file.getvalue() self.assertEqual( realResult,", "OF THE POSSIBILITY OF SUCH DAMAGE. # # If you", "string:Hello there; new test\" href=\"owlfish.com\">Hello</html>', '<html class=\"Hello there\" new=\"testing\" old=\"still", "conditions and the following disclaimer. # 2. Redistributions in binary", "there; new test\" href=\"owlfish.com\">Hello</html>', '<html class=\"Hello there\" new=\"testing\" old=\"still here\"", "# Copyright (c) 2004 <NAME> (http://www.owlfish.com/) # All rights reserved.", "attributes at once failed.\") def testMultipleAttributesSpace(self): self._runTest_( '<html old=\"still here\"", "and the following disclaimer. # 2. 
Redistributions in binary form", "\"Removal of attribute 'href' failed.\") def testDefaultAttribute(self): self._runTest_( '<html class=\"test\"", "two=\"Value two\" three=\"Value three\" tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value three\"", "failed.\" ) def testMultipleAttributesEscaped(self): self._runTest_( '<html old=\"still &quot; here\" class=\"test\"", "# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE", "the following disclaimer. # 2. Redistributions in binary form must", "following disclaimer. # 2. Redistributions in binary form must reproduce", "One\" two=\"Value two\" three=\"Value three\" tal:attributes=\"four attrs/three\" tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES", "#~ def testAttributeCase (self): #~ self._runTest_ ('<html HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>'", "permission. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR", "newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing attributes failed.\") #~ def testAttributeCase (self):", "Copyright (c) 2016, <NAME> <<EMAIL>> # All rights reserved. 
#", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY #", "default ; class string:Hello there; new test\" href=\"owlfish.com\">Hello</html>', '<html class=\"Hello", "def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>', \"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\",", "href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\") def testAnotherDefaultAttribute(self): self._runTest_( '<html", "AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL,", "notice, this list of conditions and the following disclaimer in", "See LICENSE.txt # Copyright (c) 2004 <NAME> (http://www.owlfish.com/) # All", "if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig() class TALAttributesTestCases(unittest.TestCase): def setUp(self): self.context", "THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL,", "All rights reserved. # # Redistribution and use in source", "once, with spaces between semi-colons, failed.\" ) def testAttributeEscaping(self): self._runTest_(", "realResult = file.getvalue() self.assertEqual( realResult, result, \"%s - \\npassed in:", "testAmpersandEscapeInAttributes(self): self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\",", "Copyright (c) 2004 <NAME> (http://www.owlfish.com/) # All rights reserved. 
#", "without # modification, are permitted provided that the following conditions", "failed.\") def testAnotherDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html", "class=\"test\" tal:attributes=\"href default ; class string:Hello there; new test\" href=\"owlfish.com\">Hello</html>',", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF", "there\" new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once,", "<NAME> <<EMAIL>> # All rights reserved. # See LICENSE.txt #", "# notice, this list of conditions and the following disclaimer", "you make any bug fixes or feature enhancements please let", "permitted provided that the following conditions # are met: #", "this list of conditions and the following disclaimer in the", "old=\"still here\" class=\"test\" tal:attributes=\"href default ; class string:Hello there; new", "\"Accessing multiple existing attributes failed.\") def testAmpersandEscapeInAttributes(self): self._runTest_( '<html existingAtt=\"&amp;Testing&amp;\"", "testRemovingAnAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href nothing\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>', \"Removal", "<<EMAIL>> # All rights reserved. # See LICENSE.txt # Copyright", "modification, are permitted provided that the following conditions # are", "cases. \"\"\" from __future__ import unicode_literals import unittest import os", "nothing\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\">Hello</html>', \"Removal of attribute 'href' failed.\") def", "tal:content=\"attrs/one\">Hello</html>', \"\"\"<html four=\"Value three\" one=\"Value One\" two=\"Value two\" three=\"Value three\">Value", "(http://www.owlfish.com/) # All rights reserved. 
# # Redistribution and use", "\"Setting multiple attributes at once, with spaces between semi-colons, failed.\"", "know! \"\"\" Unit test cases. \"\"\" from __future__ import unicode_literals", "are permitted provided that the following conditions # are met:", "attribute 'href' failed.\") def testDefaultAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href default\"", "href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once, with spaces between semi-colons,", "2004 <NAME> (http://www.owlfish.com/) # All rights reserved. # # Redistribution", "'<html old=\"still here\" class=\"test\" tal:attributes=\"href default;class nothing;new test\" href=\"owlfish.com\">Hello</html>', '<html", "reserved. # # Redistribution and use in source and binary", "CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN", "\" here' href=\"owlfish.com\">Hello</html>''', \"Setting multiple attributes at once, with spaces", "ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>', \"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping", "tal:attributes=\"href default\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href'", "LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS", "'<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing", "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "href='Does \"this\" work?' 
existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def", "here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once failed.\") def testMultipleAttributesSpace(self):", "Redistributions of source code must retain the above copyright #", "new test\" href=\"owlfish.com\">Hello</html>', '<html class=\"Hello there\" new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>',", "three\">Value One</html>\"\"\", \"Accessing multiple existing attributes failed.\") def testAmpersandEscapeInAttributes(self): self._runTest_(", "AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT,", "Unit test cases. \"\"\" from __future__ import unicode_literals import unittest", "failed.\") def testOriginalAttributes(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html", "not be used to endorse or promote products # derived", "AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES,", "BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY", "PARTICULAR PURPOSE ARE DISCLAIMED. 
# IN NO EVENT SHALL THE", "BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY,", "#~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML Attributes not treated as case", "IN ANY WAY OUT OF THE USE OF # THIS", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES #", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "file) realResult = file.getvalue() self.assertEqual( realResult, result, \"%s - \\npassed", "self._runTest_( '<html class=\"test\" tal:attributes=\"href default\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting", "\"\"\"Does \"this\" work?\"\"\") self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote', 'Does \"this\" work?') self.context.addGlobal('anotherdefault',", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT #", "#!/usr/bin/python # -*- coding: iso-8859-1 -*- # Copyright (c) 2016,", "LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN", "= file.getvalue() self.assertEqual( realResult, result, \"%s - \\npassed in: %s", "failed.\") #~ def testAttributeCase (self): #~ self._runTest_ ('<html HREF=\"Testing\" tal:attributes=\"HREF", "any bug fixes or feature enhancements please let me know!", "list of conditions and the following disclaimer. # 2. Redistributions", "'<html existingAtt=\"&amp;Testing&amp;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt=\"&amp;Testing&amp;\" existingatt=\"&amp;Testing&amp;\">&amp;Testing&amp;</html>\"\"\", \"Accessing existing", "HREF=\"Testing\" tal:attributes=\"HREF test\">Hello</html>' #~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML Attributes not", "\"\"\" from __future__ import unicode_literals import unittest import os import", "of attribute 'href' failed.\") def testMultipleAttributes(self): self._runTest_( '<html old=\"still here\"", "met: # 1. 
Redistributions of source code must retain the", "self.context.addGlobal('needsQuoting', \"\"\"Does \"this\" work?\"\"\") self.context.addGlobal('number', 5) self.context.addGlobal('uniQuote', 'Does \"this\" work?')", "new attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>',", "attributes failed.\") def testNumberAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html", "result, template)) def testAddingAnAttribute(self): self._runTest_( '<html tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>', '<html", "tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition of attribute 'link'", "Redistributions in binary form must reproduce the above copyright #", "\"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testOriginalAttributes(self):", "href=\"owlfish.com\">Hello</html>', '<html class=\"Hello there\" new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple", "def testOriginalAttributes(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"'", "three=\"Value three\">Value One</html>\"\"\", \"Accessing multiple existing attributes failed.\") def testAmpersandEscapeInAttributes(self):", "endorse or promote products # derived from this software without", "def setUp(self): self.context = simpleTALES.Context() self.context.addGlobal('test', 'testing') self.context.addGlobal('link', 'www.owlfish.com') self.context.addGlobal('needsQuoting',", "attribute 'href' failed.\") def testMultipleAttributes(self): self._runTest_( '<html old=\"still here\" 
class=\"test\"", "default ; class string: Semi-colon;;test;new test \" href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\"", "\"Accessing existing attributes failed.\") #~ def testAttributeCase (self): #~ self._runTest_", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.", ",\"HTML Attributes not treated as case insensitive.\") if __name__ ==", "\"Defaulting of attribute 'href' failed.\") def testAnotherDefaultAttribute(self): self._runTest_( '<html class=\"test\"", "self._runTest_( '<html tal:attributes=\"link link\" href=\"owlfish.com\">Hello</html>', '<html link=\"www.owlfish.com\" href=\"owlfish.com\">Hello</html>', \"Addition of", "# 3. The name of the author may not be", "IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY", "tal:attributes=\"href uniQuote\">Hello</html>', \"\"\"<html href='Does \"this\" work?' existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new", "here\" href=\"owlfish.com\">Hello</html>', \"Setting multiple attributes at once, with spaces between", "Attributes not treated as case insensitive.\") if __name__ == '__main__':", "else: logging.basicConfig() class TALAttributesTestCases(unittest.TestCase): def setUp(self): self.context = simpleTALES.Context() self.context.addGlobal('test',", "of attribute 'link' failed.\") def testRemovingAnAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href", "import simpleTAL, simpleTALES if (os.path.exists(\"logging.ini\")): logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig() class TALAttributesTestCases(unittest.TestCase):", "1. 
Redistributions of source code must retain the above copyright", "OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "source code must retain the above copyright # notice, this", "NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND", "test\" href=\"owlfish.com\">Hello</html>', '<html class=\"Hello there\" new=\"testing\" old=\"still here\" href=\"owlfish.com\">Hello</html>', \"Setting", "OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "tal:attributes=\"HREF test\">Hello</html>' #~ ,\"\"\"<html href=\"testing\">Hello</html>\"\"\" #~ ,\"HTML Attributes not treated", "\"Escaping of new attributes failed.\") def testOriginalAttributes(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\"", "number\">Hello</html>', \"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def", "tal:attributes=\"href default ; class string: Semi-colon;;test;new test \" href=\"owlfish.com\">Hello</html>', '''<html", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF", "self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default ; class string:Hello", "# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "in: %s \\ngot back %s \\nexpected %s\\n\\nTemplate: %s\" % (errMsg,", "following disclaimer in the # documentation and/or other materials provided", "self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href number\">Hello</html>', \"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of", "\\npassed in: %s \\ngot back %s \\nexpected %s\\n\\nTemplate: %s\" %", "name of the author may not be used to endorse", "spaces between semi-colons, failed.\" ) def testAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\"", "self._runTest_( '<html old=\"still &quot; here\" class=\"test\" tal:attributes=\"href default ; 
class", "testMultipleAttributesSpace(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default ; class", "simpleTALES.DEFAULTVALUE }) def _runTest_(self, txt, result, errMsg=\"Error\"): template = simpleTAL.compileHTMLTemplate(txt)", "tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>', \"\"\"<html newAtt='\"Testing\"' existingatt='\"Testing\"'>\"Testing\"</html>\"\"\", \"Accessing existing attributes failed.\")", "= io.StringIO() template.expand(self.context, file) realResult = file.getvalue() self.assertEqual( realResult, result,", "EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,", "semi-colons, failed.\" ) def testAttributeEscaping(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"href needsQuoting\">Hello</html>',", "attributes failed.\") def testOriginalAttributes(self): self._runTest_( '<html existingAtt=\"&quot;Testing&quot;\" tal:attributes=\"newAtt attrs/existingatt\" tal:content=\"attrs/existingatt\">Hello</html>',", "logging.config.fileConfig(\"logging.ini\") else: logging.basicConfig() class TALAttributesTestCases(unittest.TestCase): def setUp(self): self.context = simpleTALES.Context()", "result, errMsg=\"Error\"): template = simpleTAL.compileHTMLTemplate(txt) file = io.StringIO() template.expand(self.context, file)", "TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR", "class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\") def testAnotherDefaultAttribute(self): self._runTest_(", "tal:attributes=\"href default ; class string:Hello there; new test\" href=\"owlfish.com\">Hello</html>', '<html", "2. Redistributions in binary form must reproduce the above copyright", "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
# IN NO EVENT", "FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL", "def testMultipleAttributes(self): self._runTest_( '<html old=\"still here\" class=\"test\" tal:attributes=\"href default;class nothing;new", "\"\"\"<html href=\"5\" existingatt='\"Testing\"'>Hello</html>\"\"\", \"Escaping of new attributes failed.\") def testNumberAttributeEscaping(self):", "OF SUCH DAMAGE. # # If you make any bug", "%s \\nexpected %s\\n\\nTemplate: %s\" % (errMsg, txt, realResult, result, template))", "self._runTest_( '<html class=\"test\" tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting", "'<html old=\"still here\" class=\"test\" tal:attributes=\"href default ; class string:Hello there;", "attribute 'link' failed.\") def testRemovingAnAttribute(self): self._runTest_( '<html class=\"test\" tal:attributes=\"href nothing\"", "%s\\n\\nTemplate: %s\" % (errMsg, txt, realResult, result, template)) def testAddingAnAttribute(self):", "'<html class=\"test\" tal:attributes=\"href anotherdefault/inhere\" href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of", "USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE", "and the following disclaimer in the # documentation and/or other", "ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "href=\"owlfish.com\">Hello</html>', '<html class=\"test\" href=\"owlfish.com\">Hello</html>', \"Defaulting of attribute 'href' failed.\") def", "The name of the author may not be used to", "conditions # are met: # 1. Redistributions of source code", "Semi-colon;;test;new test \" href=\"owlfish.com\">Hello</html>', '''<html class=\"Semi-colon;test\" new=\"testing\" old='still \" here'", "# 2. 
Redistributions in binary form must reproduce the above", "spaces between semi-colons, failed.\" ) def testMultipleAttributesEscaped(self): self._runTest_( '<html old=\"still" ]
[ "from typing import List from fasta_reader import FASTAItem, FASTAWriter, read_fasta", "size: int, random): targets: List[FASTAItem] = list(read_fasta(infile)) if size >", "def downsample(infile: Path, outfile: Path, size: int, random): targets: List[FASTAItem]", "= random.choice(targets, size, replace=False).tolist() with FASTAWriter(outfile) as writer: for target", "[\"downsample\"] def downsample(infile: Path, outfile: Path, size: int, random): targets:", "= [\"downsample\"] def downsample(infile: Path, outfile: Path, size: int, random):", "from fasta_reader import FASTAItem, FASTAWriter, read_fasta __all__ = [\"downsample\"] def", "Path, size: int, random): targets: List[FASTAItem] = list(read_fasta(infile)) if size", "downsample(infile: Path, outfile: Path, size: int, random): targets: List[FASTAItem] =", "ValueError(\"Size is greater than the number of targets.\") targets =", "random): targets: List[FASTAItem] = list(read_fasta(infile)) if size > len(targets): raise", "size, replace=False).tolist() with FASTAWriter(outfile) as writer: for target in targets:", "FASTAItem, FASTAWriter, read_fasta __all__ = [\"downsample\"] def downsample(infile: Path, outfile:", "of targets.\") targets = random.choice(targets, size, replace=False).tolist() with FASTAWriter(outfile) as", "List from fasta_reader import FASTAItem, FASTAWriter, read_fasta __all__ = [\"downsample\"]", "fasta_reader import FASTAItem, FASTAWriter, read_fasta __all__ = [\"downsample\"] def downsample(infile:", "__all__ = [\"downsample\"] def downsample(infile: Path, outfile: Path, size: int,", "int, random): targets: List[FASTAItem] = list(read_fasta(infile)) if size > len(targets):", "> len(targets): raise ValueError(\"Size is greater than the number of", "import List from fasta_reader import FASTAItem, FASTAWriter, read_fasta __all__ =", "raise ValueError(\"Size is greater than the number of targets.\") targets", "number of targets.\") targets = random.choice(targets, size, replace=False).tolist() 
with FASTAWriter(outfile)", "= list(read_fasta(infile)) if size > len(targets): raise ValueError(\"Size is greater", "size > len(targets): raise ValueError(\"Size is greater than the number", "targets = random.choice(targets, size, replace=False).tolist() with FASTAWriter(outfile) as writer: for", "len(targets): raise ValueError(\"Size is greater than the number of targets.\")", "Path from typing import List from fasta_reader import FASTAItem, FASTAWriter,", "list(read_fasta(infile)) if size > len(targets): raise ValueError(\"Size is greater than", "import FASTAItem, FASTAWriter, read_fasta __all__ = [\"downsample\"] def downsample(infile: Path,", "the number of targets.\") targets = random.choice(targets, size, replace=False).tolist() with", "import Path from typing import List from fasta_reader import FASTAItem,", "random.choice(targets, size, replace=False).tolist() with FASTAWriter(outfile) as writer: for target in", "from pathlib import Path from typing import List from fasta_reader", "is greater than the number of targets.\") targets = random.choice(targets,", "outfile: Path, size: int, random): targets: List[FASTAItem] = list(read_fasta(infile)) if", "targets.\") targets = random.choice(targets, size, replace=False).tolist() with FASTAWriter(outfile) as writer:", "if size > len(targets): raise ValueError(\"Size is greater than the", "than the number of targets.\") targets = random.choice(targets, size, replace=False).tolist()", "List[FASTAItem] = list(read_fasta(infile)) if size > len(targets): raise ValueError(\"Size is", "FASTAWriter, read_fasta __all__ = [\"downsample\"] def downsample(infile: Path, outfile: Path,", "replace=False).tolist() with FASTAWriter(outfile) as writer: for target in targets: writer.write_item(target.defline,", "with FASTAWriter(outfile) as writer: for target in targets: writer.write_item(target.defline, target.sequence)", "targets: List[FASTAItem] = list(read_fasta(infile)) if size > len(targets): raise ValueError(\"Size", 
"read_fasta __all__ = [\"downsample\"] def downsample(infile: Path, outfile: Path, size:", "typing import List from fasta_reader import FASTAItem, FASTAWriter, read_fasta __all__", "Path, outfile: Path, size: int, random): targets: List[FASTAItem] = list(read_fasta(infile))", "pathlib import Path from typing import List from fasta_reader import", "greater than the number of targets.\") targets = random.choice(targets, size," ]
[ "30 # hard limit to not reach rate limit of", "= \"<KEY>\" # yahoo finance api key TICKER = \"TSLA\"", "\"1d\" LOOK_BACK = 30 # hard limit to not reach", "\"1m\" PERIOD = \"1d\" LOOK_BACK = 30 # hard limit", "# hard limit to not reach rate limit of 100", "limit to not reach rate limit of 100 per day", "# API keys # YF_API_KEY = \"<KEY>\" # yahoo finance", "PERIOD = \"1d\" LOOK_BACK = 30 # hard limit to", "= \"1m\" PERIOD = \"1d\" LOOK_BACK = 30 # hard", "# YF_API_KEY = \"<KEY>\" # yahoo finance api key TICKER", "# yahoo finance api key TICKER = \"TSLA\" INTERVAL =", "api key TICKER = \"TSLA\" INTERVAL = \"1m\" PERIOD =", "keys # YF_API_KEY = \"<KEY>\" # yahoo finance api key", "= \"1d\" LOOK_BACK = 30 # hard limit to not", "= 30 # hard limit to not reach rate limit", "\"TSLA\" INTERVAL = \"1m\" PERIOD = \"1d\" LOOK_BACK = 30", "\"<KEY>\" # yahoo finance api key TICKER = \"TSLA\" INTERVAL", "yahoo finance api key TICKER = \"TSLA\" INTERVAL = \"1m\"", "finance api key TICKER = \"TSLA\" INTERVAL = \"1m\" PERIOD", "TICKER = \"TSLA\" INTERVAL = \"1m\" PERIOD = \"1d\" LOOK_BACK", "YF_API_KEY = \"<KEY>\" # yahoo finance api key TICKER =", "INTERVAL = \"1m\" PERIOD = \"1d\" LOOK_BACK = 30 #", "LOOK_BACK = 30 # hard limit to not reach rate", "key TICKER = \"TSLA\" INTERVAL = \"1m\" PERIOD = \"1d\"", "hard limit to not reach rate limit of 100 per", "API keys # YF_API_KEY = \"<KEY>\" # yahoo finance api", "= \"TSLA\" INTERVAL = \"1m\" PERIOD = \"1d\" LOOK_BACK =" ]
[ "backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker,", "invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory, ) from proto import backend_pb2 RequestTypeToInvoker", "invoker_cmd_branch_create, invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init, invoker_cmd_label_add, invoker_cmd_label_get,", "backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker,", "backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker,", "backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker, backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory,", "backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker, backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory, 
backend_pb2.USER_CREATE: invoker_cmd_user_create.UserCreateInvoker, backend_pb2.CMD_SAMPLING: invoker_cmd_sampling.SamplingInvoker,", "backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker,", "invoker_cmd_branch_create.BranchCreateInvoker, backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER:", "backend_pb2 RequestTypeToInvoker = { backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker,", "= { backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker,", "invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker, backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory, backend_pb2.USER_CREATE: invoker_cmd_user_create.UserCreateInvoker, backend_pb2.CMD_SAMPLING: invoker_cmd_sampling.SamplingInvoker, }", "invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, 
backend_pb2.CMD_REPO_CHECK:", "invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE:", "invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory, ) from", "from controller.invoker import ( invoker_cmd_branch_checkout, invoker_cmd_branch_commit, invoker_cmd_branch_create, invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate,", "invoker_cmd_inference, invoker_cmd_init, invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling,", "backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker,", "invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE:", "import ( invoker_cmd_branch_checkout, invoker_cmd_branch_commit, invoker_cmd_branch_create, invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info,", "invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init, 
invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image,", "invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker, backend_pb2.TASK_CREATE:", "backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler,", ") from proto import backend_pb2 RequestTypeToInvoker = { backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker,", "backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker,", "from proto import backend_pb2 RequestTypeToInvoker = { backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE:", "invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create,", "invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory,", "{ backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, 
backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT:", "backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker, backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker,", "invoker_cmd_branch_commit, invoker_cmd_branch_create, invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init, invoker_cmd_label_add,", "invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker, backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG:", "invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker, backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory, backend_pb2.USER_CREATE: invoker_cmd_user_create.UserCreateInvoker, backend_pb2.CMD_SAMPLING:", "invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET:", "invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: 
invoker_cmd_inference.InferenceCMDInvoker, backend_pb2.CMD_INIT:", "backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker,", "invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory, ) from proto", "import backend_pb2 RequestTypeToInvoker = { backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, backend_pb2.CMD_BRANCH_DEL:", "invoker_cmd_branch_checkout, invoker_cmd_branch_commit, invoker_cmd_branch_create, invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init,", "invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init, invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear,", "invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker, backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD:", "invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory, ) from proto import backend_pb2 RequestTypeToInvoker =", "invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init, invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log,", "invoker_cmd_log.LogInvoker, 
backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR:", "invoker_cmd_init, invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate,", "invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init, invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check,", "backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker, backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker,", "invoker_cmd_terminate.CMDTerminateInvoker, backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker, backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory, backend_pb2.USER_CREATE:", "invoker_cmd_user_create, invoker_task_factory, ) from proto import backend_pb2 RequestTypeToInvoker = {", "invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory, ) from proto import backend_pb2", "invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler, backend_pb2.CMD_TERMINATE:", "invoker_cmd_inference.InferenceCMDInvoker, 
backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE:", "invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker, backend_pb2.CMD_PULL_IMAGE:", "backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker, backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker, backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker, backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory, backend_pb2.USER_CREATE: invoker_cmd_user_create.UserCreateInvoker,", "backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker,", "invoker_task_factory, ) from proto import backend_pb2 RequestTypeToInvoker = { backend_pb2.CMD_BRANCH_CHECKOUT:", "proto import backend_pb2 RequestTypeToInvoker = { backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker,", "backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker, backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker,", "invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, 
backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker, backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE:", "backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker, backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker, backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker,", "invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference, invoker_cmd_init, invoker_cmd_label_add, invoker_cmd_label_get, invoker_cmd_log, invoker_cmd_merge,", "RequestTypeToInvoker = { backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker, backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker, backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker, backend_pb2.CMD_BRANCH_LIST:", "controller.invoker import ( invoker_cmd_branch_checkout, invoker_cmd_branch_commit, invoker_cmd_branch_create, invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter,", "invoker_cmd_filter.FilterBranchInvoker, backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker, backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET:", "( invoker_cmd_branch_checkout, invoker_cmd_branch_commit, invoker_cmd_branch_create, invoker_cmd_branch_delete, invoker_cmd_branch_list, invoker_cmd_evaluate, invoker_cmd_filter, invoker_cmd_gpu_info, invoker_cmd_inference,", "backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker, backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker, 
backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker,", "invoker_cmd_log, invoker_cmd_merge, invoker_cmd_pull_image, invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory, )", "invoker_cmd_repo_check, invoker_cmd_repo_clear, invoker_cmd_sampling, invoker_cmd_terminate, invoker_cmd_user_create, invoker_task_factory, ) from proto import", "backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker, backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker, backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker, backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker, backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker," ]
[ "from datetime import datetime, timedelta due_date_format = '%Y-%m-%d' datepicker_date_format =", "datetime, timedelta due_date_format = '%Y-%m-%d' datepicker_date_format = '%m%d%Y' def current_date():", "def _date_from_today(days_to_add): return datetime.utcnow() + timedelta(days=days_to_add) def date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format)", "import datetime, timedelta due_date_format = '%Y-%m-%d' datepicker_date_format = '%m%d%Y' def", "def current_date(): return datetime.utcnow().strftime(due_date_format) def datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add):", "timedelta due_date_format = '%Y-%m-%d' datepicker_date_format = '%m%d%Y' def current_date(): return", "return datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add): return datetime.utcnow() + timedelta(days=days_to_add) def date_from_today(days_to_add):", "datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add): return datetime.utcnow() + timedelta(days=days_to_add) def", "return datetime.utcnow().strftime(due_date_format) def datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add): return datetime.utcnow()", "_date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(datepicker_date_format) def datepicker_to_due_date_format(datepicker_date): return datetime.strptime(datepicker_date, datepicker_date_format).strftime(due_date_format)", "timedelta(days=days_to_add) def date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(datepicker_date_format) def", "due_date_format = '%Y-%m-%d' datepicker_date_format = '%m%d%Y' def 
current_date(): return datetime.utcnow().strftime(due_date_format)", "date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(datepicker_date_format) def datepicker_to_due_date_format(datepicker_date): return", "def datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add): return datetime.utcnow() + timedelta(days=days_to_add)", "def date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(datepicker_date_format) def datepicker_to_due_date_format(datepicker_date):", "+ timedelta(days=days_to_add) def date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(datepicker_date_format)", "datepicker_date_format = '%m%d%Y' def current_date(): return datetime.utcnow().strftime(due_date_format) def datepicker_current_date(): return", "= '%Y-%m-%d' datepicker_date_format = '%m%d%Y' def current_date(): return datetime.utcnow().strftime(due_date_format) def", "return _date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(datepicker_date_format) def datepicker_to_due_date_format(datepicker_date): return datetime.strptime(datepicker_date,", "current_date(): return datetime.utcnow().strftime(due_date_format) def datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add): return", "_date_from_today(days_to_add): return datetime.utcnow() + timedelta(days=days_to_add) def date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format) def", 
"<filename>tests/utils/date_utils.py from datetime import datetime, timedelta due_date_format = '%Y-%m-%d' datepicker_date_format", "'%m%d%Y' def current_date(): return datetime.utcnow().strftime(due_date_format) def datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format) def", "'%Y-%m-%d' datepicker_date_format = '%m%d%Y' def current_date(): return datetime.utcnow().strftime(due_date_format) def datepicker_current_date():", "datetime.utcnow().strftime(due_date_format) def datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add): return datetime.utcnow() +", "= '%m%d%Y' def current_date(): return datetime.utcnow().strftime(due_date_format) def datepicker_current_date(): return datetime.utcnow().strftime(datepicker_date_format)", "return datetime.utcnow() + timedelta(days=days_to_add) def date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add):", "datetime.utcnow() + timedelta(days=days_to_add) def date_from_today(days_to_add): return _date_from_today(days_to_add).strftime(due_date_format) def datepicker_date_from_today(days_to_add): return", "datetime import datetime, timedelta due_date_format = '%Y-%m-%d' datepicker_date_format = '%m%d%Y'", "datetime.utcnow().strftime(datepicker_date_format) def _date_from_today(days_to_add): return datetime.utcnow() + timedelta(days=days_to_add) def date_from_today(days_to_add): return" ]
[ "out): for i in range(out.shape[0]): out[i] = a * x[i]", "ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc) # Test integer version A = np.arange(100,", "= A[j] * X[j, i] + Y[j, i] self.assertTrue(exp ==", "2) Y = np.arange(10, dtype=np.float32).reshape(5, 2) out = saxpy(A, X,", "= np.arange(100, dtype=np.intp) + 1 expected = A + B", "in range(5): for i in range(2): exp = A[j] *", "test_gufunc_building(self): ufbldr = HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:], float32[:], float32[:])\") ufbldr.add(\"(intp[:], intp[:],", "target='roc') def saxpy(a, x, y, out): for i in range(out.shape[0]):", "= np.arange(300, dtype=np.int32).reshape(100, 3) out = ufunc(inp) for i in", "integer version with 3D inputs A = A.reshape(5, 10, 2)", "A + B got = ufunc(A, B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype,", "B got = ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32),", "got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) # Test real version with", "ufbldr = HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\") ufunc = ufbldr.build_ufunc() inp", "(exp, out[j])) A = np.arange(5, dtype=np.float32) X = np.arange(10, dtype=np.float32).reshape(5,", "ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) def test_gufunc_building_scalar_output(self):", "ufbldr = HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:], float32[:], float32[:])\") ufbldr.add(\"(intp[:], intp[:], intp[:])\")", "self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version with 2D", "a * x[i] + y[i] ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32,", "float32[:], 
float32[:], float32[:])\") saxpy = ufbldr.build_ufunc() A = np.float32(2) X", "intp[:])\") ufunc = ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc) # Test integer version", "numba.roc.vectorizers import HsaGUFuncVectorize from numba.roc.dispatch import HSAGenerializedUFunc from numba import", "self.assertTrue(exp == out[j, i], (exp, out[j, i])) if __name__ ==", "A[j] * X[j, i] + Y[j, i] self.assertTrue(exp == out[j,", "i in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i]) def test_gufunc_scalar_input_saxpy(self): def axpy(a, x,", "unittest def ufunc_add_core(a, b, c): for i in range(c.size): c[i]", "ufbldr.add(\"void(int32[:], int32[:])\") ufunc = ufbldr.build_ufunc() inp = np.arange(300, dtype=np.int32).reshape(100, 3)", "range(10): exp = A * X[j] + Y[j] self.assertTrue(exp ==", "version with 2D inputs A = A.reshape(50, 2) B =", "range(2): exp = A * X[j, i] + Y[j, i]", "= ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc) # Test integer version A =", "c[i] = a[i] + b[i] class TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self): ufbldr", "self.assertTrue(exp == out[j], (exp, out[j])) A = np.arange(5, dtype=np.float32) X", "j in range(10): exp = A * X[j] + Y[j]", "dtype=np.float32) out = saxpy(A, X, Y) for j in range(10):", "Test integer version with 3D inputs A = A.reshape(5, 10,", "X[j, i] + Y[j, i] self.assertTrue(exp == out[j, i]) X", "got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version with 3D inputs", "B.reshape(50, 2) expected = A + B got = ufunc(A,", "got = ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype)", "for i in range(inp.shape[0]): tmp += inp[i] out[0] = tmp", "ufbldr.build_ufunc() inp = np.arange(300, dtype=np.int32).reshape(100, 3) out = ufunc(inp) for", "numpy as np from numba.roc.vectorizers import HsaGUFuncVectorize from numba.roc.dispatch 
import", "def sum_row(inp, out): tmp = 0. for i in range(inp.shape[0]):", "a[i] + b[i] class TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self): ufbldr = HsaGUFuncVectorize(ufunc_add_core,", "= A.reshape(50, 2) B = B.reshape(50, 2) expected = A", "np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) # Test real version", "got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) def test_gufunc_building_scalar_output(self): def sum_row(inp, out):", "as np from numba.roc.vectorizers import HsaGUFuncVectorize from numba.roc.dispatch import HSAGenerializedUFunc", "got.dtype) # Test integer version with 3D inputs A =", "self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version with 2D inputs A", "* x[i] + y[i] ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:],", "HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\") ufunc = ufbldr.build_ufunc() inp = np.arange(300,", "= ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) def", "in range(2): exp = A[j] * X[j, i] + Y[j,", "exp = A * X[j, i] + Y[j, i] self.assertTrue(exp", "* X[j, i] + Y[j, i] self.assertTrue(exp == out[j, i])", "A = np.arange(100, dtype=np.intp) B = np.arange(100, dtype=np.intp) + 1", "ufunc(inp) for i in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i]) def test_gufunc_scalar_input_saxpy(self): def", "A = np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y =", "# Test real version A = np.arange(100, dtype=np.float32) B =", "for j in range(5): for i in range(2): exp =", "A = A.reshape(5, 10, 2) B = B.reshape(5, 10, 2)", "= ufunc(inp) for i in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i]) def test_gufunc_scalar_input_saxpy(self):", "= 
np.arange(10, dtype=np.float32).reshape(5, 2) Y = np.arange(10, dtype=np.float32).reshape(5, 2) out", "B = np.arange(100, dtype=np.intp) + 1 expected = A +", "TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:], float32[:], float32[:])\"], '(),(t),(t)->(t)', target='roc') def", "self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) # Test real version with 2D", "for i in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i]) def test_gufunc_scalar_input_saxpy(self): def axpy(a,", "i] + Y[j, i] self.assertTrue(exp == out[j, i]) X =", "y[i] A = np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y", "+ 1 expected = A + B got = ufunc(A,", "range(out.shape[0]): out[i] = a * x[i] + y[i] A =", "2) expected = A + B got = ufunc(A, B)", "out[j, i], (exp, out[j, i])) class TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self): @guvectorize([\"void(float32,", "HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:], float32[:], float32[:])\") ufbldr.add(\"(intp[:], intp[:], intp[:])\") ufunc =", "self.assertEqual(np.dtype(np.float32), got.dtype) # Test real version with 2D inputs A", "# Test real version with 2D inputs A = A.reshape(50,", "for i in range(2): exp = A[j] * X[j, i]", "Y[j, i] self.assertTrue(exp == out[j, i], (exp, out[j, i])) class", "expected = A + B got = ufunc(A, B) np.testing.assert_equal(expected,", "range(5): for i in range(2): exp = A * X[j,", "= np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y = np.arange(10,", "A.reshape(50, 2) B = B.reshape(50, 2) expected = A +", "+ B got = ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype)", "np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test real version", "got.dtype) # Test integer version with 2D inputs A =", 
"range(5): for i in range(2): exp = A[j] * X[j,", "got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test real version A", "range(2): exp = A[j] * X[j, i] + Y[j, i]", "i] self.assertTrue(exp == out[j, i], (exp, out[j, i])) if __name__", "= ufunc(A, B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) #", "= A * X[j] + Y[j] self.assertTrue(exp == out[j], (exp,", "A = np.arange(5, dtype=np.float32) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y", "def axpy(a, x, y, out): for i in range(out.shape[0]): out[i]", "x[i] + y[i] A = np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5,", "b[i] class TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self): ufbldr = HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:],", "integer version A = np.arange(100, dtype=np.intp) B = np.arange(100, dtype=np.intp)", "= HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:], float32[:], float32[:])\") saxpy = ufbldr.build_ufunc()", "ufbldr.add(\"(float32[:], float32[:], float32[:])\") ufbldr.add(\"(intp[:], intp[:], intp[:])\") ufunc = ufbldr.build_ufunc() self.assertIsInstance(ufunc,", "ufunc_add_core(a, b, c): for i in range(c.size): c[i] = a[i]", "+= inp[i] out[0] = tmp ufbldr = HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:],", "* x[i] + y[i] A = np.float32(2) X = np.arange(10,", "ufunc = ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc) # Test integer version A", "+ Y[j, i] self.assertTrue(exp == out[j, i], (exp, out[j, i]))", "from numba.roc.vectorizers import HsaGUFuncVectorize from numba.roc.dispatch import HSAGenerializedUFunc from numba", "real version with 2D inputs A = A.reshape(50, 2) B", "X = np.arange(10, dtype=np.float32) Y = np.arange(10, dtype=np.float32) out =", "j in range(5): for i in range(2): exp = A", 
"dtype=np.int32).reshape(100, 3) out = ufunc(inp) for i in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(),", "def saxpy(a, x, y, out): for i in range(out.shape[0]): out[i]", "np.arange(5, dtype=np.float32) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y = np.arange(10,", "integer version with 2D inputs A = A.reshape(50, 2) B", "float32[:], float32[:])\"], '(),(t),(t)->(t)', target='roc') def saxpy(a, x, y, out): for", "out[i] = a * x[i] + y[i] ufbldr = HsaGUFuncVectorize(axpy,", "self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version with 3D inputs A", "Test real version A = np.arange(100, dtype=np.float32) B = np.arange(100,", "np.arange(10, dtype=np.float32) out = saxpy(A, X, Y) for j in", "np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y = np.arange(10, dtype=np.float32).reshape(5,", "+ b[i] class TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self): ufbldr = HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\")", "= saxpy(A, X, Y) for j in range(10): exp =", "dtype=np.float32).reshape(5, 2) Y = np.arange(10, dtype=np.float32).reshape(5, 2) out = saxpy(A,", "2) B = B.reshape(50, 2) expected = A + B", "ufunc(A, B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test", "float32[:], float32[:], float32[:])\"], '(),(t),(t)->(t)', target='roc') def saxpy(a, x, y, out):", "for j in range(10): exp = A * X[j] +", "np.arange(10, dtype=np.float32) Y = np.arange(10, dtype=np.float32) out = saxpy(A, X,", "c): for i in range(c.size): c[i] = a[i] + b[i]", "real version A = np.arange(100, dtype=np.float32) B = np.arange(100, dtype=np.float32)", "import numpy as np from numba.roc.vectorizers import HsaGUFuncVectorize from numba.roc.dispatch", "for i in range(out.shape[0]): out[i] = a * x[i] +", "i] self.assertTrue(exp == out[j, i], (exp, out[j, i])) class TestGUFuncDecor(unittest.TestCase):", "got.dtype) def 
test_gufunc_building_scalar_output(self): def sum_row(inp, out): tmp = 0. for", "np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version", "x, y, out): for i in range(out.shape[0]): out[i] = a", "self.assertEqual(np.dtype(np.intp), got.dtype) # Test real version A = np.arange(100, dtype=np.float32)", "np.testing.assert_equal(inp[i].sum(), out[i]) def test_gufunc_scalar_input_saxpy(self): def axpy(a, x, y, out): for", "guvectorize import unittest def ufunc_add_core(a, b, c): for i in", "Y[j, i] self.assertTrue(exp == out[j, i]) X = np.arange(10, dtype=np.float32)", "= B.reshape(50, 2) expected = A + B got =", "out = saxpy(A, X, Y) for j in range(10): exp", "= np.arange(5, dtype=np.float32) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y =", "A * X[j, i] + Y[j, i] self.assertTrue(exp == out[j,", "<reponame>luk-f-a/numba import numpy as np from numba.roc.vectorizers import HsaGUFuncVectorize from", "i] + Y[j, i] self.assertTrue(exp == out[j, i], (exp, out[j,", "y, out): for i in range(out.shape[0]): out[i] = a *", "out[j], (exp, out[j])) A = np.arange(5, dtype=np.float32) X = np.arange(10,", "+ y[i] ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:], float32[:], float32[:])\")", "ufbldr.add(\"(intp[:], intp[:], intp[:])\") ufunc = ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc) # Test", "i]) X = np.arange(10, dtype=np.float32) Y = np.arange(10, dtype=np.float32) out", "Y = np.arange(10, dtype=np.float32) out = saxpy(A, X, Y) for", "test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:], float32[:], float32[:])\"], '(),(t),(t)->(t)', target='roc') def saxpy(a, x,", "ufunc = ufbldr.build_ufunc() inp = np.arange(300, dtype=np.int32).reshape(100, 3) out =", "version A = np.arange(100, dtype=np.intp) B = np.arange(100, dtype=np.intp) +", "+ Y[j, i] self.assertTrue(exp == out[j, i]) X = 
np.arange(10,", "np.arange(10, dtype=np.float32).reshape(5, 2) out = saxpy(A, X, Y) for j", "from numba import guvectorize import unittest def ufunc_add_core(a, b, c):", "def test_gufunc_building_scalar_output(self): def sum_row(inp, out): tmp = 0. for i", "import HsaGUFuncVectorize from numba.roc.dispatch import HSAGenerializedUFunc from numba import guvectorize", "Y[j] self.assertTrue(exp == out[j], (exp, out[j])) A = np.arange(5, dtype=np.float32)", "self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version with 3D", "(exp, out[j, i])) class TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:], float32[:],", "got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version with", "i in range(2): exp = A[j] * X[j, i] +", "Test integer version with 2D inputs A = A.reshape(50, 2)", "dtype=np.float32) + 1 expected = A + B got =", "np.arange(10, dtype=np.float32).reshape(5, 2) Y = np.arange(10, dtype=np.float32).reshape(5, 2) out =", "in range(inp.shape[0]): tmp += inp[i] out[0] = tmp ufbldr =", "inputs A = A.reshape(5, 10, 2) B = B.reshape(5, 10,", "== out[j, i], (exp, out[j, i])) class TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self):", "exp = A[j] * X[j, i] + Y[j, i] self.assertTrue(exp", "Test real version with 2D inputs A = A.reshape(50, 2)", "== out[j], (exp, out[j])) A = np.arange(5, dtype=np.float32) X =", "in range(out.shape[0]): out[i] = a * x[i] + y[i] ufbldr", "= ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) #", "= np.arange(100, dtype=np.intp) B = np.arange(100, dtype=np.intp) + 1 expected", "range(out.shape[0]): out[i] = a * x[i] + y[i] ufbldr =", "in range(out.shape[0]): out[i] = a * x[i] + y[i] A", "Y = np.arange(10, dtype=np.float32).reshape(5, 2) out = saxpy(A, X, 
Y)", "= tmp ufbldr = HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\") ufunc =", "B = B.reshape(50, 2) expected = A + B got", "= a * x[i] + y[i] ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)')", "numba import guvectorize import unittest def ufunc_add_core(a, b, c): for", "def ufunc_add_core(a, b, c): for i in range(c.size): c[i] =", "i] self.assertTrue(exp == out[j, i]) X = np.arange(10, dtype=np.float32) Y", "= 0. for i in range(inp.shape[0]): tmp += inp[i] out[0]", "got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer version with 2D inputs", "i])) class TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:], float32[:], float32[:])\"], '(),(t),(t)->(t)',", "out = saxpy(A, X, Y) for j in range(5): for", "Y) for j in range(10): exp = A * X[j]", "# Test integer version with 2D inputs A = A.reshape(50,", "i in range(out.shape[0]): out[i] = a * x[i] + y[i]", "'(),(t),(t)->(t)', target='roc') def saxpy(a, x, y, out): for i in", "dtype=np.float32) Y = np.arange(10, dtype=np.float32) out = saxpy(A, X, Y)", "got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) # Test real version with 2D inputs", "got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) def test_gufunc_building_scalar_output(self): def sum_row(inp, out): tmp =", "HsaGUFuncVectorize from numba.roc.dispatch import HSAGenerializedUFunc from numba import guvectorize import", "= A + B got = ufunc(A, B) np.testing.assert_allclose(expected, got)", "tmp += inp[i] out[0] = tmp ufbldr = HsaGUFuncVectorize(sum_row, \"(n)->()\")", "out[0] = tmp ufbldr = HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\") ufunc", "sum_row(inp, out): tmp = 0. 
for i in range(inp.shape[0]): tmp", "2) out = saxpy(A, X, Y) for j in range(5):", "= ufbldr.build_ufunc() A = np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5, 2)", "dtype=np.intp) B = np.arange(100, dtype=np.intp) + 1 expected = A", "out = ufunc(inp) for i in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i]) def", "dtype=np.float32) B = np.arange(100, dtype=np.float32) + 1 expected = A", "A = A.reshape(50, 2) B = B.reshape(50, 2) expected =", "A * X[j] + Y[j] self.assertTrue(exp == out[j], (exp, out[j]))", "2) B = B.reshape(5, 10, 2) expected = A +", "HSAGenerializedUFunc) # Test integer version A = np.arange(100, dtype=np.intp) B", "version A = np.arange(100, dtype=np.float32) B = np.arange(100, dtype=np.float32) +", "ufbldr.add(\"void(float32, float32[:], float32[:], float32[:])\") saxpy = ufbldr.build_ufunc() A = np.float32(2)", "self.assertIsInstance(ufunc, HSAGenerializedUFunc) # Test integer version A = np.arange(100, dtype=np.intp)", "= HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:], float32[:], float32[:])\") ufbldr.add(\"(intp[:], intp[:], intp[:])\") ufunc", "B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test integer", "= np.arange(100, dtype=np.float32) + 1 expected = A + B", "saxpy(A, X, Y) for j in range(10): exp = A", "= A.reshape(5, 10, 2) B = B.reshape(5, 10, 2) expected", "for i in range(c.size): c[i] = a[i] + b[i] class", "+ y[i] A = np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5, 2)", "B got = ufunc(A, B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp),", "== out[j, i]) X = np.arange(10, dtype=np.float32) Y = np.arange(10,", "test_gufunc_building_scalar_output(self): def sum_row(inp, out): tmp = 0. 
for i in", "y[i] ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:], float32[:], float32[:])\") saxpy", "int32[:])\") ufunc = ufbldr.build_ufunc() inp = np.arange(300, dtype=np.int32).reshape(100, 3) out", "def test_gufunc_scalar_input_saxpy(self): def axpy(a, x, y, out): for i in", "'(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:], float32[:], float32[:])\") saxpy = ufbldr.build_ufunc() A =", "version with 3D inputs A = A.reshape(5, 10, 2) B", "B = B.reshape(5, 10, 2) expected = A + B", "# Test integer version with 3D inputs A = A.reshape(5,", "= ufbldr.build_ufunc() inp = np.arange(300, dtype=np.int32).reshape(100, 3) out = ufunc(inp)", "3) out = ufunc(inp) for i in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i])", "X[j, i] + Y[j, i] self.assertTrue(exp == out[j, i], (exp,", "out[j, i]) X = np.arange(10, dtype=np.float32) Y = np.arange(10, dtype=np.float32)", "dtype=np.float32) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y = np.arange(10, dtype=np.float32).reshape(5,", "import guvectorize import unittest def ufunc_add_core(a, b, c): for i", "A = np.arange(100, dtype=np.float32) B = np.arange(100, dtype=np.float32) + 1", "dtype=np.intp) + 1 expected = A + B got =", "float32[:])\") ufbldr.add(\"(intp[:], intp[:], intp[:])\") ufunc = ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc) #", "= a[i] + b[i] class TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self): ufbldr =", "HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:], float32[:], float32[:])\") saxpy = ufbldr.build_ufunc() A", "np.arange(100, dtype=np.float32) B = np.arange(100, dtype=np.float32) + 1 expected =", "X[j] + Y[j] self.assertTrue(exp == out[j], (exp, out[j])) A =", "1 expected = A + B got = ufunc(A, B)", "Y[j, i] self.assertTrue(exp == out[j, i], (exp, out[j, i])) if", "= np.arange(10, dtype=np.float32).reshape(5, 2) out = saxpy(A, X, Y) for", "class 
TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self): ufbldr = HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:], float32[:],", "# Test integer version A = np.arange(100, dtype=np.intp) B =", "saxpy(A, X, Y) for j in range(5): for i in", "X = np.arange(10, dtype=np.float32).reshape(5, 2) Y = np.arange(10, dtype=np.float32).reshape(5, 2)", "A.reshape(5, 10, 2) B = B.reshape(5, 10, 2) expected =", "np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) def test_gufunc_building_scalar_output(self): def sum_row(inp,", "HSAGenerializedUFunc from numba import guvectorize import unittest def ufunc_add_core(a, b,", "\"(x),(x)->(x)\") ufbldr.add(\"(float32[:], float32[:], float32[:])\") ufbldr.add(\"(intp[:], intp[:], intp[:])\") ufunc = ufbldr.build_ufunc()", "for i in range(2): exp = A * X[j, i]", "= a * x[i] + y[i] A = np.float32(2) X", "inp[i] out[0] = tmp ufbldr = HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\")", "ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:], float32[:], float32[:])\") saxpy =", "def test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:], float32[:], float32[:])\"], '(),(t),(t)->(t)', target='roc') def saxpy(a,", "range(c.size): c[i] = a[i] + b[i] class TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self):", "np from numba.roc.vectorizers import HsaGUFuncVectorize from numba.roc.dispatch import HSAGenerializedUFunc from", "tmp = 0. 
for i in range(inp.shape[0]): tmp += inp[i]", "in range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i]) def test_gufunc_scalar_input_saxpy(self): def axpy(a, x, y,", "got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test real version A = np.arange(100,", "B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) def test_gufunc_building_scalar_output(self): def", "i in range(c.size): c[i] = a[i] + b[i] class TestGUFuncBuilding(unittest.TestCase):", "X, Y) for j in range(10): exp = A *", "Test integer version A = np.arange(100, dtype=np.intp) B = np.arange(100,", "float32[:])\") saxpy = ufbldr.build_ufunc() A = np.float32(2) X = np.arange(10,", "in range(c.size): c[i] = a[i] + b[i] class TestGUFuncBuilding(unittest.TestCase): def", "float32[:])\"], '(),(t),(t)->(t)', target='roc') def saxpy(a, x, y, out): for i", "self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test real version A =", "= A * X[j, i] + Y[j, i] self.assertTrue(exp ==", "np.arange(300, dtype=np.int32).reshape(100, 3) out = ufunc(inp) for i in range(inp.shape[0]):", "0. 
for i in range(inp.shape[0]): tmp += inp[i] out[0] =", "b, c): for i in range(c.size): c[i] = a[i] +", "got.dtype) # Test real version A = np.arange(100, dtype=np.float32) B", "X, Y) for j in range(5): for i in range(2):", "out[j])) A = np.arange(5, dtype=np.float32) X = np.arange(10, dtype=np.float32).reshape(5, 2)", "= np.arange(100, dtype=np.float32) B = np.arange(100, dtype=np.float32) + 1 expected", "x[i] + y[i] ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)') ufbldr.add(\"void(float32, float32[:], float32[:],", "with 2D inputs A = A.reshape(50, 2) B = B.reshape(50,", "j in range(5): for i in range(2): exp = A[j]", "with 3D inputs A = A.reshape(5, 10, 2) B =", "self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) def test_gufunc_building_scalar_output(self): def sum_row(inp, out): tmp", "self.assertEqual(np.dtype(np.float32), got.dtype) def test_gufunc_building_scalar_output(self): def sum_row(inp, out): tmp = 0.", "ufbldr.build_ufunc() A = np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5, 2) Y", "out[j, i], (exp, out[j, i])) if __name__ == '__main__': unittest.main()", "out): tmp = 0. 
for i in range(inp.shape[0]): tmp +=", "float32[:], float32[:])\") saxpy = ufbldr.build_ufunc() A = np.float32(2) X =", "ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) # Test", "in range(5): for i in range(2): exp = A *", "B = np.arange(100, dtype=np.float32) + 1 expected = A +", "A + B got = ufunc(A, B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype,", "np.arange(100, dtype=np.intp) + 1 expected = A + B got", "= np.arange(10, dtype=np.float32) out = saxpy(A, X, Y) for j", "in range(2): exp = A * X[j, i] + Y[j,", "TestGUFuncBuilding(unittest.TestCase): def test_gufunc_building(self): ufbldr = HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:], float32[:], float32[:])\")", "B) np.testing.assert_allclose(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.float32), got.dtype) # Test real", "tmp ufbldr = HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\") ufunc = ufbldr.build_ufunc()", "dtype=np.float32).reshape(5, 2) out = saxpy(A, X, Y) for j in", "i], (exp, out[j, i])) class TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:],", "got = ufunc(A, B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype)", "= saxpy(A, X, Y) for j in range(5): for i", "exp = A * X[j] + Y[j] self.assertTrue(exp == out[j],", "range(inp.shape[0]): np.testing.assert_equal(inp[i].sum(), out[i]) def test_gufunc_scalar_input_saxpy(self): def axpy(a, x, y, out):", "saxpy = ufbldr.build_ufunc() A = np.float32(2) X = np.arange(10, dtype=np.float32).reshape(5,", "out[j, i])) class TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:], float32[:], float32[:])\"],", "3D inputs A = A.reshape(5, 
10, 2) B = B.reshape(5,", "= HsaGUFuncVectorize(sum_row, \"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\") ufunc = ufbldr.build_ufunc() inp =", "i in range(2): exp = A * X[j, i] +", "numba.roc.dispatch import HSAGenerializedUFunc from numba import guvectorize import unittest def", "expected = A + B got = ufunc(A, B) np.testing.assert_allclose(expected,", "= B.reshape(5, 10, 2) expected = A + B got", "B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype) self.assertEqual(np.dtype(np.intp), got.dtype) # Test real", "np.arange(100, dtype=np.float32) + 1 expected = A + B got", "self.assertTrue(exp == out[j, i]) X = np.arange(10, dtype=np.float32) Y =", "@guvectorize([\"void(float32, float32[:], float32[:], float32[:])\"], '(),(t),(t)->(t)', target='roc') def saxpy(a, x, y,", "axpy(a, x, y, out): for i in range(out.shape[0]): out[i] =", "range(inp.shape[0]): tmp += inp[i] out[0] = tmp ufbldr = HsaGUFuncVectorize(sum_row,", "out[i] = a * x[i] + y[i] A = np.float32(2)", "self.assertTrue(exp == out[j, i], (exp, out[j, i])) class TestGUFuncDecor(unittest.TestCase): def", "== out[j, i], (exp, out[j, i])) if __name__ == '__main__':", "out[i]) def test_gufunc_scalar_input_saxpy(self): def axpy(a, x, y, out): for i", "= np.arange(10, dtype=np.float32) Y = np.arange(10, dtype=np.float32) out = saxpy(A,", "np.arange(100, dtype=np.intp) B = np.arange(100, dtype=np.intp) + 1 expected =", "2D inputs A = A.reshape(50, 2) B = B.reshape(50, 2)", "10, 2) B = B.reshape(5, 10, 2) expected = A", "import HSAGenerializedUFunc from numba import guvectorize import unittest def ufunc_add_core(a,", "got.dtype) # Test real version with 2D inputs A =", "* X[j, i] + Y[j, i] self.assertTrue(exp == out[j, i],", "\"(n)->()\") ufbldr.add(\"void(int32[:], int32[:])\") ufunc = ufbldr.build_ufunc() inp = np.arange(300, dtype=np.int32).reshape(100,", "def test_gufunc_building(self): ufbldr = HsaGUFuncVectorize(ufunc_add_core, \"(x),(x)->(x)\") ufbldr.add(\"(float32[:], 
float32[:], float32[:])\") ufbldr.add(\"(intp[:],", "a * x[i] + y[i] A = np.float32(2) X =", "in range(10): exp = A * X[j] + Y[j] self.assertTrue(exp", "Y) for j in range(5): for i in range(2): exp", "intp[:], intp[:])\") ufunc = ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc) # Test integer", "10, 2) expected = A + B got = ufunc(A,", "+ Y[j] self.assertTrue(exp == out[j], (exp, out[j])) A = np.arange(5,", "from numba.roc.dispatch import HSAGenerializedUFunc from numba import guvectorize import unittest", "* X[j] + Y[j] self.assertTrue(exp == out[j], (exp, out[j])) A", "B.reshape(5, 10, 2) expected = A + B got =", "test_gufunc_scalar_input_saxpy(self): def axpy(a, x, y, out): for i in range(out.shape[0]):", "+ B got = ufunc(A, B) np.testing.assert_equal(expected, got) self.assertEqual(expected.dtype, got.dtype)", "inputs A = A.reshape(50, 2) B = B.reshape(50, 2) expected", "i in range(inp.shape[0]): tmp += inp[i] out[0] = tmp ufbldr", "float32[:], float32[:])\") ufbldr.add(\"(intp[:], intp[:], intp[:])\") ufunc = ufbldr.build_ufunc() self.assertIsInstance(ufunc, HSAGenerializedUFunc)", "= A + B got = ufunc(A, B) np.testing.assert_equal(expected, got)", "saxpy(a, x, y, out): for i in range(out.shape[0]): out[i] =", "import unittest def ufunc_add_core(a, b, c): for i in range(c.size):", "inp = np.arange(300, dtype=np.int32).reshape(100, 3) out = ufunc(inp) for i", "class TestGUFuncDecor(unittest.TestCase): def test_gufunc_decorator(self): @guvectorize([\"void(float32, float32[:], float32[:], float32[:])\"], '(),(t),(t)->(t)', target='roc')" ]
[ "pymongo import MongoClient from dotenv import load_dotenv def database_entry(data): try:", "mongo_string = os.getenv('MONGODB_AUTH_URI') client = MongoClient(mongo_string) database = client[os.getenv('MONGODB_DB')] col", "database = client[os.getenv('MONGODB_DB')] col = database['users'] col.insert_one(data) return True except", "= os.getenv('MONGODB_AUTH_URI') client = MongoClient(mongo_string) database = client[os.getenv('MONGODB_DB')] col =", "import MongoClient from dotenv import load_dotenv def database_entry(data): try: load_dotenv()", "import os from pymongo import MongoClient from dotenv import load_dotenv", "from dotenv import load_dotenv def database_entry(data): try: load_dotenv() mongo_string =", "as e: print(e) return False if __name__ == \"__main__\": pass", "try: load_dotenv() mongo_string = os.getenv('MONGODB_AUTH_URI') client = MongoClient(mongo_string) database =", "MongoClient from dotenv import load_dotenv def database_entry(data): try: load_dotenv() mongo_string", "MongoClient(mongo_string) database = client[os.getenv('MONGODB_DB')] col = database['users'] col.insert_one(data) return True", "import load_dotenv def database_entry(data): try: load_dotenv() mongo_string = os.getenv('MONGODB_AUTH_URI') client", "os from pymongo import MongoClient from dotenv import load_dotenv def", "True except Exception as e: print(e) return False if __name__", "database_entry(data): try: load_dotenv() mongo_string = os.getenv('MONGODB_AUTH_URI') client = MongoClient(mongo_string) database", "load_dotenv() mongo_string = os.getenv('MONGODB_AUTH_URI') client = MongoClient(mongo_string) database = client[os.getenv('MONGODB_DB')]", "= client[os.getenv('MONGODB_DB')] col = database['users'] col.insert_one(data) return True except Exception", "except Exception as e: print(e) return False if __name__ ==", "os.getenv('MONGODB_AUTH_URI') client = MongoClient(mongo_string) database = client[os.getenv('MONGODB_DB')] col = database['users']", "dotenv import load_dotenv 
def database_entry(data): try: load_dotenv() mongo_string = os.getenv('MONGODB_AUTH_URI')", "load_dotenv def database_entry(data): try: load_dotenv() mongo_string = os.getenv('MONGODB_AUTH_URI') client =", "def database_entry(data): try: load_dotenv() mongo_string = os.getenv('MONGODB_AUTH_URI') client = MongoClient(mongo_string)", "col = database['users'] col.insert_one(data) return True except Exception as e:", "= database['users'] col.insert_one(data) return True except Exception as e: print(e)", "client[os.getenv('MONGODB_DB')] col = database['users'] col.insert_one(data) return True except Exception as", "client = MongoClient(mongo_string) database = client[os.getenv('MONGODB_DB')] col = database['users'] col.insert_one(data)", "col.insert_one(data) return True except Exception as e: print(e) return False", "database['users'] col.insert_one(data) return True except Exception as e: print(e) return", "Exception as e: print(e) return False if __name__ == \"__main__\":", "from pymongo import MongoClient from dotenv import load_dotenv def database_entry(data):", "return True except Exception as e: print(e) return False if", "= MongoClient(mongo_string) database = client[os.getenv('MONGODB_DB')] col = database['users'] col.insert_one(data) return" ]
[ "ip = line[0] hosts = line[1:] for record in hostnames:", "line.isspace(): continue line = line.strip().split() ip = line[0] hosts =", "print('File does not exist') except PermissionError: print('Permission denied') for line", "for record in hostnames: if record['ip'] == ip: record['hostnames'].update(hosts) break", "if record['ip'] == ip: record['hostnames'].update(hosts) break else: hostnames.append({ 'hostnames': set(hosts),", "record['hostnames'].update(hosts) break else: hostnames.append({ 'hostnames': set(hosts), 'protocol': 'IPv4' if '.'", "r'../src/etc-hosts.txt' hostnames = [] try: with open(FILE, encoding='utf-8') as file:", "break else: hostnames.append({ 'hostnames': set(hosts), 'protocol': 'IPv4' if '.' in", "hostnames = [] try: with open(FILE, encoding='utf-8') as file: content", "content: if line.startswith('#'): continue if line.isspace(): continue line = line.strip().split()", "'protocol': 'IPv4' if '.' in ip else 'IPv6', 'ip': ip,", "== ip: record['hostnames'].update(hosts) break else: hostnames.append({ 'hostnames': set(hosts), 'protocol': 'IPv4'", "if '.' in ip else 'IPv6', 'ip': ip, }) print(hostnames)", "'IPv4' if '.' 
in ip else 'IPv6', 'ip': ip, })", "= line[1:] for record in hostnames: if record['ip'] == ip:", "continue line = line.strip().split() ip = line[0] hosts = line[1:]", "try: with open(FILE, encoding='utf-8') as file: content = file.readlines() except", "as file: content = file.readlines() except FileNotFoundError: print('File does not", "line = line.strip().split() ip = line[0] hosts = line[1:] for", "continue if line.isspace(): continue line = line.strip().split() ip = line[0]", "line[0] hosts = line[1:] for record in hostnames: if record['ip']", "with open(FILE, encoding='utf-8') as file: content = file.readlines() except FileNotFoundError:", "line.startswith('#'): continue if line.isspace(): continue line = line.strip().split() ip =", "denied') for line in content: if line.startswith('#'): continue if line.isspace():", "= line[0] hosts = line[1:] for record in hostnames: if", "else: hostnames.append({ 'hostnames': set(hosts), 'protocol': 'IPv4' if '.' in ip", "print('Permission denied') for line in content: if line.startswith('#'): continue if", "file: content = file.readlines() except FileNotFoundError: print('File does not exist')", "= [] try: with open(FILE, encoding='utf-8') as file: content =", "hosts = line[1:] for record in hostnames: if record['ip'] ==", "record['ip'] == ip: record['hostnames'].update(hosts) break else: hostnames.append({ 'hostnames': set(hosts), 'protocol':", "hostnames: if record['ip'] == ip: record['hostnames'].update(hosts) break else: hostnames.append({ 'hostnames':", "content = file.readlines() except FileNotFoundError: print('File does not exist') except", "'hostnames': set(hosts), 'protocol': 'IPv4' if '.' in ip else 'IPv6',", "if line.startswith('#'): continue if line.isspace(): continue line = line.strip().split() ip", "set(hosts), 'protocol': 'IPv4' if '.' 
in ip else 'IPv6', 'ip':", "FILE = r'../src/etc-hosts.txt' hostnames = [] try: with open(FILE, encoding='utf-8')", "encoding='utf-8') as file: content = file.readlines() except FileNotFoundError: print('File does", "for line in content: if line.startswith('#'): continue if line.isspace(): continue", "open(FILE, encoding='utf-8') as file: content = file.readlines() except FileNotFoundError: print('File", "PermissionError: print('Permission denied') for line in content: if line.startswith('#'): continue", "except PermissionError: print('Permission denied') for line in content: if line.startswith('#'):", "in hostnames: if record['ip'] == ip: record['hostnames'].update(hosts) break else: hostnames.append({", "except FileNotFoundError: print('File does not exist') except PermissionError: print('Permission denied')", "file.readlines() except FileNotFoundError: print('File does not exist') except PermissionError: print('Permission", "= line.strip().split() ip = line[0] hosts = line[1:] for record", "line.strip().split() ip = line[0] hosts = line[1:] for record in", "record in hostnames: if record['ip'] == ip: record['hostnames'].update(hosts) break else:", "line[1:] for record in hostnames: if record['ip'] == ip: record['hostnames'].update(hosts)", "not exist') except PermissionError: print('Permission denied') for line in content:", "if line.isspace(): continue line = line.strip().split() ip = line[0] hosts", "hostnames.append({ 'hostnames': set(hosts), 'protocol': 'IPv4' if '.' 
in ip else", "exist') except PermissionError: print('Permission denied') for line in content: if", "[] try: with open(FILE, encoding='utf-8') as file: content = file.readlines()", "does not exist') except PermissionError: print('Permission denied') for line in", "ip: record['hostnames'].update(hosts) break else: hostnames.append({ 'hostnames': set(hosts), 'protocol': 'IPv4' if", "= file.readlines() except FileNotFoundError: print('File does not exist') except PermissionError:", "in content: if line.startswith('#'): continue if line.isspace(): continue line =", "line in content: if line.startswith('#'): continue if line.isspace(): continue line", "FileNotFoundError: print('File does not exist') except PermissionError: print('Permission denied') for", "= r'../src/etc-hosts.txt' hostnames = [] try: with open(FILE, encoding='utf-8') as" ]
[ "url, headers ): \"\"\"Get the number of submissions we can", "per day for the selected challenge. Parameters ---------- url :", "for the selected challenge. Parameters ---------- url : {'prize', 'points',", "'points', 'knowledge' , 'all'}, default='all' The reward of the challenges", "the challenges for top challengers. headers : dictionary , The", "of the request. Returns ------- n_sub : int, default=0 :", "Means error during info retrieval. The number of submissions we", "def n_subimissions_per_day( url, headers ): \"\"\"Get the number of submissions", "): \"\"\"Get the number of submissions we can make per", "challengers. headers : dictionary , The headers of the request.", "during info retrieval. The number of submissions we can make", ", 'all'}, default='all' The reward of the challenges for top", "for top challengers. headers : dictionary , The headers of", ": Means error during info retrieval. The number of submissions", "info retrieval. The number of submissions we can make per", "int, default=0 : Means error during info retrieval. The number", "------- n_sub : int, default=0 : Means error during info", "dictionary , The headers of the request. Returns ------- n_sub", "challenge. Parameters ---------- url : {'prize', 'points', 'knowledge' , 'all'},", "---------- url : {'prize', 'points', 'knowledge' , 'all'}, default='all' The", "default='all' The reward of the challenges for top challengers. headers", "The number of submissions we can make per day. \"\"\"", "number of submissions we can make per day for the", "headers of the request. Returns ------- n_sub : int, default=0", "n_subimissions_per_day( url, headers ): \"\"\"Get the number of submissions we", "challenges for top challengers. headers : dictionary , The headers", "day for the selected challenge. Parameters ---------- url : {'prize',", "reward of the challenges for top challengers. 
headers : dictionary", "'knowledge' , 'all'}, default='all' The reward of the challenges for", "The headers of the request. Returns ------- n_sub : int,", "top challengers. headers : dictionary , The headers of the", "default=0 : Means error during info retrieval. The number of", "the number of submissions we can make per day for", "n_sub : int, default=0 : Means error during info retrieval.", ": {'prize', 'points', 'knowledge' , 'all'}, default='all' The reward of", "of submissions we can make per day for the selected", "Returns ------- n_sub : int, default=0 : Means error during", "we can make per day for the selected challenge. Parameters", "the request. Returns ------- n_sub : int, default=0 : Means", ": int, default=0 : Means error during info retrieval. The", "The reward of the challenges for top challengers. headers :", "error during info retrieval. The number of submissions we can", "submissions we can make per day for the selected challenge.", "Parameters ---------- url : {'prize', 'points', 'knowledge' , 'all'}, default='all'", "\"\"\"Get the number of submissions we can make per day", ": dictionary , The headers of the request. Returns -------", "can make per day for the selected challenge. Parameters ----------", "of the challenges for top challengers. headers : dictionary ,", "{'prize', 'points', 'knowledge' , 'all'}, default='all' The reward of the", "make per day for the selected challenge. Parameters ---------- url", "headers : dictionary , The headers of the request. Returns", "url : {'prize', 'points', 'knowledge' , 'all'}, default='all' The reward", ", The headers of the request. Returns ------- n_sub :", "headers ): \"\"\"Get the number of submissions we can make", "request. Returns ------- n_sub : int, default=0 : Means error", "selected challenge. Parameters ---------- url : {'prize', 'points', 'knowledge' ,", "the selected challenge. Parameters ---------- url : {'prize', 'points', 'knowledge'", "retrieval. 
The number of submissions we can make per day.", "'all'}, default='all' The reward of the challenges for top challengers." ]
[ "maximum_cut, maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given the following", "/ \\ | (e) (f) \"\"\" g = Graph.build(edges=[('a', 'b'),", "Graph.build(edges=[ ('u', 'v', 3), ('u', 'w', 2), ('u', 'x', 5),", "(d) | \\ /| | x | | / \\", "(w)---(x) \"\"\" g = Graph.build(edges=[ ('u', 'v'), ('u', 'w'), ('u',", "[3,4], 'either 3 or 4') self.assertIn(len(right), [3,4], 'eighter 3 or", "the same subsets') def test_maximum_cut(self): \"\"\" Given a graph: (u)----(v)", "or 4') self.assertIn(len(right), [3,4], 'eighter 3 or 4') self.assertEqual(7, len(left)+len(right),", "(u)-3-(v) | \\ / | | 5\\/1 4 2 /\\", "# -*- coding: utf-8 -*- import unittest from src.graph import", "from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self): \"\"\"", "('v', 'x'),('w', 'x')], directed=False) (left, right) = maximum_cut(g) expected =", "self.assertIn(set(left), expected, 'should correctly split the graph') self.assertIn(set(right), expected, 'should", "src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given", "'x', 5), ('v', 'x', 4),('w', 'x', 6)], directed=False) (left, right)", "'not the same subsets') def test_maximum_cut(self): \"\"\" Given a graph:", "| (w)-6-(x) \"\"\" g = Graph.build(edges=[ ('u', 'v', 3), ('u',", "'x', 4),('w', 'x', 6)], directed=False) (left, right) = maximum_cut(g) self.assertEqual(2,", "| | 5\\/1 4 2 /\\ | | / \\", "{'x', 'u'}, {'w', 'v'}] self.assertNotEqual(left, right, 'no common vertices between", "def test_weighted_maximum_cut(self): \"\"\" Given the following weighted graph. 
(u)-3-(v) |", "Given a graph: (u)----(v) | \\ / | | \\/", "| / \\ | (e) (f) \"\"\" g = Graph.build(edges=[('a',", "coding: utf-8 -*- import unittest from src.graph import Graph from", "self.assertIn(len(left), [3,4], 'either 3 or 4') self.assertIn(len(right), [3,4], 'eighter 3", "'x', 6)], directed=False) (left, right) = maximum_cut(g) self.assertEqual(2, len(left), 'left", "| | / \\ | (b) (d) | \\ /|", "(left, right) = maximum_cut(g) self.assertEqual(2, len(left), 'left should contain 2", "graph: (a) (c) | \\ /| | x | |", "(e) (f) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'd'), ('c',", "'v'}, {'w', 'x'}, {'x', 'u'}, {'w', 'v'}] self.assertNotEqual(left, right, 'no", "or 4') self.assertEqual(7, len(left)+len(right), 'no vertex counted twice') def test_maximum_cut_for_larger_bipartite_graphs(self):", "(b) (d) | \\ /| | x | | /", "('u', 'v'), ('u', 'w'), ('u', 'x'), ('v', 'x'),('w', 'x')], directed=False)", "\\ / | | \\/ | | /\\ | |", "/ \\ | (b) (d) | \\ /| | x", "('c', 'b'), ('c', 'd'), ('b', 'e'), ('b', 'f'), ('d', 'e'),", "graph') def test_weighted_maximum_cut(self): \"\"\" Given the following weighted graph. (u)-3-(v)", "'v', 3), ('u', 'w', 2), ('u', 'x', 5), ('v', 'x',", "-*- coding: utf-8 -*- import unittest from src.graph import Graph", "self.assertNotEqual(left, right, 'not the same subsets') def test_maximum_cut(self): \"\"\" Given", "Graph from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self):", "(left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4], 'either 3 or 4')", "twice') def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A sligthly larger graph: (a) (c)", "def test_maximum_cut(self): \"\"\" Given a graph: (u)----(v) | \\ /", "'x')], directed=False) (left, right) = maximum_cut(g) expected = [{'u', 'v'},", "the following weighted graph. 
(u)-3-(v) | \\ / | |", "\"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'd'), ('c', 'b'), ('c',", "vertex counted twice') def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A sligthly larger graph:", "directed=False) (left, right) = maximum_cut(g) expected = [{'u', 'v'}, {'w',", "Graph.build(edges=[('a', 'b'), ('a', 'c'), ('d', 'e'), ('f', 'e'), ('f', 'g')],", "('u', 'v', 3), ('u', 'w', 2), ('u', 'x', 5), ('v',", "'e', 'f']), set(['b', 'd'])]) self.assertNotEqual(left, right, 'not the same subsets')", "(u)----(v) | \\ / | | \\/ | | /\\", "\\ | (w)---(x) \"\"\" g = Graph.build(edges=[ ('u', 'v'), ('u',", "| | / \\ | (e) (f) \"\"\" g =", "graph. (a)-----(b) \\ \\----(c) (d)-----(e) / (f)----/ \\ \\----(g) \"\"\"", "[3,4], 'eighter 3 or 4') self.assertEqual(7, len(left)+len(right), 'no vertex counted", "'c', 'e', 'f']), set(['b', 'd'])]) self.assertIn(set(right), [set(['a', 'c', 'e', 'f']),", "\\----(c) (d)-----(e) / (f)----/ \\ \\----(g) \"\"\" g = Graph.build(edges=[('a',", "maximum_cut(g) expected = [{'u', 'v'}, {'w', 'x'}, {'x', 'u'}, {'w',", "'d'])]) self.assertIn(set(right), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertNotEqual(left, right,", "a graph: (u)----(v) | \\ / | | \\/ |", "{'w', 'x'}, {'x', 'u'}, {'w', 'v'}] self.assertNotEqual(left, right, 'no common", "self.assertEqual(2, len(left), 'left should contain 2 vertices') self.assertEqual(2, len(right), 'right", "'w', 2), ('u', 'x', 5), ('v', 'x', 4),('w', 'x', 6)],", "/| | x | | / \\ | (b) (d)", "4 2 /\\ | | / \\ | (w)-6-(x) \"\"\"", "= maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4], 'either 3 or 4') self.assertIn(len(right), [3,4],", "'should correctly split the graph') self.assertIn(set(right), expected, 'should correctly split", "correctly split the graph') self.assertIn(set(right), expected, 'should correctly split the", "len(left)+len(right), 'no vertex counted twice') def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A sligthly", "right) = 
maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4], 'either 3 or 4') self.assertIn(len(right),", "(a) (c) | \\ /| | x | | /", "\\ \\----(g) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'c'), ('d',", "\\ /| | x | | / \\ | (e)", "Given the following bipartite graph. (a)-----(b) \\ \\----(c) (d)-----(e) /", "split the graph') self.assertIn(set(right), expected, 'should correctly split the graph')", "the graph') def test_weighted_maximum_cut(self): \"\"\" Given the following weighted graph.", "/ \\ | (w)-6-(x) \"\"\" g = Graph.build(edges=[ ('u', 'v',", "import Graph from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase): def", "right, 'no common vertices between cuts') self.assertIn(set(left), expected, 'should correctly", "\"\"\" Given the following weighted graph. (u)-3-(v) | \\ /", "(f)----/ \\ \\----(g) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'c'),", "right) = maximum_cut(g) self.assertEqual(2, len(left), 'left should contain 2 vertices')", "class MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given the following bipartite graph.", "= Graph.build(edges=[ ('u', 'v', 3), ('u', 'w', 2), ('u', 'x',", "\\ | (w)-6-(x) \"\"\" g = Graph.build(edges=[ ('u', 'v', 3),", "directed=False) (left, right) = maximum_cut(g) self.assertEqual(2, len(left), 'left should contain", "{'w', 'v'}] self.assertNotEqual(left, right, 'no common vertices between cuts') self.assertIn(set(left),", "self.assertIn(len(right), [3,4], 'eighter 3 or 4') self.assertEqual(7, len(left)+len(right), 'no vertex", "('d', 'f')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(set(left), [set(['a', 'c',", "g = Graph.build(edges=[('a', 'b'), ('a', 'd'), ('c', 'b'), ('c', 'd'),", "should contain 2 vertices') self.assertEqual(2, len(right), 'right should contain 2", "'f')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) 
self.assertIn(set(left), [set(['a', 'c', 'e',", "'eighter 3 or 4') self.assertEqual(7, len(left)+len(right), 'no vertex counted twice')", "('u', 'w', 2), ('u', 'x', 5), ('v', 'x', 4),('w', 'x',", "expected = [{'u', 'v'}, {'w', 'x'}, {'x', 'u'}, {'w', 'v'}]", "counted twice') def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A sligthly larger graph: (a)", "= Graph.build(edges=[('a', 'b'), ('a', 'd'), ('c', 'b'), ('c', 'd'), ('b',", "('a', 'c'), ('d', 'e'), ('f', 'e'), ('f', 'g')], directed=False) (left,", "\"\"\" A sligthly larger graph: (a) (c) | \\ /|", "('d', 'e'), ('d', 'f')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(set(left),", "set(['b', 'd'])]) self.assertIn(set(right), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertNotEqual(left,", "\\/ | | /\\ | | / \\ | (w)---(x)", "x | | / \\ | (b) (d) | \\", "= [{'u', 'v'}, {'w', 'x'}, {'x', 'u'}, {'w', 'v'}] self.assertNotEqual(left,", "self.assertIn(set(right), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertNotEqual(left, right, 'not", "Graph.build(edges=[ ('u', 'v'), ('u', 'w'), ('u', 'x'), ('v', 'x'),('w', 'x')],", "('u', 'w'), ('u', 'x'), ('v', 'x'),('w', 'x')], directed=False) (left, right)", "('u', 'x', 5), ('v', 'x', 4),('w', 'x', 6)], directed=False) (left,", "graph') self.assertIn(set(right), expected, 'should correctly split the graph') def test_weighted_maximum_cut(self):", "\"\"\" g = Graph.build(edges=[ ('u', 'v'), ('u', 'w'), ('u', 'x'),", "'should correctly split the graph') def test_weighted_maximum_cut(self): \"\"\" Given the", "def test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given the following bipartite graph. 
(a)-----(b) \\", "'c'), ('d', 'e'), ('f', 'e'), ('f', 'g')], directed=False) (left, right)", "4') self.assertEqual(7, len(left)+len(right), 'no vertex counted twice') def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\"", "[set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertIn(set(right), [set(['a', 'c', 'e',", "'x'), ('v', 'x'),('w', 'x')], directed=False) (left, right) = maximum_cut(g) expected", "contain 2 vertices') self.assertEqual(2, len(right), 'right should contain 2 vertices')", "cuts') self.assertIn(set(left), expected, 'should correctly split the graph') self.assertIn(set(right), expected,", "\"\"\" g = Graph.build(edges=[ ('u', 'v', 3), ('u', 'w', 2),", "same subsets') def test_maximum_cut(self): \"\"\" Given a graph: (u)----(v) |", "Given the following weighted graph. (u)-3-(v) | \\ / |", "= maximum_cut(g) self.assertEqual(2, len(left), 'left should contain 2 vertices') self.assertEqual(2,", "bipartite graph. (a)-----(b) \\ \\----(c) (d)-----(e) / (f)----/ \\ \\----(g)", "('a', 'd'), ('c', 'b'), ('c', 'd'), ('b', 'e'), ('b', 'f'),", "right) = maximum_cut_for_bipartite_graph(g) self.assertIn(set(left), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])])", "maximum_cut(g) self.assertEqual(2, len(left), 'left should contain 2 vertices') self.assertEqual(2, len(right),", "| 5\\/1 4 2 /\\ | | / \\ |", "(left, right) = maximum_cut(g) expected = [{'u', 'v'}, {'w', 'x'},", "(w)-6-(x) \"\"\" g = Graph.build(edges=[ ('u', 'v', 3), ('u', 'w',", "'d'])]) self.assertNotEqual(left, right, 'not the same subsets') def test_maximum_cut(self): \"\"\"", "(d)-----(e) / (f)----/ \\ \\----(g) \"\"\" g = Graph.build(edges=[('a', 'b'),", "x | | / \\ | (e) (f) \"\"\" g", "common vertices between cuts') self.assertIn(set(left), expected, 'should correctly split the", "Graph.build(edges=[('a', 'b'), ('a', 'd'), ('c', 'b'), ('c', 'd'), ('b', 'e'),", "'e'), ('b', 'f'), ('d', 'e'), ('d', 'f')], directed=False) (left, right)", "(c) | \\ /| | x | | / \\", "'e'), ('f', 'e'), 
('f', 'g')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g)", "'no common vertices between cuts') self.assertIn(set(left), expected, 'should correctly split", "self.assertIn(set(right), expected, 'should correctly split the graph') def test_weighted_maximum_cut(self): \"\"\"", "import unittest from src.graph import Graph from src.maximum_cut import maximum_cut,", "| \\ / | | 5\\/1 4 2 /\\ |", "'e'), ('f', 'g')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4],", "'f']), set(['b', 'd'])]) self.assertNotEqual(left, right, 'not the same subsets') def", "the following bipartite graph. (a)-----(b) \\ \\----(c) (d)-----(e) / (f)----/", "\\ /| | x | | / \\ | (b)", "larger graph: (a) (c) | \\ /| | x |", "= maximum_cut_for_bipartite_graph(g) self.assertIn(set(left), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertIn(set(right),", "'f']), set(['b', 'd'])]) self.assertIn(set(right), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])])", "maximum_cut_for_bipartite_graph(g) self.assertIn(set(left), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertIn(set(right), [set(['a',", "| | /\\ | | / \\ | (w)---(x) \"\"\"", "/ (f)----/ \\ \\----(g) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a',", "5), ('v', 'x', 4),('w', 'x', 6)], directed=False) (left, right) =", "('v', 'x', 4),('w', 'x', 6)], directed=False) (left, right) = maximum_cut(g)", "'c', 'e', 'f']), set(['b', 'd'])]) self.assertNotEqual(left, right, 'not the same", "set(['b', 'd'])]) self.assertNotEqual(left, right, 'not the same subsets') def test_maximum_cut(self):", "= Graph.build(edges=[ ('u', 'v'), ('u', 'w'), ('u', 'x'), ('v', 'x'),('w',", "\\ \\----(c) (d)-----(e) / (f)----/ \\ \\----(g) \"\"\" g =", "('f', 'g')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4], 'either", "('u', 'x'), ('v', 'x'),('w', 'x')], directed=False) (left, right) = maximum_cut(g)", "g = Graph.build(edges=[ ('u', 'v'), 
('u', 'w'), ('u', 'x'), ('v',", "| /\\ | | / \\ | (w)---(x) \"\"\" g", "| x | | / \\ | (b) (d) |", "(f) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'd'), ('c', 'b'),", "'e'), ('d', 'f')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(set(left), [set(['a',", "'x'}, {'x', 'u'}, {'w', 'v'}] self.assertNotEqual(left, right, 'no common vertices", "g = Graph.build(edges=[('a', 'b'), ('a', 'c'), ('d', 'e'), ('f', 'e'),", "| / \\ | (w)-6-(x) \"\"\" g = Graph.build(edges=[ ('u',", "from src.graph import Graph from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph class", "| / \\ | (w)---(x) \"\"\" g = Graph.build(edges=[ ('u',", "following weighted graph. (u)-3-(v) | \\ / | | 5\\/1", "'u'}, {'w', 'v'}] self.assertNotEqual(left, right, 'no common vertices between cuts')", "'d'), ('c', 'b'), ('c', 'd'), ('b', 'e'), ('b', 'f'), ('d',", "| (w)---(x) \"\"\" g = Graph.build(edges=[ ('u', 'v'), ('u', 'w'),", "utf-8 -*- import unittest from src.graph import Graph from src.maximum_cut", "('b', 'e'), ('b', 'f'), ('d', 'e'), ('d', 'f')], directed=False) (left,", "A sligthly larger graph: (a) (c) | \\ /| |", "/ \\ | (w)---(x) \"\"\" g = Graph.build(edges=[ ('u', 'v'),", "maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given the following bipartite", "(a)-----(b) \\ \\----(c) (d)-----(e) / (f)----/ \\ \\----(g) \"\"\" g", "self.assertEqual(7, len(left)+len(right), 'no vertex counted twice') def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A", "unittest from src.graph import Graph from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph", "3 or 4') self.assertEqual(7, len(left)+len(right), 'no vertex counted twice') def", "| \\ / | | \\/ | | /\\ |", "\"\"\" Given a graph: (u)----(v) | \\ / | |", "\"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'c'), ('d', 'e'), ('f',", "'b'), ('c', 'd'), ('b', 'e'), ('b', 'f'), ('d', 'e'), 
('d',", "| (e) (f) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'd'),", "'x'),('w', 'x')], directed=False) (left, right) = maximum_cut(g) expected = [{'u',", "sligthly larger graph: (a) (c) | \\ /| | x", "\\ | (b) (d) | \\ /| | x |", "| | \\/ | | /\\ | | / \\", "= maximum_cut(g) expected = [{'u', 'v'}, {'w', 'x'}, {'x', 'u'},", "'no vertex counted twice') def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A sligthly larger", "expected, 'should correctly split the graph') self.assertIn(set(right), expected, 'should correctly", "subsets') def test_maximum_cut(self): \"\"\" Given a graph: (u)----(v) | \\", "weighted graph. (u)-3-(v) | \\ / | | 5\\/1 4", "test_weighted_maximum_cut(self): \"\"\" Given the following weighted graph. (u)-3-(v) | \\", "('c', 'd'), ('b', 'e'), ('b', 'f'), ('d', 'e'), ('d', 'f')],", "right, 'not the same subsets') def test_maximum_cut(self): \"\"\" Given a", "[set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertNotEqual(left, right, 'not the", "/\\ | | / \\ | (w)-6-(x) \"\"\" g =", "self.assertIn(set(left), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])]) self.assertIn(set(right), [set(['a', 'c',", "split the graph') def test_weighted_maximum_cut(self): \"\"\" Given the following weighted", "MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given the following bipartite graph. (a)-----(b)", "'b'), ('a', 'd'), ('c', 'b'), ('c', 'd'), ('b', 'e'), ('b',", "('b', 'f'), ('d', 'e'), ('d', 'f')], directed=False) (left, right) =", "following bipartite graph. (a)-----(b) \\ \\----(c) (d)-----(e) / (f)----/ \\", "\"\"\" Given the following bipartite graph. (a)-----(b) \\ \\----(c) (d)-----(e)", "\\----(g) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a', 'c'), ('d', 'e'),", "test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given the following bipartite graph. 
(a)-----(b) \\ \\----(c)", "'b'), ('a', 'c'), ('d', 'e'), ('f', 'e'), ('f', 'g')], directed=False)", "('d', 'e'), ('f', 'e'), ('f', 'g')], directed=False) (left, right) =", "3 or 4') self.assertIn(len(right), [3,4], 'eighter 3 or 4') self.assertEqual(7,", "def test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A sligthly larger graph: (a) (c) |", "| \\ /| | x | | / \\ |", "4') self.assertIn(len(right), [3,4], 'eighter 3 or 4') self.assertEqual(7, len(left)+len(right), 'no", "/\\ | | / \\ | (w)---(x) \"\"\" g =", "'w'), ('u', 'x'), ('v', 'x'),('w', 'x')], directed=False) (left, right) =", "self.assertNotEqual(left, right, 'no common vertices between cuts') self.assertIn(set(left), expected, 'should", "5\\/1 4 2 /\\ | | / \\ | (w)-6-(x)", "| (b) (d) | \\ /| | x | |", "'either 3 or 4') self.assertIn(len(right), [3,4], 'eighter 3 or 4')", "right) = maximum_cut(g) expected = [{'u', 'v'}, {'w', 'x'}, {'x',", "'e', 'f']), set(['b', 'd'])]) self.assertIn(set(right), [set(['a', 'c', 'e', 'f']), set(['b',", "'v'}] self.assertNotEqual(left, right, 'no common vertices between cuts') self.assertIn(set(left), expected,", "'v'), ('u', 'w'), ('u', 'x'), ('v', 'x'),('w', 'x')], directed=False) (left,", "between cuts') self.assertIn(set(left), expected, 'should correctly split the graph') self.assertIn(set(right),", "expected, 'should correctly split the graph') def test_weighted_maximum_cut(self): \"\"\" Given", "\\ / | | 5\\/1 4 2 /\\ | |", "vertices between cuts') self.assertIn(set(left), expected, 'should correctly split the graph')", "'d'), ('b', 'e'), ('b', 'f'), ('d', 'e'), ('d', 'f')], directed=False)", "test_maximum_cut_for_larger_bipartite_graphs(self): \"\"\" A sligthly larger graph: (a) (c) | \\", "maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4], 'either 3 or 4') self.assertIn(len(right), [3,4], 'eighter", "(left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(set(left), [set(['a', 'c', 'e', 'f']), set(['b',", "/ | | 5\\/1 4 2 /\\ | | 
/", "6)], directed=False) (left, right) = maximum_cut(g) self.assertEqual(2, len(left), 'left should", "test_maximum_cut(self): \"\"\" Given a graph: (u)----(v) | \\ / |", "/| | x | | / \\ | (e) (f)", "| | / \\ | (w)-6-(x) \"\"\" g = Graph.build(edges=[", "g = Graph.build(edges=[ ('u', 'v', 3), ('u', 'w', 2), ('u',", "2 /\\ | | / \\ | (w)-6-(x) \"\"\" g", "graph. (u)-3-(v) | \\ / | | 5\\/1 4 2", "'f'), ('d', 'e'), ('d', 'f')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g)", "import maximum_cut, maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase): def test_maximum_cut_for_bipartite_graphs(self): \"\"\" Given the", "| / \\ | (b) (d) | \\ /| |", "-*- import unittest from src.graph import Graph from src.maximum_cut import", "| \\/ | | /\\ | | / \\ |", "= Graph.build(edges=[('a', 'b'), ('a', 'c'), ('d', 'e'), ('f', 'e'), ('f',", "| | / \\ | (w)---(x) \"\"\" g = Graph.build(edges=[", "graph: (u)----(v) | \\ / | | \\/ | |", "the graph') self.assertIn(set(right), expected, 'should correctly split the graph') def", "correctly split the graph') def test_weighted_maximum_cut(self): \"\"\" Given the following", "3), ('u', 'w', 2), ('u', 'x', 5), ('v', 'x', 4),('w',", "2), ('u', 'x', 5), ('v', 'x', 4),('w', 'x', 6)], directed=False)", "directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4], 'either 3 or", "4),('w', 'x', 6)], directed=False) (left, right) = maximum_cut(g) self.assertEqual(2, len(left),", "'g')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(len(left), [3,4], 'either 3", "('f', 'e'), ('f', 'g')], directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(len(left),", "| x | | / \\ | (e) (f) \"\"\"", "\\ | (e) (f) \"\"\" g = Graph.build(edges=[('a', 'b'), ('a',", "directed=False) (left, right) = maximum_cut_for_bipartite_graph(g) self.assertIn(set(left), [set(['a', 'c', 'e', 'f']),", "len(left), 'left should contain 2 vertices') 
self.assertEqual(2, len(right), 'right should", "[{'u', 'v'}, {'w', 'x'}, {'x', 'u'}, {'w', 'v'}] self.assertNotEqual(left, right,", "src.graph import Graph from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph class MaximumCut(unittest.TestCase):", "'left should contain 2 vertices') self.assertEqual(2, len(right), 'right should contain", "/ | | \\/ | | /\\ | | /" ]
[ "path[-1] == 'R': prefix = prefix[:-1] + \"'- \" print(prefix", "2.0 (the \"License\"); # you may not use this file", "%s' % ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s' % ((intptr_ptr +", "pointer\\n') return expr = args[0] val = gdb.parse_and_eval('*(' + expr", "len(args) not in (1, 2): print('usage: print-avs-rbtree expr [with_magic]\\n' '", "% ((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx' % (self.intptr_type.sizeof *", "+ self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx' % (self.intptr_type.sizeof * 2) %", "super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str)", "invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str) if len(args) != 1:", "self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr +", "else: self._print_tree(val) class PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def", "ptr.cast(self.intptr_type) if with_magic: print((intptr_ptr + self.rb_magic_offset)) print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer())) print('rb", "'%%s 0x%%0%dx = %%s' % (self.intptr_type.sizeof * 2,) # TODO", "self.rb_magic_offset)) print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic: %s' % ((intptr_ptr +", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "= %%s' % (self.intptr_type.sizeof * 2,) # TODO self.color_offset =", ").cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr +", "print('parent: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + 
self.parent_offset).cast(ptr.type.pointer()).dereference()))", "PrintAvsRbtreeBase(gdb.Command): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.intptr_type = gdb.lookup_type('unsigned", "is None: print('cannot evaluate expression: ' + expr) return if", "valid AVS_RBTREE_NODE pointer\\n' ' with_magic - if present, \"magic\" fields", "~/.gdbinit import gdb class PrintAvsRbtreeBase(gdb.Command): def __init__(self, *args, **kwargs): super().__init__(*args,", "or int(right_ptr) in visited_addrs: print('circular tree detected, stopping') return visited_addrs.add(left_ptr)", "evaluate expression: ' + expr) return if ptr == 0:", "if val is None: print('cannot evaluate expression: ' + expr)", "use this file except in compliance with the License. #", "visited_addrs=set()): left_ptr_value = ptr.cast(self.intptr_type) + self.left_offset left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value", "an expression that avaluates to a valid AVS_RBTREE_NODE pointer\\n') return", "limitations under the License. # installation: append \"source PATH_TO_THIS_SCRIPT\" to", "left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value = ptr.cast(self.intptr_type) + self.right_offset right_ptr =", "self.right_offset right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference() prefix = ''.join(' |' if x", "= gdb.string_to_argv(argv_str) if len(args) not in (1, 2): print('usage: print-avs-rbtree", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "print('(null)') else: intptr_ptr = ptr.cast(self.intptr_type) if with_magic: print((intptr_ptr + self.rb_magic_offset))", "License. 
# You may obtain a copy of the License", "evaluate expression: ' + expr) return if val == 0:", "len(args) != 1: print('usage: print-avs-rbtree-subtree expr\\n' ' expr - an", "print('(null)') else: self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION)", "under the License is distributed on an \"AS IS\" BASIS,", "import gdb class PrintAvsRbtreeBase(gdb.Command): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs)", "%s' % ((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx' % (self.intptr_type.sizeof", "if path[-1] == 'L': prefix += '- ' elif path[-1]", "License for the specific language governing permissions and # limitations", "path else ' ', int(ptr), str(ptr.dereference()))) if int(left_ptr) in visited_addrs", "that avaluates to a valid AVS_RBTREE_NODE pointer\\n' ' with_magic -", "return expr = args[0] with_magic = len(args) > 1 ptr", "argv_str, _from_tty): args = gdb.string_to_argv(argv_str) if len(args) not in (1,", "gdb.lookup_type('unsigned long long') self.int_type = gdb.lookup_type('int') self.output_format = '%%s 0x%%0%dx", "x in path) if path: if path[-1] == 'L': prefix", "__init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.intptr_type = gdb.lookup_type('unsigned long long')", "* 2) % ((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx' % (self.intptr_type.sizeof", "int(left_ptr) != 0: self._print_tree(left_ptr, path + 'L', depth+1, visited_addrs) if", "if path: if path[-1] == 'L': prefix += '- '", "+ expr + ')') if val is None: print('cannot evaluate", "if ptr == 0: print('(null)') else: intptr_ptr = ptr.cast(self.intptr_type) if", "-24 self.left_offset = -16 self.right_offset = -8 def _print_tree(self, ptr,", "+ self.right_offset right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference() prefix = 
''.join(' |' if", "'- ' elif path[-1] == 'R': prefix = prefix[:-1] +", "in compliance with the License. # You may obtain a", "software # distributed under the License is distributed on an", "right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference() prefix = ''.join(' |' if x ==", "gdb.string_to_argv(argv_str) if len(args) not in (1, 2): print('usage: print-avs-rbtree expr", "present, \"magic\" fields are displayed\\n') return expr = args[0] with_magic", "**kwargs) self.intptr_type = gdb.lookup_type('unsigned long long') self.int_type = gdb.lookup_type('int') self.output_format", ").cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr +", "% (self.intptr_type.sizeof * 2,) # TODO self.color_offset = -32 self.parent_offset", "% (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx'", "fields are displayed\\n') return expr = args[0] with_magic = len(args)", "2,) # TODO self.color_offset = -32 self.parent_offset = -24 self.left_offset", "print-avs-rbtree expr [with_magic]\\n' ' expr - an expression that avaluates", "print('usage: print-avs-rbtree expr [with_magic]\\n' ' expr - an expression that", "print('rb magic: %s' % ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic: %s'", "to a valid AVS_RBTREE_NODE pointer\\n') return expr = args[0] val", "print-avs-rbtree expr\\n' ' expr - an expression that avaluates to", "self.left_offset left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value = ptr.cast(self.intptr_type) + self.right_offset right_ptr", "0x%%0%dx = %%s' % (self.intptr_type.sizeof * 2,) # TODO self.color_offset", "else ' ' for x in path) if path: if", "args[0] val = gdb.parse_and_eval(expr) if val is None: print('cannot evaluate", "= -8 def _print_tree(self, ptr, path='', depth=0, visited_addrs=set()): left_ptr_value =", "= args[0] val = 
gdb.parse_and_eval(expr) if val is None: print('cannot", "License. # installation: append \"source PATH_TO_THIS_SCRIPT\" to ~/.gdbinit import gdb", "+ ')') if val is None: print('cannot evaluate expression: '", "0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.right_offset ).cast(ptr.type.pointer()).dereference()))", "path: if path[-1] == 'L': prefix += '- ' elif", "self.parent_offset = -24 self.left_offset = -16 self.right_offset = -8 def", "ptr, path='', depth=0, visited_addrs=set()): left_ptr_value = ptr.cast(self.intptr_type) + self.left_offset left_ptr", "* 2) % ((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx' %", "def __init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args", "path + 'L', depth+1, visited_addrs) if int(right_ptr) != 0: self._print_tree(right_ptr,", "to a valid AVS_RBTREE pointer\\n') return expr = args[0] val", "left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value = ptr.cast(self.intptr_type) + self.right_offset right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference() prefix", "super().__init__(*args, **kwargs) self.intptr_type = gdb.lookup_type('unsigned long long') self.int_type = gdb.lookup_type('int')", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "for x in path) if path: if path[-1] == 'L':", "% ((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx' % (self.intptr_type.sizeof *", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "if present, \"magic\" fields are displayed\\n') return expr = args[0]", "expr\\n' ' expr - an expression that avaluates to a", "that avaluates to a valid AVS_RBTREE pointer\\n') return expr =", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "1: print('usage: print-avs-rbtree-subtree expr\\n' ' expr - an expression that", "to in writing, software # distributed under the License is", "= left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value = ptr.cast(self.intptr_type) + self.right_offset right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference()", "-16 self.right_offset = -8 def _print_tree(self, ptr, path='', depth=0, visited_addrs=set()):", "_from_tty): args = gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree", "# See the License for the specific language governing permissions", "path='', depth=0, visited_addrs=set()): left_ptr_value = ptr.cast(self.intptr_type) + self.left_offset left_ptr =", "elif path[-1] == 'R': prefix = prefix[:-1] + \"'- \"", "= args[0] with_magic = len(args) > 1 ptr = gdb.parse_and_eval(expr)", "language governing permissions and # limitations under the License. #", "or agreed to in writing, software # distributed under the", "int(left_ptr) in visited_addrs or int(right_ptr) in visited_addrs: print('circular tree detected,", "<<EMAIL>> # # Licensed under the Apache License, Version 2.0", "val = gdb.parse_and_eval(expr) if val is None: print('cannot evaluate expression:", "return if val == 0: print('(null)') else: self._print_tree(val) class PrintAvsRbtreeNode(PrintAvsRbtreeBase):", "required by applicable law or agreed to in writing, software", "in (1, 2): print('usage: print-avs-rbtree expr [with_magic]\\n' ' expr -", "= -32 self.parent_offset = -24 self.left_offset = -16 self.right_offset =", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. 
# You may obtain a copy of", "= args[0] val = gdb.parse_and_eval('*(' + expr + ')') if", "visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self,", "+ expr) return if val == 0: print('(null)') else: self._print_tree(val)", "visited_addrs or int(right_ptr) in visited_addrs: print('circular tree detected, stopping') return", "'L': prefix += '- ' elif path[-1] == 'R': prefix", "detected, stopping') return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if int(left_ptr) != 0: self._print_tree(left_ptr,", "== 'L': prefix += '- ' elif path[-1] == 'R':", "len(args) != 1: print('usage: print-avs-rbtree expr\\n' ' expr - an", "0: print('(null)') else: self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA,", "__init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args =", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "under the License. # installation: append \"source PATH_TO_THIS_SCRIPT\" to ~/.gdbinit", "else: self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def", "distributed under the License is distributed on an \"AS IS\"", "expression that avaluates to a valid AVS_RBTREE_NODE pointer\\n') return expr", "self.color_offset = -32 self.parent_offset = -24 self.left_offset = -16 self.right_offset", "' for x in path) if path: if path[-1] ==", "express or implied. # See the License for the specific", "'R': prefix = prefix[:-1] + \"'- \" print(prefix + self.output_format", "= -24 self.left_offset = -16 self.right_offset = -8 def _print_tree(self,", "except in compliance with the License. 
# You may obtain", "expr - an expression that avaluates to a valid AVS_RBTREE_NODE", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "!= 1: print('usage: print-avs-rbtree expr\\n' ' expr - an expression", "not use this file except in compliance with the License.", "class PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str,", "print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic: %s' % ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference()))", "ptr.cast(self.intptr_type) + self.right_offset right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference() prefix = ''.join(' |'", "-32 self.parent_offset = -24 self.left_offset = -16 self.right_offset = -8", "# -*- coding: utf-8 -*- # # Copyright 2021 AVSystem", "permissions and # limitations under the License. # installation: append", "writing, software # distributed under the License is distributed on", "* 2) % ((intptr_ptr + self.right_offset ).cast(ptr.type.pointer()).dereference())) PrintAvsRbtreeSubtree() PrintAvsRbtree() PrintAvsRbtreeNode()", "print('tree magic: %s' % ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s' %", "return expr = args[0] val = gdb.parse_and_eval(expr) if val is", "you may not use this file except in compliance with", "> 1 ptr = gdb.parse_and_eval(expr) if ptr is None: print('cannot", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "print('color: %s' % ((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx' %", "gdb class PrintAvsRbtreeBase(gdb.Command): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.intptr_type", "depth+1, visited_addrs) if int(right_ptr) != 0: self._print_tree(right_ptr, path + 'R',", "val == 0: print('(null)') else: 
self._print_tree(val) class PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self):", "0: print('(null)') else: intptr_ptr = ptr.cast(self.intptr_type) if with_magic: print((intptr_ptr +", "return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if int(left_ptr) != 0: self._print_tree(left_ptr, path +", "== 0: print('(null)') else: self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree',", "None: print('cannot evaluate expression: ' + expr) return if ptr", "CONDITIONS OF ANY KIND, either express or implied. # See", "prefix = prefix[:-1] + \"'- \" print(prefix + self.output_format %", "if int(left_ptr) != 0: self._print_tree(left_ptr, path + 'L', depth+1, visited_addrs)", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "args = gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree-subtree expr\\n'", "' expr - an expression that avaluates to a valid", "with_magic - if present, \"magic\" fields are displayed\\n') return expr", "depth+1, visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def", "args = gdb.string_to_argv(argv_str) if len(args) not in (1, 2): print('usage:", "= gdb.lookup_type('unsigned long long') self.int_type = gdb.lookup_type('int') self.output_format = '%%s", "' + expr) return if ptr == 0: print('(null)') else:", "a valid AVS_RBTREE_NODE pointer\\n' ' with_magic - if present, \"magic\"", "self._print_tree(right_ptr, path + 'R', depth+1, visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self):", "expression: ' + expr) return if ptr == 0: print('(null)')", "that avaluates to a valid AVS_RBTREE_NODE pointer\\n') return expr =", "|' if x == 'L' else ' ' for x", "= -16 self.right_offset = -8 def _print_tree(self, ptr, path='', depth=0,", "== 0: print('(null)') else: self._print_tree(val) class 
PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-node',", "self.int_type = gdb.lookup_type('int') self.output_format = '%%s 0x%%0%dx = %%s' %", "coding: utf-8 -*- # # Copyright 2021 AVSystem <<EMAIL>> #", "OR CONDITIONS OF ANY KIND, either express or implied. #", "return if ptr == 0: print('(null)') else: intptr_ptr = ptr.cast(self.intptr_type)", "= prefix[:-1] + \"'- \" print(prefix + self.output_format % (path[-1]", "[with_magic]\\n' ' expr - an expression that avaluates to a", "+ self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic: %s' % ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree", "# Copyright 2021 AVSystem <<EMAIL>> # # Licensed under the", "gdb.lookup_type('int') self.output_format = '%%s 0x%%0%dx = %%s' % (self.intptr_type.sizeof *", "the License is distributed on an \"AS IS\" BASIS, #", "valid AVS_RBTREE_NODE pointer\\n') return expr = args[0] val = gdb.parse_and_eval(expr)", "args[0] val = gdb.parse_and_eval('*(' + expr + ')') if val", "a valid AVS_RBTREE pointer\\n') return expr = args[0] val =", "+ self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr", "args[0] with_magic = len(args) > 1 ptr = gdb.parse_and_eval(expr) if", "self.output_format = '%%s 0x%%0%dx = %%s' % (self.intptr_type.sizeof * 2,)", "0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference()))", "PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty):", "self.output_format % (path[-1] if path else ' ', int(ptr), str(ptr.dereference())))", "+ self.left_offset left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value = ptr.cast(self.intptr_type) + self.right_offset", "val == 0: 
print('(null)') else: self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self):", "ptr is None: print('cannot evaluate expression: ' + expr) return", "else ' ', int(ptr), str(ptr.dereference()))) if int(left_ptr) in visited_addrs or", "law or agreed to in writing, software # distributed under", "x == 'L' else ' ' for x in path)", "if int(left_ptr) in visited_addrs or int(right_ptr) in visited_addrs: print('circular tree", "long long') self.int_type = gdb.lookup_type('int') self.output_format = '%%s 0x%%0%dx =", "val is None: print('cannot evaluate expression: ' + expr) return", "path[-1] == 'L': prefix += '- ' elif path[-1] ==", "_from_tty): args = gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree-subtree", "return if val == 0: print('(null)') else: self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase):", "' ', int(ptr), str(ptr.dereference()))) if int(left_ptr) in visited_addrs or int(right_ptr)", "-8 def _print_tree(self, ptr, path='', depth=0, visited_addrs=set()): left_ptr_value = ptr.cast(self.intptr_type)", "= len(args) > 1 ptr = gdb.parse_and_eval(expr) if ptr is", "displayed\\n') return expr = args[0] with_magic = len(args) > 1", "2): print('usage: print-avs-rbtree expr [with_magic]\\n' ' expr - an expression", "= gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree-subtree expr\\n' '", "print(prefix + self.output_format % (path[-1] if path else ' ',", "None: print('cannot evaluate expression: ' + expr) return if val", "0: print('(null)') else: self._print_tree(val) class PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA,", "def __init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args", "def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.intptr_type = gdb.lookup_type('unsigned long", 
"''.join(' |' if x == 'L' else ' ' for", "valid AVS_RBTREE pointer\\n') return expr = args[0] val = gdb.parse_and_eval('*('", "may obtain a copy of the License at # #", "= gdb.lookup_type('int') self.output_format = '%%s 0x%%0%dx = %%s' % (self.intptr_type.sizeof", "= right_ptr_value.cast(ptr.type.pointer()).dereference() prefix = ''.join(' |' if x == 'L'", "expr = args[0] val = gdb.parse_and_eval('*(' + expr + ')')", "gdb.parse_and_eval(expr) if ptr is None: print('cannot evaluate expression: ' +", "ptr.cast(self.intptr_type) + self.left_offset left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value = ptr.cast(self.intptr_type) +", "gdb.parse_and_eval(expr) if val is None: print('cannot evaluate expression: ' +", "%s' % ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic: %s' % ((intptr_ptr", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str) if", "right_ptr_value.cast(ptr.type.pointer()).dereference() prefix = ''.join(' |' if x == 'L' else", "may not use this file except in compliance with the", "path + 'R', depth+1, visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-subtree',", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "(self.intptr_type.sizeof * 2) % ((intptr_ptr + self.right_offset ).cast(ptr.type.pointer()).dereference())) PrintAvsRbtreeSubtree() PrintAvsRbtree()", "this file except in compliance with the License. 
# You", "prefix += '- ' elif path[-1] == 'R': prefix =", "(1, 2): print('usage: print-avs-rbtree expr [with_magic]\\n' ' expr - an", "+ self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic: %s' % ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color:", "print-avs-rbtree-subtree expr\\n' ' expr - an expression that avaluates to", "magic: %s' % ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic: %s' %", "= ''.join(' |' if x == 'L' else ' '", "print('circular tree detected, stopping') return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if int(left_ptr) !=", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "expr [with_magic]\\n' ' expr - an expression that avaluates to", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "# limitations under the License. 
# installation: append \"source PATH_TO_THIS_SCRIPT\"", "else: intptr_ptr = ptr.cast(self.intptr_type) if with_magic: print((intptr_ptr + self.rb_magic_offset)) print((intptr_ptr", "_from_tty): args = gdb.string_to_argv(argv_str) if len(args) not in (1, 2):", "avaluates to a valid AVS_RBTREE_NODE pointer\\n') return expr = args[0]", "1: print('usage: print-avs-rbtree expr\\n' ' expr - an expression that", "self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic: %s' % ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic:", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "int(ptr), str(ptr.dereference()))) if int(left_ptr) in visited_addrs or int(right_ptr) in visited_addrs:", "% (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.right_offset ).cast(ptr.type.pointer()).dereference())) PrintAvsRbtreeSubtree()", "argv_str, _from_tty): args = gdb.string_to_argv(argv_str) if len(args) != 1: print('usage:", "return expr = args[0] val = gdb.parse_and_eval('*(' + expr +", "depth=0, visited_addrs=set()): left_ptr_value = ptr.cast(self.intptr_type) + self.left_offset left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference()", "+ 'R', depth+1, visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA,", "len(args) > 1 ptr = gdb.parse_and_eval(expr) if ptr is None:", "((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx' % (self.intptr_type.sizeof * 2)", "self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s' % ((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx'", "class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str,", "invoke(self, argv_str, 
_from_tty): args = gdb.string_to_argv(argv_str) if len(args) not in", "= '%%s 0x%%0%dx = %%s' % (self.intptr_type.sizeof * 2,) #", "== 0: print('(null)') else: intptr_ptr = ptr.cast(self.intptr_type) if with_magic: print((intptr_ptr", "class PrintAvsRbtreeBase(gdb.Command): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.intptr_type =", "are displayed\\n') return expr = args[0] with_magic = len(args) >", "if len(args) != 1: print('usage: print-avs-rbtree expr\\n' ' expr -", "* 2,) # TODO self.color_offset = -32 self.parent_offset = -24", "- if present, \"magic\" fields are displayed\\n') return expr =", "2) % ((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx' % (self.intptr_type.sizeof *", "pointer\\n' ' with_magic - if present, \"magic\" fields are displayed\\n')", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "2) % ((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx' % (self.intptr_type.sizeof", "expr + ')') if val is None: print('cannot evaluate expression:", "**kwargs): super().__init__(*args, **kwargs) self.intptr_type = gdb.lookup_type('unsigned long long') self.int_type =", "gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree expr\\n' ' expr", "= ptr.cast(self.intptr_type) + self.left_offset left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value = ptr.cast(self.intptr_type)", "or implied. 
# See the License for the specific language", "expression that avaluates to a valid AVS_RBTREE pointer\\n') return expr", "'R', depth+1, visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION)", "% (path[-1] if path else ' ', int(ptr), str(ptr.dereference()))) if", "def _print_tree(self, ptr, path='', depth=0, visited_addrs=set()): left_ptr_value = ptr.cast(self.intptr_type) +", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx' % (self.intptr_type.sizeof * 2) %", "\"'- \" print(prefix + self.output_format % (path[-1] if path else", "magic: %s' % ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s' % ((intptr_ptr", "if len(args) != 1: print('usage: print-avs-rbtree-subtree expr\\n' ' expr -", "\" print(prefix + self.output_format % (path[-1] if path else '", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx' % (self.intptr_type.sizeof * 2)", "<filename>gdb/print-avs-rbtree.py # -*- coding: utf-8 -*- # # Copyright 2021", "0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left:", "expr) return if val == 0: print('(null)') else: self._print_tree(val) class", "if val == 0: print('(null)') else: self._print_tree(val) class PrintAvsRbtreeNode(PrintAvsRbtreeBase): def", "prefix = ''.join(' |' if x == 'L' else '", "1 ptr = gdb.parse_and_eval(expr) if ptr is None: print('cannot evaluate", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "+ self.left_offset 
).cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx' % (self.intptr_type.sizeof * 2) %", "'L', depth+1, visited_addrs) if int(right_ptr) != 0: self._print_tree(right_ptr, path +", "in path) if path: if path[-1] == 'L': prefix +=", "- an expression that avaluates to a valid AVS_RBTREE_NODE pointer\\n'", "*args, **kwargs): super().__init__(*args, **kwargs) self.intptr_type = gdb.lookup_type('unsigned long long') self.int_type", "visited_addrs: print('circular tree detected, stopping') return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if int(left_ptr)", "to a valid AVS_RBTREE_NODE pointer\\n' ' with_magic - if present,", "-*- coding: utf-8 -*- # # Copyright 2021 AVSystem <<EMAIL>>", "(self.intptr_type.sizeof * 2) % ((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx'", "print('cannot evaluate expression: ' + expr) return if ptr ==", "super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str)", "expression that avaluates to a valid AVS_RBTREE_NODE pointer\\n' ' with_magic", "tree detected, stopping') return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if int(left_ptr) != 0:", "class PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str,", "def __init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args", "% ((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx' % (self.intptr_type.sizeof * 2)", "# # Unless required by applicable law or agreed to", "%%s' % (self.intptr_type.sizeof * 2,) # TODO self.color_offset = -32", "in visited_addrs: print('circular tree detected, stopping') return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if", 
"self.right_offset = -8 def _print_tree(self, ptr, path='', depth=0, visited_addrs=set()): left_ptr_value", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "\"source PATH_TO_THIS_SCRIPT\" to ~/.gdbinit import gdb class PrintAvsRbtreeBase(gdb.Command): def __init__(self,", "self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self,", "an expression that avaluates to a valid AVS_RBTREE pointer\\n') return", "Version 2.0 (the \"License\"); # you may not use this", "Copyright 2021 AVSystem <<EMAIL>> # # Licensed under the Apache", "# TODO self.color_offset = -32 self.parent_offset = -24 self.left_offset =", "+ \"'- \" print(prefix + self.output_format % (path[-1] if path", "% ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s' % ((intptr_ptr + self.color_offset", "_print_tree(self, ptr, path='', depth=0, visited_addrs=set()): left_ptr_value = ptr.cast(self.intptr_type) + self.left_offset", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "+ 'L', depth+1, visited_addrs) if int(right_ptr) != 0: self._print_tree(right_ptr, path", "' elif path[-1] == 'R': prefix = prefix[:-1] + \"'-", "- an expression that avaluates to a valid AVS_RBTREE pointer\\n')", "args = gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree expr\\n'", "if val == 0: print('(null)') else: self._print_tree(val) class PrintAvsRbtree(PrintAvsRbtreeBase): def", "print('usage: print-avs-rbtree expr\\n' ' expr - an expression that avaluates", "pointer\\n') return expr = args[0] val = gdb.parse_and_eval(expr) if val", "AVS_RBTREE_NODE pointer\\n') return expr = args[0] val = gdb.parse_and_eval(expr) if", "visited_addrs) if int(right_ptr) != 0: self._print_tree(right_ptr, path + 'R', depth+1,", "with_magic = len(args) > 1 ptr = gdb.parse_and_eval(expr) if ptr", "expr = args[0] with_magic = len(args) > 1 ptr =", "by applicable law or agreed to in writing, software #", "AVSystem <<EMAIL>> # # Licensed under the Apache License, Version", "', int(ptr), str(ptr.dereference()))) if int(left_ptr) in visited_addrs or int(right_ptr) in", "self.left_offset = -16 self.right_offset = -8 def _print_tree(self, ptr, path='',", "' with_magic - if present, \"magic\" fields are displayed\\n') return", "= gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree expr\\n' '", "= gdb.parse_and_eval(expr) if val is None: print('cannot evaluate expression: '", "expr - an expression that avaluates to a valid AVS_RBTREE", "((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s' % ((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference()))", "% (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference())) print('right:", "!= 1: print('usage: print-avs-rbtree-subtree expr\\n' ' expr - an 
expression", "0: self._print_tree(right_ptr, path + 'R', depth+1, visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase): def", "ptr = gdb.parse_and_eval(expr) if ptr is None: print('cannot evaluate expression:", "visited_addrs.add(right_ptr) if int(left_ptr) != 0: self._print_tree(left_ptr, path + 'L', depth+1,", "self._print_tree(val) class PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self,", "2021 AVSystem <<EMAIL>> # # Licensed under the Apache License,", "' + expr) return if val == 0: print('(null)') else:", "!= 0: self._print_tree(left_ptr, path + 'L', depth+1, visited_addrs) if int(right_ptr)", "' ' for x in path) if path: if path[-1]", "__init__(self): super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args =", "ptr == 0: print('(null)') else: intptr_ptr = ptr.cast(self.intptr_type) if with_magic:", "# # Copyright 2021 AVSystem <<EMAIL>> # # Licensed under", "(self.intptr_type.sizeof * 2,) # TODO self.color_offset = -32 self.parent_offset =", "gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str) if len(args)", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "and # limitations under the License. 
# installation: append \"source", "Unless required by applicable law or agreed to in writing,", "+= '- ' elif path[-1] == 'R': prefix = prefix[:-1]", "avaluates to a valid AVS_RBTREE pointer\\n') return expr = args[0]", "to ~/.gdbinit import gdb class PrintAvsRbtreeBase(gdb.Command): def __init__(self, *args, **kwargs):", "TODO self.color_offset = -32 self.parent_offset = -24 self.left_offset = -16", "stopping') return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if int(left_ptr) != 0: self._print_tree(left_ptr, path", "right_ptr_value = ptr.cast(self.intptr_type) + self.right_offset right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference() prefix =", "self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic: %s' % ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s'", "the specific language governing permissions and # limitations under the", "= gdb.parse_and_eval('*(' + expr + ')') if val is None:", "+ self.rb_magic_offset)) print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic: %s' % ((intptr_ptr", "applicable law or agreed to in writing, software # distributed", "- an expression that avaluates to a valid AVS_RBTREE_NODE pointer\\n')", "not in (1, 2): print('usage: print-avs-rbtree expr [with_magic]\\n' ' expr", "print((intptr_ptr + self.rb_magic_offset)) print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic: %s' %", "governing permissions and # limitations under the License. 
# installation:", "print('usage: print-avs-rbtree-subtree expr\\n' ' expr - an expression that avaluates", "# installation: append \"source PATH_TO_THIS_SCRIPT\" to ~/.gdbinit import gdb class", "% ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic: %s' % ((intptr_ptr +", "avaluates to a valid AVS_RBTREE_NODE pointer\\n' ' with_magic - if", "in writing, software # distributed under the License is distributed", "+ self.tree_magic_offset).cast(self.int_type.pointer()).dereference())) print('color: %s' % ((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent:", "= gdb.parse_and_eval(expr) if ptr is None: print('cannot evaluate expression: '", "((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference())) print('tree magic: %s' % ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference()))", "AVS_RBTREE_NODE pointer\\n' ' with_magic - if present, \"magic\" fields are", "if int(right_ptr) != 0: self._print_tree(right_ptr, path + 'R', depth+1, visited_addrs)", "(path[-1] if path else ' ', int(ptr), str(ptr.dereference()))) if int(left_ptr)", "= ptr.cast(self.intptr_type) + self.right_offset right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference() prefix = ''.join('", "installation: append \"source PATH_TO_THIS_SCRIPT\" to ~/.gdbinit import gdb class PrintAvsRbtreeBase(gdb.Command):", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "print('(null)') else: self._print_tree(val) class PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION)", "append \"source PATH_TO_THIS_SCRIPT\" to ~/.gdbinit import gdb class PrintAvsRbtreeBase(gdb.Command): def", "if ptr is None: print('cannot evaluate expression: ' + expr)", "copy of the 
License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "long') self.int_type = gdb.lookup_type('int') self.output_format = '%%s 0x%%0%dx = %%s'", "(self.intptr_type.sizeof * 2) % ((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference())) print('left: 0x%%0%dx' %", "PrintAvsRbtree(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty):", "')') if val is None: print('cannot evaluate expression: ' +", "-*- # # Copyright 2021 AVSystem <<EMAIL>> # # Licensed", "== 'R': prefix = prefix[:-1] + \"'- \" print(prefix +", "'L' else ' ' for x in path) if path:", "0: self._print_tree(left_ptr, path + 'L', depth+1, visited_addrs) if int(right_ptr) !=", "PATH_TO_THIS_SCRIPT\" to ~/.gdbinit import gdb class PrintAvsRbtreeBase(gdb.Command): def __init__(self, *args,", "self.intptr_type = gdb.lookup_type('unsigned long long') self.int_type = gdb.lookup_type('int') self.output_format =", "!= 0: self._print_tree(right_ptr, path + 'R', depth+1, visited_addrs) class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase):", "AVS_RBTREE pointer\\n') return expr = args[0] val = gdb.parse_and_eval('*(' +", "prefix[:-1] + \"'- \" print(prefix + self.output_format % (path[-1] if", "if path else ' ', int(ptr), str(ptr.dereference()))) if int(left_ptr) in", "the License for the specific language governing permissions and #", "if x == 'L' else ' ' for x in", "path) if path: if path[-1] == 'L': prefix += '-", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. 
# See the License for the", "str(ptr.dereference()))) if int(left_ptr) in visited_addrs or int(right_ptr) in visited_addrs: print('circular", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "visited_addrs.add(left_ptr) visited_addrs.add(right_ptr) if int(left_ptr) != 0: self._print_tree(left_ptr, path + 'L',", "intptr_ptr = ptr.cast(self.intptr_type) if with_magic: print((intptr_ptr + self.rb_magic_offset)) print((intptr_ptr +", "print('right: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.right_offset", "if len(args) not in (1, 2): print('usage: print-avs-rbtree expr [with_magic]\\n'", "\"magic\" fields are displayed\\n') return expr = args[0] with_magic =", "super().__init__('print-avs-rbtree-subtree', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str)", "a valid AVS_RBTREE_NODE pointer\\n') return expr = args[0] val =", "int(right_ptr) != 0: self._print_tree(right_ptr, path + 'R', depth+1, visited_addrs) class", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "print('left: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.left_offset", "gdb.string_to_argv(argv_str) if len(args) != 1: print('usage: print-avs-rbtree-subtree expr\\n' ' expr", "+ expr) return if ptr == 0: print('(null)') else: intptr_ptr", "left_ptr_value = ptr.cast(self.intptr_type) + self.left_offset left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference() right_ptr_value =", "self._print_tree(left_ptr, path + 'L', depth+1, visited_addrs) if int(right_ptr) != 0:", "PrintAvsRbtreeNode(PrintAvsRbtreeBase): def __init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty):", "val = gdb.parse_and_eval('*(' + expr + ')') if val is", "expr = args[0] val = gdb.parse_and_eval(expr) if val is None:", "in visited_addrs or int(right_ptr) in visited_addrs: 
print('circular tree detected, stopping')", "gdb.parse_and_eval('*(' + expr + ')') if val is None: print('cannot", "an expression that avaluates to a valid AVS_RBTREE_NODE pointer\\n' '", "print('cannot evaluate expression: ' + expr) return if val ==", "\"License\"); # you may not use this file except in", "expr) return if ptr == 0: print('(null)') else: intptr_ptr =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "+ self.output_format % (path[-1] if path else ' ', int(ptr),", "__init__(self): super().__init__('print-avs-rbtree-node', gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) def invoke(self, argv_str, _from_tty): args =", "the License. # installation: append \"source PATH_TO_THIS_SCRIPT\" to ~/.gdbinit import", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "expression: ' + expr) return if val == 0: print('(null)')", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "if with_magic: print((intptr_ptr + self.rb_magic_offset)) print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic:", "self.left_offset ).cast(ptr.type.pointer()).dereference())) print('right: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr", "with_magic: print((intptr_ptr + self.rb_magic_offset)) print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer())) print('rb magic: %s'", "You may obtain a copy of the License at #", "int(right_ptr) in visited_addrs: print('circular tree detected, stopping') return visited_addrs.add(left_ptr) visited_addrs.add(right_ptr)", "def invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str) if len(args) not", "the Apache License, Version 2.0 (the \"License\"); # you may", "= ptr.cast(self.intptr_type) if with_magic: print((intptr_ptr + self.rb_magic_offset)) print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()))", "def invoke(self, argv_str, _from_tty): args = gdb.string_to_argv(argv_str) 
if len(args) !=", "self.color_offset ).cast(self.int_type.pointer()).dereference())) print('parent: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr", "== 'L' else ' ' for x in path) if", "utf-8 -*- # # Copyright 2021 AVSystem <<EMAIL>> # #" ]
[ "cond, initial, reduce) print (\"\\n\\n'A' words grouped by first and", "obj.vowels + obj.cons; \" + \\ \"}\" results = collection.group(key,", "+ \\ \"with totals:\") displayGroup(results) if __name__==\"__main__\": mongo = MongoClient('mongodb://localhost:27017/')", "key = {'first' : True, \"last\" : True} cond =", "(result) def firstIsALastIsVowel(collection): key = {'first' : True, \"last\" :", "}\" results = collection.group(key, cond, initial, reduce) print (\"\\n\\n'A' words", "vowel:\") displayGroup(results) def firstLetterTotals(collection): key = {'first' : True} cond", "+ \\ \"prev.vowels += obj.stats.vowels; \" + \\ \"prev.cons +=", "'cons' : 0} reduce = \"function (obj, prev) { \"", "\\ \"prev.cons += obj.stats.consonants; \" + \\ \"}\" finalize =", "results = collection.group(key, cond, initial, reduce, finalize) print (\"\\n\\nWords grouped", ": True} cond = {} initial = {'vowels' : 0,", "\" + \\ \"prev.vowels += obj.stats.vowels; \" + \\ \"prev.cons", "collection.group(key, cond, initial, reduce, finalize) print (\"\\n\\nWords grouped by first", "cond, initial, reduce, finalize) print (\"\\n\\nWords grouped by first letter", "{ prev.count++; }\" results = collection.group(key, cond, initial, reduce) print", "print (\"\\n\\n'A' words grouped by first and last\" + \\", "+= obj.stats.vowels; \" + \\ \"prev.cons += obj.stats.consonants; \" +", "(\"\\n\\nWords grouped by first letter \" + \\ \"with totals:\")", "first and last\" + \\ \" letter that end with", "\" + \\ \"}\" finalize = \"function (obj) { \"", "initial, reduce, finalize) print (\"\\n\\nWords grouped by first letter \"", "\"prev.vowels += obj.stats.vowels; \" + \\ \"prev.cons += obj.stats.consonants; \"", "= \"function (obj, prev) { prev.count++; }\" results = collection.group(key,", "+= obj.stats.consonants; \" + \\ \"}\" finalize = \"function (obj)", "= {'first' : 'a', 'last' : {'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial", "True} cond = {} initial = {'vowels' : 0, 'cons'", 
"pymongo import MongoClient def displayGroup(results): for result in results: print", "that end with a vowel:\") displayGroup(results) def firstLetterTotals(collection): key =", "initial = {'count' : 0} reduce = \"function (obj, prev)", "print (result) def firstIsALastIsVowel(collection): key = {'first' : True, \"last\"", "reduce = \"function (obj, prev) { \" + \\ \"prev.vowels", ": {'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial = {'count' : 0} reduce", "True} cond = {'first' : 'a', 'last' : {'$in' :", "def firstIsALastIsVowel(collection): key = {'first' : True, \"last\" : True}", "{'first' : True, \"last\" : True} cond = {'first' :", "end with a vowel:\") displayGroup(results) def firstLetterTotals(collection): key = {'first'", "(\"\\n\\n'A' words grouped by first and last\" + \\ \"", "0} reduce = \"function (obj, prev) { prev.count++; }\" results", "(obj, prev) { prev.count++; }\" results = collection.group(key, cond, initial,", "+ \\ \"obj.total = obj.vowels + obj.cons; \" + \\", "{ \" + \\ \"obj.total = obj.vowels + obj.cons; \"", "prev.count++; }\" results = collection.group(key, cond, initial, reduce) print (\"\\n\\n'A'", "0, 'cons' : 0} reduce = \"function (obj, prev) {", "= \"function (obj) { \" + \\ \"obj.total = obj.vowels", "= {'vowels' : 0, 'cons' : 0} reduce = \"function", "= {'count' : 0} reduce = \"function (obj, prev) {", ": 0} reduce = \"function (obj, prev) { prev.count++; }\"", "{'first' : 'a', 'last' : {'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial =", "key = {'first' : True} cond = {} initial =", "'a', 'last' : {'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial = {'count' :", "cond = {} initial = {'vowels' : 0, 'cons' :", "\\ \"obj.total = obj.vowels + obj.cons; \" + \\ \"}\"", "import MongoClient def displayGroup(results): for result in results: print (result)", "result in results: print (result) def firstIsALastIsVowel(collection): key = {'first'", "{'first' : True} cond = {} initial = {'vowels' :", "obj.stats.vowels; \" + \\ 
\"prev.cons += obj.stats.consonants; \" + \\", "in results: print (result) def firstIsALastIsVowel(collection): key = {'first' :", "(obj) { \" + \\ \"obj.total = obj.vowels + obj.cons;", "for result in results: print (result) def firstIsALastIsVowel(collection): key =", "cond = {'first' : 'a', 'last' : {'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}}", "def displayGroup(results): for result in results: print (result) def firstIsALastIsVowel(collection):", "\"function (obj) { \" + \\ \"obj.total = obj.vowels +", "mongo = MongoClient('mongodb://localhost:27017/') db = mongo['words'] collection = db['word_stats'] firstIsALastIsVowel(collection)", "collection.group(key, cond, initial, reduce) print (\"\\n\\n'A' words grouped by first", "def firstLetterTotals(collection): key = {'first' : True} cond = {}", "'last' : {'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial = {'count' : 0}", "{'count' : 0} reduce = \"function (obj, prev) { prev.count++;", "= collection.group(key, cond, initial, reduce) print (\"\\n\\n'A' words grouped by", "= {} initial = {'vowels' : 0, 'cons' : 0}", "__name__==\"__main__\": mongo = MongoClient('mongodb://localhost:27017/') db = mongo['words'] collection = db['word_stats']", "displayGroup(results) if __name__==\"__main__\": mongo = MongoClient('mongodb://localhost:27017/') db = mongo['words'] collection", "= {'first' : True, \"last\" : True} cond = {'first'", "and last\" + \\ \" letter that end with a", ": 0} reduce = \"function (obj, prev) { \" +", "\"}\" finalize = \"function (obj) { \" + \\ \"obj.total", "grouped by first and last\" + \\ \" letter that", "{ \" + \\ \"prev.vowels += obj.stats.vowels; \" + \\", "print (\"\\n\\nWords grouped by first letter \" + \\ \"with", "initial, reduce) print (\"\\n\\n'A' words grouped by first and last\"", "if __name__==\"__main__\": mongo = MongoClient('mongodb://localhost:27017/') db = mongo['words'] collection =", "totals:\") displayGroup(results) if __name__==\"__main__\": mongo = 
MongoClient('mongodb://localhost:27017/') db = mongo['words']", "letter \" + \\ \"with totals:\") displayGroup(results) if __name__==\"__main__\": mongo", "grouped by first letter \" + \\ \"with totals:\") displayGroup(results)", "\" + \\ \"obj.total = obj.vowels + obj.cons; \" +", "words grouped by first and last\" + \\ \" letter", "= {'first' : True} cond = {} initial = {'vowels'", "firstLetterTotals(collection): key = {'first' : True} cond = {} initial", "\\ \"}\" results = collection.group(key, cond, initial, reduce, finalize) print", "results = collection.group(key, cond, initial, reduce) print (\"\\n\\n'A' words grouped", "\" + \\ \"}\" results = collection.group(key, cond, initial, reduce,", "reduce) print (\"\\n\\n'A' words grouped by first and last\" +", "a vowel:\") displayGroup(results) def firstLetterTotals(collection): key = {'first' : True}", "\" + \\ \"with totals:\") displayGroup(results) if __name__==\"__main__\": mongo =", "+ \\ \"prev.cons += obj.stats.consonants; \" + \\ \"}\" finalize", "from pymongo import MongoClient def displayGroup(results): for result in results:", "by first letter \" + \\ \"with totals:\") displayGroup(results) if", "\\ \"with totals:\") displayGroup(results) if __name__==\"__main__\": mongo = MongoClient('mongodb://localhost:27017/') db", "last\" + \\ \" letter that end with a vowel:\")", "finalize = \"function (obj) { \" + \\ \"obj.total =", "letter that end with a vowel:\") displayGroup(results) def firstLetterTotals(collection): key", "results: print (result) def firstIsALastIsVowel(collection): key = {'first' : True,", "[\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial = {'count' : 0} reduce = \"function (obj,", "= obj.vowels + obj.cons; \" + \\ \"}\" results =", "firstIsALastIsVowel(collection): key = {'first' : True, \"last\" : True} cond", "\"last\" : True} cond = {'first' : 'a', 'last' :", "first letter \" + \\ \"with totals:\") displayGroup(results) if __name__==\"__main__\":", "True, \"last\" : True} cond = {'first' : 
'a', 'last'", "by first and last\" + \\ \" letter that end", "+ \\ \"}\" finalize = \"function (obj) { \" +", "displayGroup(results): for result in results: print (result) def firstIsALastIsVowel(collection): key", "prev) { prev.count++; }\" results = collection.group(key, cond, initial, reduce)", "{'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial = {'count' : 0} reduce =", "\\ \" letter that end with a vowel:\") displayGroup(results) def", "+ \\ \"}\" results = collection.group(key, cond, initial, reduce, finalize)", "finalize) print (\"\\n\\nWords grouped by first letter \" + \\", ": 'a', 'last' : {'$in' : [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial = {'count'", ": [\"a\",\"e\",\"i\",\"o\",\"u\"]}} initial = {'count' : 0} reduce = \"function", "\\ \"prev.vowels += obj.stats.vowels; \" + \\ \"prev.cons += obj.stats.consonants;", "\"obj.total = obj.vowels + obj.cons; \" + \\ \"}\" results", "with a vowel:\") displayGroup(results) def firstLetterTotals(collection): key = {'first' :", "\\ \"}\" finalize = \"function (obj) { \" + \\", "reduce, finalize) print (\"\\n\\nWords grouped by first letter \" +", "0} reduce = \"function (obj, prev) { \" + \\", "= \"function (obj, prev) { \" + \\ \"prev.vowels +=", "(obj, prev) { \" + \\ \"prev.vowels += obj.stats.vowels; \"", "initial = {'vowels' : 0, 'cons' : 0} reduce =", "{} initial = {'vowels' : 0, 'cons' : 0} reduce", "\" + \\ \"prev.cons += obj.stats.consonants; \" + \\ \"}\"", ": True, \"last\" : True} cond = {'first' : 'a',", "+ \\ \" letter that end with a vowel:\") displayGroup(results)", "= collection.group(key, cond, initial, reduce, finalize) print (\"\\n\\nWords grouped by", "displayGroup(results) def firstLetterTotals(collection): key = {'first' : True} cond =", "\"function (obj, prev) { \" + \\ \"prev.vowels += obj.stats.vowels;", "reduce = \"function (obj, prev) { prev.count++; }\" results =", "\"prev.cons += obj.stats.consonants; \" + \\ \"}\" finalize = \"function", "MongoClient def displayGroup(results): for 
result in results: print (result) def", ": True} cond = {'first' : 'a', 'last' : {'$in'", "+ obj.cons; \" + \\ \"}\" results = collection.group(key, cond,", "\"function (obj, prev) { prev.count++; }\" results = collection.group(key, cond,", "prev) { \" + \\ \"prev.vowels += obj.stats.vowels; \" +", "= MongoClient('mongodb://localhost:27017/') db = mongo['words'] collection = db['word_stats'] firstIsALastIsVowel(collection) firstLetterTotals(collection)", ": 0, 'cons' : 0} reduce = \"function (obj, prev)", "obj.cons; \" + \\ \"}\" results = collection.group(key, cond, initial,", "\"}\" results = collection.group(key, cond, initial, reduce, finalize) print (\"\\n\\nWords", "\" letter that end with a vowel:\") displayGroup(results) def firstLetterTotals(collection):", "\"with totals:\") displayGroup(results) if __name__==\"__main__\": mongo = MongoClient('mongodb://localhost:27017/') db =", "obj.stats.consonants; \" + \\ \"}\" finalize = \"function (obj) {", "{'vowels' : 0, 'cons' : 0} reduce = \"function (obj," ]
[ "batch def flip_output(output): \"\"\" Flip output information Parameters ---------- output", "# Keys to ignore combine = ['metrics'] # Keys to", "------- output : dict Dictionary with a \"metrics\" key containing", "'rgb', 'rgb_context', 'input_depth', 'input_depth_context', ]): batch[key] = flip(batch[key], flip_lr) #", "during interpolation Returns ------- output : dict Upsampled output \"\"\"", "batch input information (copies data first) Parameters ---------- batch :", "corners will be aligned during interpolation Returns ------- output :", "\"\"\" Flip batch input information (copies data first) Parameters ----------", "mode is used align_corners: bool or None Whether corners will", "merge = {key: {} for key in combine} for output", "If list, stack every item if is_list(batch[key]): if is_tensor(batch[key][0]) or", "flip(output[key], flip_lr) return output def upsample_output(output, mode='nearest', align_corners=None): \"\"\" Upsample", "in val] for val in tensor] def merge_outputs(*outputs): \"\"\" Merges", "tensors for key in filter_dict(output, [ 'uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths',", ": Function Flip function Returns ------- tensor : torch.Tensor or", "elif key not in ignore: assert key not in merge.keys(),", "if not is_list(tensor): return flip_fn(tensor) else: if not is_list(tensor[0]): return", "batch : dict Batch information Returns ------- batch : dict", "other keys that are not \"loss\" (it is handled differently).", "[[flip_fn(v) for v in val] for val in tensor] def", "1, 'Only batch size 1 is supported for multi-cameras' #", "sample in batch[key]] # Else, stack single item else: batch[key]", "if len(batch['rgb'].shape) == 5: assert batch['rgb'].shape[0] == 1, 'Only batch", "---------- tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Tensor to", "Batch information Returns ------- batch : dict Flipped batch \"\"\"", "key not in merge.keys(), \\ 'Adding duplicated key {}'.format(key) 
merge[key]", ": dict Dictionary with a \"metrics\" key containing a dictionary", "(copies data first) Parameters ---------- batch : dict Batch information", "outputs for logging Parameters ---------- outputs : tuple of dict", "filter_dict(output, [ 'uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth',", "output[key] = interpolate_scales( output[key], mode=mode, align_corners=align_corners) for key in filter_dict(output,", "not in ignore: assert key not in merge.keys(), \\ 'Adding", "not \"loss\" (it is handled differently). \"\"\" ignore = ['loss']", "not in merge[key].keys(), \\ 'Combining duplicated key {} to {}'.format(sub_key,", "Parameters ---------- output : dict Dictionary of model outputs (e.g.", "for key in filter_dict(output, [ 'inv_depths_context' ]): output[key] = [interpolate_scales(", "------- output : dict Flipped output \"\"\" # Flip tensors", "Research Institute. All rights reserved. from packnet_sfm.utils.image import flip_lr, interpolate_scales", "'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2', ]): output[key] = flip(output[key], flip_lr) return output", "output def upsample_output(output, mode='nearest', align_corners=None): \"\"\" Upsample multi-scale outputs to", "to ignore combine = ['metrics'] # Keys to combine merge", "= ['metrics'] # Keys to combine merge = {key: {}", "in output[key].items(): assert sub_key not in merge[key].keys(), \\ 'Combining duplicated", "'inv_depths' and 'uncertainty') mode : str Which interpolation mode is", "def merge_outputs(*outputs): \"\"\" Merges model outputs for logging Parameters ----------", "dict Batch information Returns ------- batch : dict Flipped batch", "or list[list[torch.Tensor]] Tensor to be flipped flip_fn : Function Flip", "Stacked batch \"\"\" # If there is multi-camera information if", "dict Dictionary of model outputs (e.g. 
with keys like 'inv_depths'", "Iterate over all keys for key, val in output.items(): #", "batch['rgb'].shape[0] == 1, 'Only batch size 1 is supported for", "outputs (e.g. with keys like 'inv_depths' and 'uncertainty') Returns -------", "def flip_batch_input(batch): \"\"\" Flip batch input information (copies data first)", "merge[key].keys(), \\ 'Combining duplicated key {} to {}'.format(sub_key, key) merge[key][sub_key]", "{key: {} for key in combine} for output in outputs:", "these keys elif key not in ignore: assert key not", "combine merge = {key: {} for key in combine} for", "mode=mode, align_corners=align_corners) for key in filter_dict(output, [ 'inv_depths_context' ]): output[key]", "stack_batch(batch): \"\"\" Stack multi-camera batches (B,N,C,H,W becomes BN,C,H,W) Parameters ----------", "used align_corners: bool or None Whether corners will be aligned", "is_list(tensor[0]): return [flip_fn(val) for val in tensor] else: return [[flip_fn(v)", "of dict Outputs to be merged Returns ------- output :", "supported for multi-cameras' # Loop over all keys for key", "ignore combine = ['metrics'] # Keys to combine merge =", "in filter_dict(output, [ 'uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2',", "[ 'inv_depths', 'uncertainty' ]): output[key] = interpolate_scales( output[key], mode=mode, align_corners=align_corners)", "Which interpolation mode is used align_corners: bool or None Whether", "assert sub_key not in merge[key].keys(), \\ 'Combining duplicated key {}", "0, 2] = batch['rgb'].shape[3] - batch[key][:, 0, 2] # Return", "like 'inv_depths' and 'uncertainty') mode : str Which interpolation mode", "= [sample[0] for sample in batch[key]] # Else, stack single", "stack every item if is_list(batch[key]): if is_tensor(batch[key][0]) or is_numpy(batch[key][0]): batch[key]", "in combine} for output in outputs: # Iterate over all", "val in tensor] def merge_outputs(*outputs): \"\"\" Merges model 
outputs for", "Parameters ---------- batch : dict Batch information Returns ------- batch", "---------- batch : dict Batch information Returns ------- batch :", "== 1, 'Only batch size 1 is supported for multi-cameras'", ": dict Batch Returns ------- batch : dict Stacked batch", "Parameters ---------- tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Tensor", "interpolation mode is used align_corners: bool or None Whether corners", "data first) Parameters ---------- batch : dict Batch information Returns", "= batch[key].clone() batch[key][:, 0, 2] = batch['rgb'].shape[3] - batch[key][:, 0,", "in tensor] else: return [[flip_fn(v) for v in val] for", "dict Outputs to be merged Returns ------- output : dict", "key in batch.keys(): # If list, stack every item if", "torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Tensor to be flipped flip_fn", "from packnet_sfm.utils.misc import filter_dict from packnet_sfm.utils.types import is_tensor, is_list, is_numpy", "differently). 
\"\"\" ignore = ['loss'] # Keys to ignore combine", "Flip tensors for key in filter_dict(output, [ 'uncertainty', 'logits_semantic', 'ord_probability',", "'inv_depths2', 'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2', ]):", "from packnet_sfm.utils.types import is_tensor, is_list, is_numpy def flip(tensor, flip_fn): \"\"\"", "Flip function Returns ------- tensor : torch.Tensor or list[torch.Tensor] or", "Keys to combine merge = {key: {} for key in", "merged Returns ------- output : dict Dictionary with a \"metrics\"", "\"\"\" # If there is multi-camera information if len(batch['rgb'].shape) ==", "'uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth', 'pred_depth_context', 'pred_depth1',", "dict Flipped output \"\"\" # Flip tensors for key in", "information Parameters ---------- output : dict Dictionary of model outputs", "key in filter_dict(batch, [ 'rgb', 'rgb_context', 'input_depth', 'input_depth_context', ]): batch[key]", "= flip(batch[key], flip_lr) # Flip intrinsics for key in filter_dict(batch,", "these keys if key in combine: for sub_key, sub_val in", "{} to {}'.format(sub_key, key) merge[key][sub_key] = sub_val # Ignore these", "model outputs for logging Parameters ---------- outputs : tuple of", "sub_key, sub_val in output[key].items(): assert sub_key not in merge[key].keys(), \\", "'uncertainty') Returns ------- output : dict Flipped output \"\"\" #", "else: return [[flip_fn(v) for v in val] for val in", "if not is_list(tensor[0]): return [flip_fn(val) for val in tensor] else:", "\"\"\" ignore = ['loss'] # Keys to ignore combine =", ": dict Flipped batch \"\"\" # Flip tensors for key", "]): output[key] = interpolate_scales( output[key], mode=mode, align_corners=align_corners) for key in", "is_tensor(batch[key][0]) or is_numpy(batch[key][0]): batch[key] = [sample[0] for sample in 
batch[key]]", "\"loss\" (it is handled differently). \"\"\" ignore = ['loss'] #", "combine = ['metrics'] # Keys to combine merge = {key:", "or list[torch.Tensor] or list[list[torch.Tensor]] Flipped tensor or list of tensors", "\"\"\" if not is_list(tensor): return flip_fn(tensor) else: if not is_list(tensor[0]):", "is handled differently). \"\"\" ignore = ['loss'] # Keys to", "'uncertainty') mode : str Which interpolation mode is used align_corners:", "sub_val # Ignore these keys elif key not in ignore:", "Batch Returns ------- batch : dict Stacked batch \"\"\" #", "batch : dict Batch Returns ------- batch : dict Stacked", "dict Dictionary with a \"metrics\" key containing a dictionary with", "multi-camera information if len(batch['rgb'].shape) == 5: assert batch['rgb'].shape[0] == 1,", "all keys for key in batch.keys(): # If list, stack", "for val in tensor] else: return [[flip_fn(v) for v in", "for sample in batch[key]] # Else, stack single item else:", "with various metrics and all other keys that are not", "return batch def flip_batch_input(batch): \"\"\" Flip batch input information (copies", "on a function Parameters ---------- tensor : torch.Tensor or list[torch.Tensor]", "return merge def stack_batch(batch): \"\"\" Stack multi-camera batches (B,N,C,H,W becomes", "dict Flipped batch \"\"\" # Flip tensors for key in", "2020 Toyota Research Institute. All rights reserved. from packnet_sfm.utils.image import", "duplicated key {} to {}'.format(sub_key, key) merge[key][sub_key] = sub_val #", "Flipped tensor or list of tensors \"\"\" if not is_list(tensor):", "is used align_corners: bool or None Whether corners will be", "(it is handled differently). 
\"\"\" ignore = ['loss'] # Keys", "val in tensor] else: return [[flip_fn(v) for v in val]", "information Returns ------- batch : dict Flipped batch \"\"\" #", "assert key not in merge.keys(), \\ 'Adding duplicated key {}'.format(key)", "a dictionary with various metrics and all other keys that", "\"\"\" Flip output information Parameters ---------- output : dict Dictionary", "full resolution. Parameters ---------- output : dict Dictionary of model", "list[list[torch.Tensor]] Flipped tensor or list of tensors \"\"\" if not", "packnet_sfm.utils.image import flip_lr, interpolate_scales from packnet_sfm.utils.misc import filter_dict from packnet_sfm.utils.types", "is_list, is_numpy def flip(tensor, flip_fn): \"\"\" Flip tensors or list", "Whether corners will be aligned during interpolation Returns ------- output", "'Adding duplicated key {}'.format(key) merge[key] = val return merge def", "= {key: {} for key in combine} for output in", "and all other keys that are not \"loss\" (it is", "in tensor] def merge_outputs(*outputs): \"\"\" Merges model outputs for logging", "'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2', ]): output[key] = flip(output[key], flip_lr) return", "Merges model outputs for logging Parameters ---------- outputs : tuple", ": torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Flipped tensor or list", "in merge[key].keys(), \\ 'Combining duplicated key {} to {}'.format(sub_key, key)", "'logits_semantic', 'ord_probability', 'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2',", "align_corners: bool or None Whether corners will be aligned during", "is_list(batch[key]): if is_tensor(batch[key][0]) or is_numpy(batch[key][0]): batch[key] = [sample[0] for sample", "'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context', 
'pred_inv_depth1',", "'input_depth', 'input_depth_context', ]): batch[key] = flip(batch[key], flip_lr) # Flip intrinsics", "mode='nearest', align_corners=None): \"\"\" Upsample multi-scale outputs to full resolution. Parameters", "Keys to ignore combine = ['metrics'] # Keys to combine", "be merged Returns ------- output : dict Dictionary with a", "batch[key] = batch[key][0] return batch def flip_batch_input(batch): \"\"\" Flip batch", "flip_lr) return output def upsample_output(output, mode='nearest', align_corners=None): \"\"\" Upsample multi-scale", "'Combining duplicated key {} to {}'.format(sub_key, key) merge[key][sub_key] = sub_val", "flip_lr, interpolate_scales from packnet_sfm.utils.misc import filter_dict from packnet_sfm.utils.types import is_tensor,", "= batch[key][0] return batch def flip_batch_input(batch): \"\"\" Flip batch input", "for sub_key, sub_val in output[key].items(): assert sub_key not in merge[key].keys(),", "mode : str Which interpolation mode is used align_corners: bool", "key not in ignore: assert key not in merge.keys(), \\", "tensors based on a function Parameters ---------- tensor : torch.Tensor", "batch return batch def flip_output(output): \"\"\" Flip output information Parameters", "flipped flip_fn : Function Flip function Returns ------- tensor :", "\"\"\" Flip tensors or list of tensors based on a", "with a \"metrics\" key containing a dictionary with various metrics", "in batch[key]] # Else, stack single item else: batch[key] =", "merge[key] = val return merge def stack_batch(batch): \"\"\" Stack multi-camera", "like 'inv_depths' and 'uncertainty') Returns ------- output : dict Flipped", "in ignore: assert key not in merge.keys(), \\ 'Adding duplicated", "ignore = ['loss'] # Keys to ignore combine = ['metrics']", "\\ 'Combining duplicated key {} to {}'.format(sub_key, key) merge[key][sub_key] =", "\"metrics\" key containing a dictionary with various metrics and all", "[interpolate_scales( val, mode=mode, 
align_corners=align_corners) for val in output[key]] return output", "multi-scale outputs to full resolution. Parameters ---------- output : dict", "# If list, stack every item if is_list(batch[key]): if is_tensor(batch[key][0])", "# If there is multi-camera information if len(batch['rgb'].shape) == 5:", "key) merge[key][sub_key] = sub_val # Ignore these keys elif key", "packnet_sfm.utils.types import is_tensor, is_list, is_numpy def flip(tensor, flip_fn): \"\"\" Flip", "outputs : tuple of dict Outputs to be merged Returns", "metrics and all other keys that are not \"loss\" (it", "------- tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Flipped tensor", "val return merge def stack_batch(batch): \"\"\" Stack multi-camera batches (B,N,C,H,W", "of model outputs (e.g. with keys like 'inv_depths' and 'uncertainty')", "and 'uncertainty') mode : str Which interpolation mode is used", "[sample[0] for sample in batch[key]] # Else, stack single item", "merge.keys(), \\ 'Adding duplicated key {}'.format(key) merge[key] = val return", "for output in outputs: # Iterate over all keys for", "# Copyright 2020 Toyota Research Institute. All rights reserved. from", "handled differently). 
\"\"\" ignore = ['loss'] # Keys to ignore", "combine: for sub_key, sub_val in output[key].items(): assert sub_key not in", "[ 'uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth', 'pred_depth_context',", "'pred_inv_depth1', 'pred_inv_depth2', ]): output[key] = flip(output[key], flip_lr) return output def", ": torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Tensor to be flipped", "aligned during interpolation Returns ------- output : dict Upsampled output", "tensor] def merge_outputs(*outputs): \"\"\" Merges model outputs for logging Parameters", "= interpolate_scales( output[key], mode=mode, align_corners=align_corners) for key in filter_dict(output, [", "# Combine these keys if key in combine: for sub_key,", "'ord_probability', 'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth',", "dict Batch Returns ------- batch : dict Stacked batch \"\"\"", "in filter_dict(output, [ 'inv_depths_context' ]): output[key] = [interpolate_scales( val, mode=mode,", "keys for key, val in output.items(): # Combine these keys", "list of tensors \"\"\" if not is_list(tensor): return flip_fn(tensor) else:", "Tensor to be flipped flip_fn : Function Flip function Returns", "output : dict Dictionary with a \"metrics\" key containing a", "in output.items(): # Combine these keys if key in combine:", "keys elif key not in ignore: assert key not in", "Flip batch input information (copies data first) Parameters ---------- batch", "------- batch : dict Flipped batch \"\"\" # Flip tensors", "output : dict Flipped output \"\"\" # Flip tensors for", "for key in filter_dict(batch, [ 'intrinsics' ]): batch[key] = batch[key].clone()", "# Loop over all keys for key in batch.keys(): #", "= sub_val # Ignore these keys elif key not in", "If there is multi-camera information if len(batch['rgb'].shape) == 5: assert", 
"'inv_depths', 'uncertainty' ]): output[key] = interpolate_scales( output[key], mode=mode, align_corners=align_corners) for", "multi-cameras' # Loop over all keys for key in batch.keys():", "(B,N,C,H,W becomes BN,C,H,W) Parameters ---------- batch : dict Batch Returns", "becomes BN,C,H,W) Parameters ---------- batch : dict Batch Returns -------", "merge def stack_batch(batch): \"\"\" Stack multi-camera batches (B,N,C,H,W becomes BN,C,H,W)", ": dict Stacked batch \"\"\" # If there is multi-camera", "sub_key not in merge[key].keys(), \\ 'Combining duplicated key {} to", "stack single item else: batch[key] = batch[key][0] return batch def", "---------- outputs : tuple of dict Outputs to be merged", "# Iterate over all keys for key, val in output.items():", "]): batch[key] = flip(batch[key], flip_lr) # Flip intrinsics for key", "if is_tensor(batch[key][0]) or is_numpy(batch[key][0]): batch[key] = [sample[0] for sample in", "various metrics and all other keys that are not \"loss\"", "return output def upsample_output(output, mode='nearest', align_corners=None): \"\"\" Upsample multi-scale outputs", "a \"metrics\" key containing a dictionary with various metrics and", "batch \"\"\" # If there is multi-camera information if len(batch['rgb'].shape)", "'Only batch size 1 is supported for multi-cameras' # Loop", "filter_dict(batch, [ 'rgb', 'rgb_context', 'input_depth', 'input_depth_context', ]): batch[key] = flip(batch[key],", "Dictionary with a \"metrics\" key containing a dictionary with various", "filter_dict(batch, [ 'intrinsics' ]): batch[key] = batch[key].clone() batch[key][:, 0, 2]", "def upsample_output(output, mode='nearest', align_corners=None): \"\"\" Upsample multi-scale outputs to full", "import is_tensor, is_list, is_numpy def flip(tensor, flip_fn): \"\"\" Flip tensors", "a function Parameters ---------- tensor : torch.Tensor or list[torch.Tensor] or", "# Flip intrinsics for key in filter_dict(batch, [ 'intrinsics' ]):", "val] for val in tensor] def 
merge_outputs(*outputs): \"\"\" Merges model", "# Else, stack single item else: batch[key] = batch[key][0] return", "output \"\"\" # Flip tensors for key in filter_dict(output, [", "key containing a dictionary with various metrics and all other", "flip_fn): \"\"\" Flip tensors or list of tensors based on", "output \"\"\" for key in filter_dict(output, [ 'inv_depths', 'uncertainty' ]):", "torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Flipped tensor or list of", "batch.keys(): # If list, stack every item if is_list(batch[key]): if", "\"\"\" Upsample multi-scale outputs to full resolution. Parameters ---------- output", "logging Parameters ---------- outputs : tuple of dict Outputs to", "is supported for multi-cameras' # Loop over all keys for", "batch[key] = batch[key].clone() batch[key][:, 0, 2] = batch['rgb'].shape[3] - batch[key][:,", "output : dict Upsampled output \"\"\" for key in filter_dict(output,", "for key in filter_dict(batch, [ 'rgb', 'rgb_context', 'input_depth', 'input_depth_context', ]):", "based on a function Parameters ---------- tensor : torch.Tensor or", "output in outputs: # Iterate over all keys for key,", "will be aligned during interpolation Returns ------- output : dict", "------- batch : dict Stacked batch \"\"\" # If there", "from packnet_sfm.utils.image import flip_lr, interpolate_scales from packnet_sfm.utils.misc import filter_dict from", "v in val] for val in tensor] def merge_outputs(*outputs): \"\"\"", "rights reserved. from packnet_sfm.utils.image import flip_lr, interpolate_scales from packnet_sfm.utils.misc import", "flip_fn : Function Flip function Returns ------- tensor : torch.Tensor", "tensor or list of tensors \"\"\" if not is_list(tensor): return", "are not \"loss\" (it is handled differently). 
\"\"\" ignore =", "'uncertainty' ]): output[key] = interpolate_scales( output[key], mode=mode, align_corners=align_corners) for key", "for key in batch.keys(): # If list, stack every item", "flip_lr) # Flip intrinsics for key in filter_dict(batch, [ 'intrinsics'", "model outputs (e.g. with keys like 'inv_depths' and 'uncertainty') mode", "output[key] = [interpolate_scales( val, mode=mode, align_corners=align_corners) for val in output[key]]", "to be flipped flip_fn : Function Flip function Returns -------", "Loop over all keys for key in batch.keys(): # If", "\"\"\" Stack multi-camera batches (B,N,C,H,W becomes BN,C,H,W) Parameters ---------- batch", "batch[key] = flip(batch[key], flip_lr) # Flip intrinsics for key in", "information if len(batch['rgb'].shape) == 5: assert batch['rgb'].shape[0] == 1, 'Only", "tensors or list of tensors based on a function Parameters", "- batch[key][:, 0, 2] # Return flipped batch return batch", "bool or None Whether corners will be aligned during interpolation", "packnet_sfm.utils.misc import filter_dict from packnet_sfm.utils.types import is_tensor, is_list, is_numpy def", "list, stack every item if is_list(batch[key]): if is_tensor(batch[key][0]) or is_numpy(batch[key][0]):", ": tuple of dict Outputs to be merged Returns -------", "output.items(): # Combine these keys if key in combine: for", "flip_batch_input(batch): \"\"\" Flip batch input information (copies data first) Parameters", "be flipped flip_fn : Function Flip function Returns ------- tensor", "for key, val in output.items(): # Combine these keys if", "containing a dictionary with various metrics and all other keys", "return flip_fn(tensor) else: if not is_list(tensor[0]): return [flip_fn(val) for val", "intrinsics for key in filter_dict(batch, [ 'intrinsics' ]): batch[key] =", "merge_outputs(*outputs): \"\"\" Merges model outputs for logging Parameters ---------- outputs", "input information (copies data first) Parameters ---------- batch : dict", "Flip output 
information Parameters ---------- output : dict Dictionary of", "not is_list(tensor): return flip_fn(tensor) else: if not is_list(tensor[0]): return [flip_fn(val)", "5: assert batch['rgb'].shape[0] == 1, 'Only batch size 1 is", "1 is supported for multi-cameras' # Loop over all keys", "Toyota Research Institute. All rights reserved. from packnet_sfm.utils.image import flip_lr,", "Returns ------- batch : dict Flipped batch \"\"\" # Flip", "key in combine} for output in outputs: # Iterate over", "for val in tensor] def merge_outputs(*outputs): \"\"\" Merges model outputs", "# Return flipped batch return batch def flip_output(output): \"\"\" Flip", "str Which interpolation mode is used align_corners: bool or None", "'pred_depth1', 'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2', ]): output[key] = flip(output[key],", "every item if is_list(batch[key]): if is_tensor(batch[key][0]) or is_numpy(batch[key][0]): batch[key] =", "is_numpy(batch[key][0]): batch[key] = [sample[0] for sample in batch[key]] # Else,", "key {}'.format(key) merge[key] = val return merge def stack_batch(batch): \"\"\"", "list[list[torch.Tensor]] Tensor to be flipped flip_fn : Function Flip function", "# Flip tensors for key in filter_dict(output, [ 'uncertainty', 'logits_semantic',", "Returns ------- output : dict Flipped output \"\"\" # Flip", "filter_dict(output, [ 'inv_depths_context' ]): output[key] = [interpolate_scales( val, mode=mode, align_corners=align_corners)", "list[torch.Tensor] or list[list[torch.Tensor]] Flipped tensor or list of tensors \"\"\"", "batch['rgb'].shape[3] - batch[key][:, 0, 2] # Return flipped batch return", "]): batch[key] = batch[key].clone() batch[key][:, 0, 2] = batch['rgb'].shape[3] -", "\"\"\" # Flip tensors for key in filter_dict(output, [ 'uncertainty',", "single item else: batch[key] = batch[key][0] return batch def flip_batch_input(batch):", "Return flipped batch return batch def flip_output(output): \"\"\" Flip 
output", "Flipped output \"\"\" # Flip tensors for key in filter_dict(output,", "that are not \"loss\" (it is handled differently). \"\"\" ignore", "Parameters ---------- outputs : tuple of dict Outputs to be", "2] = batch['rgb'].shape[3] - batch[key][:, 0, 2] # Return flipped", "\"\"\" # Flip tensors for key in filter_dict(batch, [ 'rgb',", "= ['loss'] # Keys to ignore combine = ['metrics'] #", "{} for key in combine} for output in outputs: #", "batches (B,N,C,H,W becomes BN,C,H,W) Parameters ---------- batch : dict Batch", "upsample_output(output, mode='nearest', align_corners=None): \"\"\" Upsample multi-scale outputs to full resolution.", "'inv_depths_context' ]): output[key] = [interpolate_scales( val, mode=mode, align_corners=align_corners) for val", "Function Flip function Returns ------- tensor : torch.Tensor or list[torch.Tensor]", "in combine: for sub_key, sub_val in output[key].items(): assert sub_key not", "with keys like 'inv_depths' and 'uncertainty') Returns ------- output :", "# Ignore these keys elif key not in ignore: assert", "'pred_inv_depth2', ]): output[key] = flip(output[key], flip_lr) return output def upsample_output(output,", "batch : dict Stacked batch \"\"\" # If there is", "batch : dict Flipped batch \"\"\" # Flip tensors for", "and 'uncertainty') Returns ------- output : dict Flipped output \"\"\"", "with keys like 'inv_depths' and 'uncertainty') mode : str Which", "== 5: assert batch['rgb'].shape[0] == 1, 'Only batch size 1", "{}'.format(key) merge[key] = val return merge def stack_batch(batch): \"\"\" Stack", "batch[key][:, 0, 2] = batch['rgb'].shape[3] - batch[key][:, 0, 2] #", "Flip tensors or list of tensors based on a function", "if key in combine: for sub_key, sub_val in output[key].items(): assert", "All rights reserved. 
from packnet_sfm.utils.image import flip_lr, interpolate_scales from packnet_sfm.utils.misc", "for multi-cameras' # Loop over all keys for key in", "for key in combine} for output in outputs: # Iterate", "else: batch[key] = batch[key][0] return batch def flip_batch_input(batch): \"\"\" Flip", "be aligned during interpolation Returns ------- output : dict Upsampled", "combine} for output in outputs: # Iterate over all keys", "dictionary with various metrics and all other keys that are", "BN,C,H,W) Parameters ---------- batch : dict Batch Returns ------- batch", "key in filter_dict(batch, [ 'intrinsics' ]): batch[key] = batch[key].clone() batch[key][:,", "filter_dict(output, [ 'inv_depths', 'uncertainty' ]): output[key] = interpolate_scales( output[key], mode=mode,", "ignore: assert key not in merge.keys(), \\ 'Adding duplicated key", "---------- output : dict Dictionary of model outputs (e.g. with", "tuple of dict Outputs to be merged Returns ------- output", "if is_list(batch[key]): if is_tensor(batch[key][0]) or is_numpy(batch[key][0]): batch[key] = [sample[0] for", "or is_numpy(batch[key][0]): batch[key] = [sample[0] for sample in batch[key]] #", "Outputs to be merged Returns ------- output : dict Dictionary", "of tensors based on a function Parameters ---------- tensor :", "'rgb_context', 'input_depth', 'input_depth_context', ]): batch[key] = flip(batch[key], flip_lr) # Flip", "tensors \"\"\" if not is_list(tensor): return flip_fn(tensor) else: if not", "output information Parameters ---------- output : dict Dictionary of model", "in filter_dict(batch, [ 'rgb', 'rgb_context', 'input_depth', 'input_depth_context', ]): batch[key] =", "\"\"\" Merges model outputs for logging Parameters ---------- outputs :", "Returns ------- output : dict Dictionary with a \"metrics\" key", "2] # Return flipped batch return batch def flip_output(output): \"\"\"", "outputs to full resolution. 
Parameters ---------- output : dict Dictionary", "flip(tensor, flip_fn): \"\"\" Flip tensors or list of tensors based", "flipped batch return batch def flip_output(output): \"\"\" Flip output information", "import flip_lr, interpolate_scales from packnet_sfm.utils.misc import filter_dict from packnet_sfm.utils.types import", "Upsampled output \"\"\" for key in filter_dict(output, [ 'inv_depths', 'uncertainty'", "['metrics'] # Keys to combine merge = {key: {} for", "keys like 'inv_depths' and 'uncertainty') Returns ------- output : dict", "is_list(tensor): return flip_fn(tensor) else: if not is_list(tensor[0]): return [flip_fn(val) for", "interpolate_scales( output[key], mode=mode, align_corners=align_corners) for key in filter_dict(output, [ 'inv_depths_context'", "or list[torch.Tensor] or list[list[torch.Tensor]] Tensor to be flipped flip_fn :", "information (copies data first) Parameters ---------- batch : dict Batch", "or list of tensors \"\"\" if not is_list(tensor): return flip_fn(tensor)", "Flipped batch \"\"\" # Flip tensors for key in filter_dict(batch,", "key in filter_dict(output, [ 'uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths', 'inv_depths_context', 'inv_depths1',", "Parameters ---------- batch : dict Batch Returns ------- batch :", "batch[key]] # Else, stack single item else: batch[key] = batch[key][0]", "Institute. All rights reserved. 
from packnet_sfm.utils.image import flip_lr, interpolate_scales from", "= [interpolate_scales( val, mode=mode, align_corners=align_corners) for val in output[key]] return", "tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Flipped tensor or", "for key in filter_dict(output, [ 'inv_depths', 'uncertainty' ]): output[key] =", "]): output[key] = [interpolate_scales( val, mode=mode, align_corners=align_corners) for val in", "duplicated key {}'.format(key) merge[key] = val return merge def stack_batch(batch):", "return batch def flip_output(output): \"\"\" Flip output information Parameters ----------", "'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2', ]): output[key] =", "for key in filter_dict(output, [ 'uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths', 'inv_depths_context',", "in filter_dict(output, [ 'inv_depths', 'uncertainty' ]): output[key] = interpolate_scales( output[key],", "Upsample multi-scale outputs to full resolution. Parameters ---------- output :", "flip_output(output): \"\"\" Flip output information Parameters ---------- output : dict", "in outputs: # Iterate over all keys for key, val", "(e.g. 
with keys like 'inv_depths' and 'uncertainty') Returns ------- output", "batch[key][0] return batch def flip_batch_input(batch): \"\"\" Flip batch input information", "[ 'rgb', 'rgb_context', 'input_depth', 'input_depth_context', ]): batch[key] = flip(batch[key], flip_lr)", "or list[list[torch.Tensor]] Flipped tensor or list of tensors \"\"\" if", "is_numpy def flip(tensor, flip_fn): \"\"\" Flip tensors or list of", "key {} to {}'.format(sub_key, key) merge[key][sub_key] = sub_val # Ignore", "item else: batch[key] = batch[key][0] return batch def flip_batch_input(batch): \"\"\"", "function Parameters ---------- tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]]", "align_corners=None): \"\"\" Upsample multi-scale outputs to full resolution. Parameters ----------", "batch size 1 is supported for multi-cameras' # Loop over", "---------- batch : dict Batch Returns ------- batch : dict", "in batch.keys(): # If list, stack every item if is_list(batch[key]):", "there is multi-camera information if len(batch['rgb'].shape) == 5: assert batch['rgb'].shape[0]", "key, val in output.items(): # Combine these keys if key", "def stack_batch(batch): \"\"\" Stack multi-camera batches (B,N,C,H,W becomes BN,C,H,W) Parameters", "interpolate_scales from packnet_sfm.utils.misc import filter_dict from packnet_sfm.utils.types import is_tensor, is_list,", "to full resolution. Parameters ---------- output : dict Dictionary of", ": dict Dictionary of model outputs (e.g. 
with keys like", "return [[flip_fn(v) for v in val] for val in tensor]", ": dict Upsampled output \"\"\" for key in filter_dict(output, [", "import filter_dict from packnet_sfm.utils.types import is_tensor, is_list, is_numpy def flip(tensor,", "\\ 'Adding duplicated key {}'.format(key) merge[key] = val return merge", "Stack multi-camera batches (B,N,C,H,W becomes BN,C,H,W) Parameters ---------- batch :", "Flip tensors for key in filter_dict(batch, [ 'rgb', 'rgb_context', 'input_depth',", "else: if not is_list(tensor[0]): return [flip_fn(val) for val in tensor]", "[ 'inv_depths_context' ]): output[key] = [interpolate_scales( val, mode=mode, align_corners=align_corners) for", "{}'.format(sub_key, key) merge[key][sub_key] = sub_val # Ignore these keys elif", "def flip_output(output): \"\"\" Flip output information Parameters ---------- output :", "output : dict Dictionary of model outputs (e.g. with keys", "= flip(output[key], flip_lr) return output def upsample_output(output, mode='nearest', align_corners=None): \"\"\"", "Flip intrinsics for key in filter_dict(batch, [ 'intrinsics' ]): batch[key]", "all other keys that are not \"loss\" (it is handled", "dict Stacked batch \"\"\" # If there is multi-camera information", "batch[key] = [sample[0] for sample in batch[key]] # Else, stack", "dict Upsampled output \"\"\" for key in filter_dict(output, [ 'inv_depths',", "output[key].items(): assert sub_key not in merge[key].keys(), \\ 'Combining duplicated key", "keys for key in batch.keys(): # If list, stack every", "keys that are not \"loss\" (it is handled differently). \"\"\"", "of tensors \"\"\" if not is_list(tensor): return flip_fn(tensor) else: if", "return [flip_fn(val) for val in tensor] else: return [[flip_fn(v) for", "output[key], mode=mode, align_corners=align_corners) for key in filter_dict(output, [ 'inv_depths_context' ]):", "(e.g. 
with keys like 'inv_depths' and 'uncertainty') mode : str", "tensors for key in filter_dict(batch, [ 'rgb', 'rgb_context', 'input_depth', 'input_depth_context',", "outputs (e.g. with keys like 'inv_depths' and 'uncertainty') mode :", "Returns ------- batch : dict Stacked batch \"\"\" # If", "tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Tensor to be", "Else, stack single item else: batch[key] = batch[key][0] return batch", "or list of tensors based on a function Parameters ----------", "first) Parameters ---------- batch : dict Batch information Returns -------", "Ignore these keys elif key not in ignore: assert key", "[ 'intrinsics' ]): batch[key] = batch[key].clone() batch[key][:, 0, 2] =", ": dict Flipped output \"\"\" # Flip tensors for key", "flip(batch[key], flip_lr) # Flip intrinsics for key in filter_dict(batch, [", "Returns ------- tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]] Flipped", "to combine merge = {key: {} for key in combine}", "0, 2] # Return flipped batch return batch def flip_output(output):", "to {}'.format(sub_key, key) merge[key][sub_key] = sub_val # Ignore these keys", "tensor] else: return [[flip_fn(v) for v in val] for val", ": str Which interpolation mode is used align_corners: bool or", "keys like 'inv_depths' and 'uncertainty') mode : str Which interpolation", "align_corners=align_corners) for key in filter_dict(output, [ 'inv_depths_context' ]): output[key] =", "key in combine: for sub_key, sub_val in output[key].items(): assert sub_key", "not is_list(tensor[0]): return [flip_fn(val) for val in tensor] else: return", "= val return merge def stack_batch(batch): \"\"\" Stack multi-camera batches", "outputs: # Iterate over all keys for key, val in", "sub_val in output[key].items(): assert sub_key not in merge[key].keys(), \\ 'Combining", "function Returns ------- tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]]", "for v in val] for val in tensor] def 
merge_outputs(*outputs):", "reserved. from packnet_sfm.utils.image import flip_lr, interpolate_scales from packnet_sfm.utils.misc import filter_dict", "or None Whether corners will be aligned during interpolation Returns", "flip_fn(tensor) else: if not is_list(tensor[0]): return [flip_fn(val) for val in", "over all keys for key, val in output.items(): # Combine", "'intrinsics' ]): batch[key] = batch[key].clone() batch[key][:, 0, 2] = batch['rgb'].shape[3]", ": dict Batch information Returns ------- batch : dict Flipped", "model outputs (e.g. with keys like 'inv_depths' and 'uncertainty') Returns", "\"\"\" for key in filter_dict(output, [ 'inv_depths', 'uncertainty' ]): output[key]", "multi-camera batches (B,N,C,H,W becomes BN,C,H,W) Parameters ---------- batch : dict", "item if is_list(batch[key]): if is_tensor(batch[key][0]) or is_numpy(batch[key][0]): batch[key] = [sample[0]", "['loss'] # Keys to ignore combine = ['metrics'] # Keys", "all keys for key, val in output.items(): # Combine these", "not in merge.keys(), \\ 'Adding duplicated key {}'.format(key) merge[key] =", "list[torch.Tensor] or list[list[torch.Tensor]] Tensor to be flipped flip_fn : Function", "is_tensor, is_list, is_numpy def flip(tensor, flip_fn): \"\"\" Flip tensors or", "'inv_depths' and 'uncertainty') Returns ------- output : dict Flipped output", "------- output : dict Upsampled output \"\"\" for key in", "is multi-camera information if len(batch['rgb'].shape) == 5: assert batch['rgb'].shape[0] ==", "def flip(tensor, flip_fn): \"\"\" Flip tensors or list of tensors", "# Keys to combine merge = {key: {} for key", "in merge.keys(), \\ 'Adding duplicated key {}'.format(key) merge[key] = val", "to be merged Returns ------- output : dict Dictionary with", "None Whether corners will be aligned during interpolation Returns -------", "'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2', ]): output[key] = flip(output[key], flip_lr)", "assert 
batch['rgb'].shape[0] == 1, 'Only batch size 1 is supported", "[flip_fn(val) for val in tensor] else: return [[flip_fn(v) for v", "'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context',", "key in filter_dict(output, [ 'inv_depths_context' ]): output[key] = [interpolate_scales( val,", "Copyright 2020 Toyota Research Institute. All rights reserved. from packnet_sfm.utils.image", "'input_depth_context', ]): batch[key] = flip(batch[key], flip_lr) # Flip intrinsics for", "'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2', ]): output[key]", "key in filter_dict(output, [ 'inv_depths', 'uncertainty' ]): output[key] = interpolate_scales(", "list of tensors based on a function Parameters ---------- tensor", "len(batch['rgb'].shape) == 5: assert batch['rgb'].shape[0] == 1, 'Only batch size", "batch def flip_batch_input(batch): \"\"\" Flip batch input information (copies data", "val in output.items(): # Combine these keys if key in", "over all keys for key in batch.keys(): # If list,", "resolution. Parameters ---------- output : dict Dictionary of model outputs", "output[key] = flip(output[key], flip_lr) return output def upsample_output(output, mode='nearest', align_corners=None):", "in filter_dict(batch, [ 'intrinsics' ]): batch[key] = batch[key].clone() batch[key][:, 0,", "= batch['rgb'].shape[3] - batch[key][:, 0, 2] # Return flipped batch", "Combine these keys if key in combine: for sub_key, sub_val", "batch \"\"\" # Flip tensors for key in filter_dict(batch, [", "Dictionary of model outputs (e.g. 
with keys like 'inv_depths' and", "merge[key][sub_key] = sub_val # Ignore these keys elif key not", "Returns ------- output : dict Upsampled output \"\"\" for key", "interpolation Returns ------- output : dict Upsampled output \"\"\" for", "for logging Parameters ---------- outputs : tuple of dict Outputs", "size 1 is supported for multi-cameras' # Loop over all", "'inv_depths1', 'inv_depths2', 'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2',", "batch[key][:, 0, 2] # Return flipped batch return batch def", "]): output[key] = flip(output[key], flip_lr) return output def upsample_output(output, mode='nearest',", "filter_dict from packnet_sfm.utils.types import is_tensor, is_list, is_numpy def flip(tensor, flip_fn):", "keys if key in combine: for sub_key, sub_val in output[key].items():", "batch[key].clone() batch[key][:, 0, 2] = batch['rgb'].shape[3] - batch[key][:, 0, 2]", "# Flip tensors for key in filter_dict(batch, [ 'rgb', 'rgb_context'," ]
[ "delta_t): traj = [] # inital values t, td, tdd", "tau t1 = dt t2 = dt ** 2 t3", "* t3 + c4 * t2 + c5 * t1", "(dt) - end of traj reached!\" dist = goal -", "x = c1 * t5 + c2 * t4 +", "** 5 c2 = (-15.*dist + (3.*a0 - 2.*a1) /", "* t3 + 3 * c3 * t2 + 2", "next time step dt given that we are # currently", "/ 2. c5 = xd c6 = x x =", "we are # currently at x,xd,xdd, and that we have", "= min_jerk_step(x,xd,xdd,goal,tau, dt) computes # the update of x,xd,xdd for", "the next time step dt given that we are #", "* tau t1 = dt t2 = dt ** 2", "(6.*dist + (a1 - a0) / 2. - 3.*(v0 +", "* t2 + c5 * t1 + c6 xd =", "a0) / 2. - 3.*(v0 + v1)) / tau **", "/ delta_t)): try: t, td, tdd = _min_jerk_step(t, td, tdd,", "t1 + c5 xdd = 20.*c1 * t3 + 12.*c2", "# the goal # ported from matlab dmp toolbox if", "_min_jerk_step(x, xd, xdd, goal, tau, dt): #function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau,", "/ tau ** 3 c4 = xdd / 2. c5", "at x,xd,xdd, and that we have tau until we want", "@author: karl ''' def trajectory(start, goal, duration, delta_t): traj =", "given that we are # currently at x,xd,xdd, and that", "= xdd * tau ** 2 v1 = 0 v0", "- a0) / 2. - 3.*(v0 + v1)) / tau", "dt t2 = dt ** 2 t3 = dt **", "x,xd,xdd, and that we have tau until we want to", "+ (3.*a0 - 2.*a1) / 2. + 8.*v0 + 7.*v1)", "* t2 + 6.*c3 * t1 + 2.*c4 return (x,", "t1 = dt t2 = dt ** 2 t3 =", "v0 = xd * tau t1 = dt t2 =", "c5 * t1 + c6 xd = 5.*c1 * t4", "the goal # ported from matlab dmp toolbox if tau", "dt ** 2 t3 = dt ** 3 t4 =", "trajectory(start, goal, duration, delta_t): traj = [] # inital values", "dt: raise Exception, \"time left (tau) is smaller than current", "c1 * t5 + c2 * t4 + c3 *", "is smaller than current time (dt) - end of traj", "4 c3 = (10.*dist + (a1 - 3.*a0) / 2.", "smaller than current time (dt) - end of traj reached!\"", "t4 = dt ** 4 t5 = dt ** 5", "c5 = xd c6 = x x = c1 *", "2.*a1) / 2. 
+ 8.*v0 + 7.*v1) / tau **", "= 5.*c1 * t4 + 4 * c2 * t3", "c3 * t2 + 2 * c4 * t1 +", "time (dt) - end of traj reached!\" dist = goal", "update of x,xd,xdd for the next time step dt given", "i in range(int(2 * duration / delta_t)): try: t, td,", "def _min_jerk_step(x, xd, xdd, goal, tau, dt): #function [x,xd,xdd] =", "reach # the goal # ported from matlab dmp toolbox", "dist = goal - x a1 = 0 a0 =", "a1 = 0 a0 = xdd * tau ** 2", "dt ** 4 t5 = dt ** 5 c1 =", "* t4 + c3 * t3 + c4 * t2", "traj = [] # inital values t, td, tdd =", "dt) computes # the update of x,xd,xdd for the next", "we have tau until we want to reach # the", "raise Exception, \"time left (tau) is smaller than current time", "[x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt) computes # the update of x,xd,xdd", "delta_t)): try: t, td, tdd = _min_jerk_step(t, td, tdd, goal,", "that we have tau until we want to reach #", "toolbox if tau < dt: raise Exception, \"time left (tau)", "= xd * tau t1 = dt t2 = dt", "** 5 c1 = (6.*dist + (a1 - a0) /", "are # currently at x,xd,xdd, and that we have tau", "goal - x a1 = 0 a0 = xdd *", "6.*v0 - 4.*v1) / tau ** 3 c4 = xdd", "** 3 c4 = xdd / 2. c5 = xd", "have tau until we want to reach # the goal", "goal, duration, delta_t): traj = [] # inital values t,", "(-15.*dist + (3.*a0 - 2.*a1) / 2. + 8.*v0 +", "def trajectory(start, goal, duration, delta_t): traj = [] # inital", "in range(int(2 * duration / delta_t)): try: t, td, tdd", "c2 = (-15.*dist + (3.*a0 - 2.*a1) / 2. +", "and that we have tau until we want to reach", "c5 xdd = 20.*c1 * t3 + 12.*c2 * t2", "xd * tau t1 = dt t2 = dt **", "time step dt given that we are # currently at", "(3.*a0 - 2.*a1) / 2. + 8.*v0 + 7.*v1) /", "= (6.*dist + (a1 - a0) / 2. 
- 3.*(v0", "* tau ** 2 v1 = 0 v0 = xd", "c4 * t1 + c5 xdd = 20.*c1 * t3", "xd, xdd, goal, tau, dt): #function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt)", "5.*c1 * t4 + 4 * c2 * t3 +", "c4 * t2 + c5 * t1 + c6 xd", "* c2 * t3 + 3 * c3 * t2", "** 2 v1 = 0 v0 = xd * tau", "2. - 6.*v0 - 4.*v1) / tau ** 3 c4", "- 2.*a1) / 2. + 8.*v0 + 7.*v1) / tau", "break traj.append([t, td, tdd]) return traj def _min_jerk_step(x, xd, xdd,", "Exception, \"time left (tau) is smaller than current time (dt)", "dmp toolbox if tau < dt: raise Exception, \"time left", "tau ** 3 c4 = xdd / 2. c5 =", "t, td, tdd = _min_jerk_step(t, td, tdd, goal, duration -", "Created on 25.07.2012 @author: karl ''' def trajectory(start, goal, duration,", "** 4 t5 = dt ** 5 c1 = (6.*dist", "karl ''' def trajectory(start, goal, duration, delta_t): traj = []", "dt): #function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt) computes # the update", "t3 = dt ** 3 t4 = dt ** 4", "= dt ** 5 c1 = (6.*dist + (a1 -", "tau ** 5 c2 = (-15.*dist + (3.*a0 - 2.*a1)", "+ (a1 - 3.*a0) / 2. - 6.*v0 - 4.*v1)", "tau ** 4 c3 = (10.*dist + (a1 - 3.*a0)", "c2 * t4 + c3 * t3 + c4 *", "/ tau ** 4 c3 = (10.*dist + (a1 -", "c3 * t3 + c4 * t2 + c5 *", "dt given that we are # currently at x,xd,xdd, and", "+ 6.*c3 * t1 + 2.*c4 return (x, xd, xdd)", "xdd / 2. c5 = xd c6 = x x", "4 * c2 * t3 + 3 * c3 *", "that we are # currently at x,xd,xdd, and that we", "t3 + 12.*c2 * t2 + 6.*c3 * t1 +", "t2 + 6.*c3 * t1 + 2.*c4 return (x, xd,", "- 3.*a0) / 2. - 6.*v0 - 4.*v1) / tau", "* t5 + c2 * t4 + c3 * t3", "c3 = (10.*dist + (a1 - 3.*a0) / 2. 
-", "* t1 + c6 xd = 5.*c1 * t4 +", "on 25.07.2012 @author: karl ''' def trajectory(start, goal, duration, delta_t):", "= xd c6 = x x = c1 * t5", "tdd]) return traj def _min_jerk_step(x, xd, xdd, goal, tau, dt):", "t2 + c5 * t1 + c6 xd = 5.*c1", "td, tdd = start, 0, 0 for i in range(int(2", "goal, tau, dt): #function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt) computes #", "+ v1)) / tau ** 5 c2 = (-15.*dist +", "t3 + 3 * c3 * t2 + 2 *", "= _min_jerk_step(t, td, tdd, goal, duration - i * delta_t,", "[] # inital values t, td, tdd = start, 0,", "a0 = xdd * tau ** 2 v1 = 0", "= (10.*dist + (a1 - 3.*a0) / 2. - 6.*v0", "- 6.*v0 - 4.*v1) / tau ** 3 c4 =", "xd c6 = x x = c1 * t5 +", "delta_t) except: break traj.append([t, td, tdd]) return traj def _min_jerk_step(x,", "/ 2. - 3.*(v0 + v1)) / tau ** 5", "min_jerk_step(x,xd,xdd,goal,tau, dt) computes # the update of x,xd,xdd for the", "step dt given that we are # currently at x,xd,xdd,", "t5 = dt ** 5 c1 = (6.*dist + (a1", "- 3.*(v0 + v1)) / tau ** 5 c2 =", "* t3 + 12.*c2 * t2 + 6.*c3 * t1", "** 2 t3 = dt ** 3 t4 = dt", "5 c2 = (-15.*dist + (3.*a0 - 2.*a1) / 2.", "end of traj reached!\" dist = goal - x a1", "until we want to reach # the goal # ported", "if tau < dt: raise Exception, \"time left (tau) is", "- x a1 = 0 a0 = xdd * tau", "i * delta_t, delta_t) except: break traj.append([t, td, tdd]) return", "v1 = 0 v0 = xd * tau t1 =", "tdd, goal, duration - i * delta_t, delta_t) except: break", "3.*a0) / 2. - 6.*v0 - 4.*v1) / tau **", "start, 0, 0 for i in range(int(2 * duration /", "dt ** 5 c1 = (6.*dist + (a1 - a0)", "# the update of x,xd,xdd for the next time step", "inital values t, td, tdd = start, 0, 0 for", "(a1 - a0) / 2. - 3.*(v0 + v1)) /", "+ c2 * t4 + c3 * t3 + c4", "* duration / delta_t)): try: t, td, tdd = _min_jerk_step(t,", "xdd, goal, tau, dt): #function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt) computes", "t5 + c2 * t4 + c3 * t3 +", "/ 2. 
+ 8.*v0 + 7.*v1) / tau ** 4", "for i in range(int(2 * duration / delta_t)): try: t,", "x,xd,xdd for the next time step dt given that we", "= start, 0, 0 for i in range(int(2 * duration", "= (-15.*dist + (3.*a0 - 2.*a1) / 2. + 8.*v0", "+ c5 * t1 + c6 xd = 5.*c1 *", "from matlab dmp toolbox if tau < dt: raise Exception,", "c2 * t3 + 3 * c3 * t2 +", "3 t4 = dt ** 4 t5 = dt **", "range(int(2 * duration / delta_t)): try: t, td, tdd =", "traj def _min_jerk_step(x, xd, xdd, goal, tau, dt): #function [x,xd,xdd]", "tdd = _min_jerk_step(t, td, tdd, goal, duration - i *", "current time (dt) - end of traj reached!\" dist =", "3.*(v0 + v1)) / tau ** 5 c2 = (-15.*dist", "= xdd / 2. c5 = xd c6 = x", "= 20.*c1 * t3 + 12.*c2 * t2 + 6.*c3", "matlab dmp toolbox if tau < dt: raise Exception, \"time", "< dt: raise Exception, \"time left (tau) is smaller than", "+ 3 * c3 * t2 + 2 * c4", "currently at x,xd,xdd, and that we have tau until we", "#function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt) computes # the update of", "t2 = dt ** 2 t3 = dt ** 3", "want to reach # the goal # ported from matlab", "+ 7.*v1) / tau ** 4 c3 = (10.*dist +", "+ 12.*c2 * t2 + 6.*c3 * t1 + 2.*c4", "goal # ported from matlab dmp toolbox if tau <", "computes # the update of x,xd,xdd for the next time", "values t, td, tdd = start, 0, 0 for i", "2 v1 = 0 v0 = xd * tau t1", "_min_jerk_step(t, td, tdd, goal, duration - i * delta_t, delta_t)", "= 0 v0 = xd * tau t1 = dt", "except: break traj.append([t, td, tdd]) return traj def _min_jerk_step(x, xd,", "goal, duration - i * delta_t, delta_t) except: break traj.append([t,", "try: t, td, tdd = _min_jerk_step(t, td, tdd, goal, duration", "- 4.*v1) / tau ** 3 c4 = xdd /", "* delta_t, delta_t) except: break traj.append([t, td, tdd]) return traj", "(tau) is smaller than current time (dt) - end of", "2 * c4 * t1 + c5 xdd = 20.*c1", "delta_t, delta_t) except: break traj.append([t, td, tdd]) return traj def", "xdd = 20.*c1 * t3 + 12.*c2 * t2 +", "= c1 * t5 + c2 * t4 + 
c3", "8.*v0 + 7.*v1) / tau ** 4 c3 = (10.*dist", "* t1 + c5 xdd = 20.*c1 * t3 +", "xd = 5.*c1 * t4 + 4 * c2 *", "0 v0 = xd * tau t1 = dt t2", "t4 + c3 * t3 + c4 * t2 +", "c6 = x x = c1 * t5 + c2", "ported from matlab dmp toolbox if tau < dt: raise", "tau ** 2 v1 = 0 v0 = xd *", "- end of traj reached!\" dist = goal - x", "tau until we want to reach # the goal #", "* c4 * t1 + c5 xdd = 20.*c1 *", "+ c3 * t3 + c4 * t2 + c5", "duration, delta_t): traj = [] # inital values t, td,", "td, tdd = _min_jerk_step(t, td, tdd, goal, duration - i", "td, tdd]) return traj def _min_jerk_step(x, xd, xdd, goal, tau,", "0 for i in range(int(2 * duration / delta_t)): try:", "* t4 + 4 * c2 * t3 + 3", "+ 2 * c4 * t1 + c5 xdd =", "20.*c1 * t3 + 12.*c2 * t2 + 6.*c3 *", "3 * c3 * t2 + 2 * c4 *", "# currently at x,xd,xdd, and that we have tau until", "t, td, tdd = start, 0, 0 for i in", "= dt t2 = dt ** 2 t3 = dt", "= x x = c1 * t5 + c2 *", "0, 0 for i in range(int(2 * duration / delta_t)):", "x x = c1 * t5 + c2 * t4", "= dt ** 4 t5 = dt ** 5 c1", "/ 2. - 6.*v0 - 4.*v1) / tau ** 3", "7.*v1) / tau ** 4 c3 = (10.*dist + (a1", "** 3 t4 = dt ** 4 t5 = dt", "(a1 - 3.*a0) / 2. - 6.*v0 - 4.*v1) /", "left (tau) is smaller than current time (dt) - end", "t3 + c4 * t2 + c5 * t1 +", "- i * delta_t, delta_t) except: break traj.append([t, td, tdd])", "3 c4 = xdd / 2. c5 = xd c6", "of x,xd,xdd for the next time step dt given that", "4 t5 = dt ** 5 c1 = (6.*dist +", "+ 4 * c2 * t3 + 3 * c3", "= 0 a0 = xdd * tau ** 2 v1", "+ (a1 - a0) / 2. - 3.*(v0 + v1))", "** 4 c3 = (10.*dist + (a1 - 3.*a0) /", "= dt ** 2 t3 = dt ** 3 t4", "/ tau ** 5 c2 = (-15.*dist + (3.*a0 -", "tau, dt): #function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt) computes # the", "c4 = xdd / 2. c5 = xd c6 =", "tau < dt: raise Exception, \"time left (tau) is smaller", "* t2 + 2 * c4 * t1 + c5", "td, tdd, goal, duration - i * delta_t, delta_t) except:", "(10.*dist + (a1 - 3.*a0) / 2. 
- 6.*v0 -", "4.*v1) / tau ** 3 c4 = xdd / 2.", "# ported from matlab dmp toolbox if tau < dt:", "2. - 3.*(v0 + v1)) / tau ** 5 c2", "c6 xd = 5.*c1 * t4 + 4 * c2", "= [] # inital values t, td, tdd = start,", "''' Created on 25.07.2012 @author: karl ''' def trajectory(start, goal,", "''' def trajectory(start, goal, duration, delta_t): traj = [] #", "tdd = start, 0, 0 for i in range(int(2 *", "xdd * tau ** 2 v1 = 0 v0 =", "+ c5 xdd = 20.*c1 * t3 + 12.*c2 *", "duration - i * delta_t, delta_t) except: break traj.append([t, td,", "the update of x,xd,xdd for the next time step dt", "for the next time step dt given that we are", "+ 8.*v0 + 7.*v1) / tau ** 4 c3 =", "duration / delta_t)): try: t, td, tdd = _min_jerk_step(t, td,", "dt ** 3 t4 = dt ** 4 t5 =", "reached!\" dist = goal - x a1 = 0 a0", "= goal - x a1 = 0 a0 = xdd", "return traj def _min_jerk_step(x, xd, xdd, goal, tau, dt): #function", "\"time left (tau) is smaller than current time (dt) -", "= dt ** 3 t4 = dt ** 4 t5", "2. c5 = xd c6 = x x = c1", "+ c6 xd = 5.*c1 * t4 + 4 *", "t4 + 4 * c2 * t3 + 3 *", "12.*c2 * t2 + 6.*c3 * t1 + 2.*c4 return", "5 c1 = (6.*dist + (a1 - a0) / 2.", "c1 = (6.*dist + (a1 - a0) / 2. -", "v1)) / tau ** 5 c2 = (-15.*dist + (3.*a0", "than current time (dt) - end of traj reached!\" dist", "2 t3 = dt ** 3 t4 = dt **", "traj.append([t, td, tdd]) return traj def _min_jerk_step(x, xd, xdd, goal,", "0 a0 = xdd * tau ** 2 v1 =", "2. + 8.*v0 + 7.*v1) / tau ** 4 c3", "to reach # the goal # ported from matlab dmp", "traj reached!\" dist = goal - x a1 = 0", "we want to reach # the goal # ported from", "of traj reached!\" dist = goal - x a1 =", "+ c4 * t2 + c5 * t1 + c6", "t1 + c6 xd = 5.*c1 * t4 + 4", "# inital values t, td, tdd = start, 0, 0", "t2 + 2 * c4 * t1 + c5 xdd", "x a1 = 0 a0 = xdd * tau **", "25.07.2012 @author: karl ''' def trajectory(start, goal, duration, delta_t): traj", "* c3 * t2 + 2 * c4 * t1" ]
[]
[ "assert test == true # def test_make_sequence_array(finalsequencedf): # from orthoexon.util", "orthoexon.util import separate test = separate(exon_id) true = \"ENSE00001229068\" assert", "true = 'ATGGCCGAAGACGCAGACATGCGCAATGAGCTGGAGGAGATGCAGCGAAGGGCTGACCAGTT' \\ # 'GGCTGATGAG' # # assert test", "true = '10256211' assert test == true @pytest.fixture def human_gtf_filename(table_folder):", "for `orthoexon` module. \"\"\" import os import pytest @pytest.fixture def", "\"\"\" test_orthoexon ---------------------------------- Tests for `orthoexon` module. \"\"\" import os", "python # -*- coding: utf-8 -*- \"\"\" test_orthoexon ---------------------------------- Tests", "in enumerate(human_gtf_database.features_of_type('gene')): species1gffutilsgeneid = str(species1gene['gene_id']) species1geneid = separate(species1gffutilsgeneid) for exon", "separate test = separate(exon_id) true = \"ENSE00001229068\" assert test ==", "return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf') @pytest.fixture def human_gtf_database(table_folder): return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf.db') @pytest.fixture", "def human_gtf_filename(table_folder): return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf') @pytest.fixture def human_gtf_database(table_folder): return os.path.join(table_folder,", "species1gffutilsgeneid = str(species1gene['gene_id']) species1geneid = separate(species1gffutilsgeneid) for exon in human_gtf_database.children(species1geneid,", "\"ENSE00001229068\" assert test == true def test_separate(exon_id): from orthoexon.util import", "true = \"ENSE00001229068\" assert test == true @pytest.fixture def location():", "translate from orthoexon.util import separate for index, species1gene in enumerate(human_gtf_database.features_of_type('gene')):", "test = separate(exon_id_with_quotes) true = \"ENSE00001229068\" assert test == true", "def human_fasta(table_folder): return os.path.join(table_folder, 'GRCm38.p3.genome.fa') def 
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_orthoexon
----------------------------------

Tests for `orthoexon` module.
"""
import os

import pytest


@pytest.fixture
def exon_id_with_quotes():
    # Exon ID as it appears in raw GTF attributes: version suffix, wrapped in quotes.
    return "'ENSE00001229068.1'"


@pytest.fixture
def exon_id():
    # Same exon ID without the surrounding quotes.
    return "ENSE00001229068.1"


def test_separate_with_quotes(exon_id_with_quotes):
    """separate() should strip both the quotes and the version suffix."""
    from orthoexon.util import separate

    test = separate(exon_id_with_quotes)
    true = "ENSE00001229068"

    assert test == true


def test_separate(exon_id):
    """separate() should strip the version suffix from an unquoted ID."""
    from orthoexon.util import separate

    test = separate(exon_id)
    true = "ENSE00001229068"

    assert test == true


@pytest.fixture
def location():
    # Genomic location string: chrom:start-end:strand:frame.
    return "chr20:10256140-10256211:+:0"


def test_splitstart(location):
    """splitstart() should return the start coordinate as a string."""
    from orthoexon.util import splitstart

    test = splitstart(location)
    true = '10256140'

    assert test == true


def test_splitend(location):
    """splitend() should return the end coordinate as a string."""
    from orthoexon.util import splitend

    test = splitend(location)
    true = '10256211'

    assert test == true


# NOTE(review): the `table_folder` fixture used below is not defined in this
# file — presumably it lives in a conftest.py; confirm before running.
@pytest.fixture
def human_gtf_filename(table_folder):
    return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf')


@pytest.fixture
def human_gtf_database(table_folder):
    # NOTE(review): returns a path; test_translate calls .features_of_type()
    # on it as if it were a gffutils database — verify how the fixture is
    # consumed upstream.
    return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf.db')


@pytest.fixture
def human_fasta(table_folder):
    return os.path.join(table_folder, 'GRCm38.p3.genome.fa')


def test_translate(exon_id, human_fasta, human_gtf_database):
    """translate() on the matching CDS exon should yield the known peptide."""
    from orthoexon.util import translate
    from orthoexon.util import separate

    # Fix: initialise `test` so a missing exon produces a clean assertion
    # failure instead of a NameError.
    test = None
    for species1gene in human_gtf_database.features_of_type('gene'):
        species1gffutilsgeneid = str(species1gene['gene_id'])
        species1geneid = separate(species1gffutilsgeneid)

        for exon in human_gtf_database.children(species1geneid,
                                                featuretype='CDS',
                                                order_by='start'):
            if exon_id == exon:
                test = translate(exon, human_fasta)
                break
        # Only the first gene is inspected (original behaviour).
        break

    true = 'MAEDADMRNELEEMQRRADQLADE'

    assert test == true


# def test_getsequence(exon, human_gtf_database):
#     from orthoexon.util import getsequence
#
#     test = getsequence(exon, human_gtf_database)
#     true = 'ATGGCCGAAGACGCAGACATGCGCAATGAGCTGGAGGAGATGCAGCGAAGGGCTGACCAGTT' \
#            'GGCTGATGAG'
#
#     assert test == true


# def test_make_sequence_array(finalsequencedf):
#     from orthoexon.util import make_sequence_array
#
#     test = make_sequence_array(finalsequencedf)
#     true = ......
#
#     assert test == true
import argparse
import os
import shutil
import time

import numpy as np

from utils import model, utils
from utils.record import RecordAudio

parser = argparse.ArgumentParser()
parser.add_argument('--audio_db', default='audio_db/', type=str, help='音频库的路径')
parser.add_argument('--threshold', default=0.7, type=float, help='判断是否为同一个人的阈值')
parser.add_argument('--model_path', default=r'models/resnet34-56.h5', type=str, help='模型的路径')
args = parser.parse_args()

# Registry of enrolled speakers: parallel lists — person_name[i] owns
# voiceprint person_feature[i].
person_feature = []
person_name = []

# Build the evaluation network and load the pretrained weights.
network_eval = model.vggvox_resnet2d_icassp(input_dim=(257, None, 1), mode='eval')
network_eval.load_weights(os.path.join(args.model_path), by_name=True)
print('==> successfully loading model {}.'.format(args.model_path))


def predict(path):
    """Extract the voiceprint feature vector for one audio file.

    :param path: path of the audio file to embed
    :return: 1-D feature vector produced by the evaluation network
    """
    specs = utils.load_data(path, mode='eval')
    # Add batch and channel dimensions expected by the network.
    specs = np.expand_dims(np.expand_dims(specs, 0), -1)
    feature = network_eval.predict(specs)[0]
    return feature


def load_audio_db(audio_db_path):
    """Load every audio file in the database directory and cache its feature.

    :param audio_db_path: directory containing one audio file per speaker
    """
    start = time.time()
    audios = os.listdir(audio_db_path)
    for audio in audios:
        path = os.path.join(audio_db_path, audio)
        # Fix: use splitext instead of audio[:-4], which silently mangled
        # names with extensions that are not exactly three characters.
        name = os.path.splitext(audio)[0]
        feature = predict(path)
        person_name.append(name)
        person_feature.append(feature)
        print("Loaded %s audio." % name)
    end = time.time()
    print('加载音频库完成,消耗时间:%fms' % (round((end - start) * 1000)))


def recognition(path):
    """Identify the speaker of an audio file against the enrolled database.

    :param path: path of the audio file to identify
    :return: (best matching name, similarity score); name is '' if the
             database is empty
    """
    name = ''
    pro = 0
    feature = predict(path)
    for i, person_f in enumerate(person_feature):
        # Similarity via dot product (assumes features are L2-normalized so
        # this approximates cosine similarity — TODO confirm in utils).
        dist = np.dot(feature, person_f.T)
        if dist > pro:
            pro = dist
            name = person_name[i]
    return name, pro


def register(path, user_name):
    """Enroll a new user: move the recording into the database directory and
    cache its voiceprint feature.

    :param path: path of the freshly recorded audio file
    :param user_name: display name to register the voiceprint under
    """
    # Fix: keep the real extension via splitext instead of assuming the last
    # four characters of the filename are '.xyz'.
    save_path = os.path.join(args.audio_db, user_name + os.path.splitext(path)[1])
    shutil.move(path, save_path)
    feature = predict(save_path)
    person_name.append(user_name)
    person_feature.append(feature)
if __name__ == '__main__':
    # Interactive console menu: 0 = enroll a recording, 1 = identify a speaker.
    load_audio_db(args.audio_db)
    record_audio = RecordAudio()
    while True:
        # Fix: a non-numeric menu choice used to crash the program with an
        # unhandled ValueError from int(); treat it like any invalid choice.
        try:
            select_fun = int(input("请选择功能,0为注册音频到声纹库,1为执行声纹识别:"))
        except ValueError:
            print('请正确选择功能')
            continue
        if select_fun == 0:
            audio_path = record_audio.record()
            name = input("请输入该音频用户的名称:")
            if name == '':
                continue
            register(audio_path, name)
        elif select_fun == 1:
            audio_path = record_audio.record()
            name, p = recognition(audio_path)
            if p > args.threshold:
                print("识别说话的为:%s,相似度为:%f" % (name, p))
            else:
                print("音频库没有该用户的语音")
        else:
            print('请正确选择功能')
record_audio.record() name", "# 计算相识度 dist = np.dot(feature, person_f.T) if dist > pro:", "'__main__': load_audio_db(args.audio_db) record_audio = RecordAudio() while True: select_fun = int(input(\"请选择功能,0为注册音频到声纹库,1为执行声纹识别:\"))", "RecordAudio parser = argparse.ArgumentParser() parser.add_argument('--audio_db', default='audio_db/', type=str, help='音频库的路径') parser.add_argument('--threshold', default=0.7,", "shutil.move(path, save_path) feature = predict(save_path) person_name.append(user_name) person_feature.append(feature) if __name__ ==", "# 加载预训练模型 network_eval.load_weights(os.path.join(args.model_path), by_name=True) print('==> successfully loading model {}.'.format(args.model_path)) #", "in audios: path = os.path.join(audio_db_path, audio) name = audio[:-4] feature", "= predict(path) for i, person_f in enumerate(person_feature): # 计算相识度 dist", "from utils import model, utils from utils.record import RecordAudio parser", "time import numpy as np from utils import model, utils", "加载预训练模型 network_eval.load_weights(os.path.join(args.model_path), by_name=True) print('==> successfully loading model {}.'.format(args.model_path)) # 预测获取声纹特征", "predict(path): specs = utils.load_data(path, mode='eval') specs = np.expand_dims(np.expand_dims(specs, 0), -1)", "0), -1) feature = network_eval.predict(specs)[0] return feature # 加载要识别的音频库 def", "def load_audio_db(audio_db_path): start = time.time() audios = os.listdir(audio_db_path) for audio", "name = audio[:-4] feature = predict(path) person_name.append(name) person_feature.append(feature) print(\"Loaded %s", "name, pro # 声纹注册 def register(path, user_name): save_path = os.path.join(args.audio_db,", "specs = utils.load_data(path, mode='eval') specs = np.expand_dims(np.expand_dims(specs, 0), -1) feature", "import time import numpy as np from utils import model,", "feature = predict(save_path) person_name.append(user_name) person_feature.append(feature) if __name__ == '__main__': load_audio_db(args.audio_db)", "= predict(path) 
person_name.append(name) person_feature.append(feature) print(\"Loaded %s audio.\" % name) end", "select_fun == 1: audio_path = record_audio.record() name, p = recognition(audio_path)", "utils.record import RecordAudio parser = argparse.ArgumentParser() parser.add_argument('--audio_db', default='audio_db/', type=str, help='音频库的路径')", "model {}.'.format(args.model_path)) # 预测获取声纹特征 def predict(path): specs = utils.load_data(path, mode='eval')", "end = time.time() print('加载音频库完成,消耗时间:%fms' % (round((end - start) * 1000)))", "= input(\"请输入该音频用户的名称:\") if name == '': continue register(audio_path, name) elif", "specs = np.expand_dims(np.expand_dims(specs, 0), -1) feature = network_eval.predict(specs)[0] return feature", "= '' pro = 0 feature = predict(path) for i,", "successfully loading model {}.'.format(args.model_path)) # 预测获取声纹特征 def predict(path): specs =", "# 加载要识别的音频库 def load_audio_db(audio_db_path): start = time.time() audios = os.listdir(audio_db_path)", "= model.vggvox_resnet2d_icassp(input_dim=(257, None, 1), mode='eval') # 加载预训练模型 network_eval.load_weights(os.path.join(args.model_path), by_name=True) print('==>", "print('加载音频库完成,消耗时间:%fms' % (round((end - start) * 1000))) # 识别声纹 def", "if dist > pro: pro = dist name = person_name[i]", "> pro: pro = dist name = person_name[i] return name,", "save_path) feature = predict(save_path) person_name.append(user_name) person_feature.append(feature) if __name__ == '__main__':", "if __name__ == '__main__': load_audio_db(args.audio_db) record_audio = RecordAudio() while True:", "= recognition(audio_path) if p > args.threshold: print(\"识别说话的为:%s,相似度为:%f\" % (name, p))", "as np from utils import model, utils from utils.record import", "= np.dot(feature, person_f.T) if dist > pro: pro = dist", "= argparse.ArgumentParser() parser.add_argument('--audio_db', default='audio_db/', type=str, help='音频库的路径') parser.add_argument('--threshold', default=0.7, type=float, help='判断是否为同一个人的阈值')", "utils from utils.record import 
RecordAudio parser = argparse.ArgumentParser() parser.add_argument('--audio_db', default='audio_db/',", "network_eval.predict(specs)[0] return feature # 加载要识别的音频库 def load_audio_db(audio_db_path): start = time.time()", "audio) name = audio[:-4] feature = predict(path) person_name.append(name) person_feature.append(feature) print(\"Loaded", "select_fun == 0: audio_path = record_audio.record() name = input(\"请输入该音频用户的名称:\") if", "time.time() print('加载音频库完成,消耗时间:%fms' % (round((end - start) * 1000))) # 识别声纹" ]
[ "60) h_m_s = ( int(all_secs // 3600 % 24), int(all_secs", "not metadata_type: abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting(metadata_type.definition)", "return redirect(url_for(\".products_page\")) @bp.route(\"/products\") def products_page(): return utils.render( \"products.html\", ) @bp.route(\"/metadata-types\")", "remove the name from a \"/product/<name>\" url, take them somewhere", "return redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if", "Blueprint(\"product\", __name__) @bp.route(\"/about.csv\") def legacy_about_csv(): return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def storage_csv():", "[ location.common_prefix for location in (product_locations.get(product.name) or []) ], _utils.product_license(product),", "in _model.STORE.all_dataset_types()), content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\") def metadata_type_list_text(): # This is", "\"Product\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\",", "(product_locations.get(product.name) or [])) for product, summary in _model.get_products_with_summaries() ], )", "% 24), int(all_secs // 60 % 60), secs if secs", "storage_csv(): \"\"\"Get the product storage table as a CSV\"\"\" product_locations", "def metadata_type_list_text(): # This is useful for bash scripts when", "include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\", name=name))", "for bash scripts when we want to loop them :)", "metadata_type.definition, \"Metadata Type\", include_source_url=True ) return 
utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc():", "resp @bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( type_.definition,", "int(all_secs % 60) h_m_s = ( int(all_secs // 3600 %", "table as a CSV\"\"\" product_locations = _model.STORE.products_location_samples_all() return utils.as_csv( filename_prefix=\"product-information\",", "timedelta): \"\"\" Format a timedelta as an iso8601 duration >>>", "redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not", "3600 % 24), int(all_secs // 60 % 60), secs if", "filename if they have one. utils.suggest_download_filename( resp, prefix=\"products\", suffix=\".odc-product.yaml\", )", "1 != 0 else int(secs), ) parts = [\"P\"] days", "\"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata, products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\",", "want to loop products :) return Response( \"\\n\".join(t.name for t", "utils.render( \"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\")", "in (product_locations.get(product.name) or []) ], _utils.product_license(product), url_for(\"product.raw_product_doc\", name=product.name, _external=True), summary.last_refresh_time,", "if they have one. 
utils.suggest_download_filename( resp, prefix=\"products\", suffix=\".odc-product.yaml\", ) return", "product_locations = _model.STORE.products_location_samples_all() return utils.as_csv( filename_prefix=\"product-information\", headers=( \"name\", \"count\", \"locations\",", "all_secs: for val, name in zip(h_m_s, [\"H\", \"M\", \"S\"]): if", "\"count\", \"locations\", \"license\", \"definition\", \"summary_time\", \"metadata_type\", ), rows=( ( product.name,", "\"\\n\".join(t.name for t in _model.STORE.all_dataset_types()), content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\") def metadata_type_list_text():", "= tdelta.total_seconds() secs = int(all_secs % 60) h_m_s = (", "name=type_.name, _external=True ), ) for type_ in _model.STORE.all_metadata_types() ), )", "or [])) for product, summary in _model.get_products_with_summaries() ], ) @bp.route(\"/product\")", "_utils, _utils as utils _LOG = logging.getLogger(__name__) bp = Blueprint(\"product\",", "_model.STORE.all_dataset_types() ) ) # Add Explorer ID to the download", "name=product.name, _external=True), summary.last_refresh_time, product.metadata_type.name, ) for product, summary in _model.get_products_with_summaries()", "in _model.get_products_with_summaries() ], ) @bp.route(\"/product\") def product_redirect(): \"\"\" If people", "redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\") def metadata_type_page(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not", "from cubedash import _model, _utils, _utils as utils _LOG =", "summary.last_refresh_time, product.metadata_type.name, ) for product, summary in _model.get_products_with_summaries() ), )", "utils.suggest_download_filename( resp, prefix=\"products\", suffix=\".odc-product.yaml\", ) return resp @bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc():", "for location in (product_locations.get(product.name) or []) ], _utils.product_license(product), 
url_for(\"product.raw_product_doc\", name=product.name,", "def legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name): metadata_type =", "This is useful for bash scripts when we want to", "utils.prepare_document_formatting( product.definition, \"Product\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name):", "_model.STORE.all_metadata_types() ), ) # Add Explorer ID to the download", "the download filename if they have one. utils.suggest_download_filename( resp, prefix=\"products\",", "def product_redirect(): \"\"\" If people remove the name from a", "resp def _iso8601_duration(tdelta: timedelta): \"\"\" Format a timedelta as an", "for t in _model.STORE.all_dataset_types()), content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\") def metadata_type_list_text(): #", "= utils.as_yaml( *( utils.prepare_document_formatting( type_.definition, f\"Metadata Type {type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\",", ") return resp def _iso8601_duration(tdelta: timedelta): \"\"\" Format a timedelta", "// 3600 % 24), int(all_secs // 60 % 60), secs", "\"\"\"Get the product storage table as a CSV\"\"\" product_locations =", "Response( \"\\n\".join(t.name for t in _model.STORE.all_dataset_types()), content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\") def", "\"\"\" all_secs = tdelta.total_seconds() secs = int(all_secs % 60) h_m_s", "zip(h_m_s, [\"H\", \"M\", \"S\"]): if val: parts.append(f\"{val}{name}\") else: parts.append(\"T0S\") return", "p for p in _model.STORE.index.products.get_all() if p.metadata_type.name == name ),", "import _model, _utils, _utils as utils _LOG = logging.getLogger(__name__) bp", "= utils.prepare_document_formatting( product.definition, \"Product\", include_source_url=True ) return 
utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def", "they have one. utils.suggest_download_filename( resp, prefix=\"products\", suffix=\".odc-product.yaml\", ) return resp", "def legacy_about_csv(): return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def storage_csv(): \"\"\"Get the product", "content_type=\"text/plain\", ) @bp.route(\"/audit/storage\") def storage_page(): product_locations = _model.STORE.products_location_samples_all() return utils.render(", "\"storage.html\", product_summary_and_location=[ (product, summary, (product_locations.get(product.name) or [])) for product, summary", "metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting(metadata_type.definition) products_using_it = sorted( (", "tdelta.total_seconds() secs = int(all_secs % 60) h_m_s = ( int(all_secs", "import timedelta from flask import Blueprint, Response, abort, redirect, url_for", "_model.STORE.all_metadata_types()), content_type=\"text/plain\", ) @bp.route(\"/audit/storage\") def storage_page(): product_locations = _model.STORE.products_location_samples_all() return", "redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name): product = _model.STORE.index.products.get_by_name(name) if not", "abort(404, f\"Unknown product {name!r}\") ordered_metadata = utils.prepare_document_formatting( product.definition, \"Product\", include_source_url=True", ">>> _iso8601_duration(timedelta(seconds=0)) 'PT0S' >>> _iso8601_duration(timedelta(seconds=1)) 'PT1S' >>> _iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>>", "cubedash import _model, _utils, _utils as utils _LOG = logging.getLogger(__name__)", "in _model.STORE.all_dataset_types() ) ) # Add Explorer ID to the", "rows=( ( product.name, summary.dataset_count, [ location.common_prefix for location in (product_locations.get(product.name)", ") for product in _model.STORE.all_dataset_types() ) ) # Add 
Explorer", "@bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( type_.definition, f\"Metadata", "them somewhere useful \"\"\" return redirect(url_for(\".products_page\")) @bp.route(\"/products\") def products_page(): return", "return redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\") def metadata_type_page(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if", "\"metadata_type\", ), rows=( ( product.name, summary.dataset_count, [ location.common_prefix for location", "raw_all_products_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( product.definition, f\"Product {product.name}\", include_source_url=url_for(", "p: p.name, ) return utils.render( \"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata, products_using_it=products_using_it, )", "parts = [\"P\"] days = int(all_secs // 86400) if days:", "== name ), key=lambda p: p.name, ) return utils.render( \"metadata-type.html\",", "metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404, f\"Unknown metadata type", "def legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name): product =", "return utils.render( \"products.html\", ) @bp.route(\"/metadata-types\") def metadata_types_page(): return utils.render( \"metadata-types.html\",", "for t in _model.STORE.all_metadata_types()), content_type=\"text/plain\", ) @bp.route(\"/audit/storage\") def storage_page(): product_locations", "type {name!r}\") ordered_metadata = utils.prepare_document_formatting( metadata_type.definition, \"Metadata Type\", include_source_url=True )", "days = int(all_secs // 86400) if days: parts.append(f\"{days}D\") if any(h_m_s):", "iso8601 duration >>> _iso8601_duration(timedelta(seconds=0)) 'PT0S' >>> 
_iso8601_duration(timedelta(seconds=1)) 'PT1S' >>> _iso8601_duration(timedelta(seconds=23423))", "Explorer ID to the download filename if they have one.", "_iso8601_duration(timedelta(seconds=0)) 'PT0S' >>> _iso8601_duration(timedelta(seconds=1)) 'PT1S' >>> _iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556))", "one. utils.suggest_download_filename( resp, prefix=\"products\", suffix=\".odc-product.yaml\", ) return resp @bp.route(\"/metadata-types.odc-type.yaml\") def", "metadata_type: abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting(metadata_type.definition) products_using_it", "flask import Blueprint, Response, abort, redirect, url_for from cubedash import", "( product.name, summary.dataset_count, [ location.common_prefix for location in (product_locations.get(product.name) or", "'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\" all_secs = tdelta.total_seconds() secs =", "= int(all_secs // 86400) if days: parts.append(f\"{days}D\") if any(h_m_s): parts.append(\"T\")", "% 1 != 0 else int(secs), ) parts = [\"P\"]", "= logging.getLogger(__name__) bp = Blueprint(\"product\", __name__) @bp.route(\"/about.csv\") def legacy_about_csv(): return", "logging from datetime import timedelta from flask import Blueprint, Response,", "in _model.STORE.all_metadata_types() ), ) # Add Explorer ID to the", "), ) # Add Explorer ID to the download filename", "summary, (product_locations.get(product.name) or [])) for product, summary in _model.get_products_with_summaries() ],", "def products_page(): return utils.render( \"products.html\", ) @bp.route(\"/metadata-types\") def metadata_types_page(): return", "utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\") def 
metadata_type_page(name):", "= Blueprint(\"product\", __name__) @bp.route(\"/about.csv\") def legacy_about_csv(): return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def", "_iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\" all_secs = tdelta.total_seconds() secs", "[\"P\"] days = int(all_secs // 86400) if days: parts.append(f\"{days}D\") if", "_external=True ), ) for product in _model.STORE.all_dataset_types() ) ) #", "f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting( metadata_type.definition, \"Metadata Type\",", "to loop them :) return Response( \"\\n\".join(t.name for t in", "abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting( metadata_type.definition, \"Metadata", "in _model.get_products_with_summaries() ), ) @bp.route(\"/products.txt\") def product_list_text(): # This is", "have one. utils.suggest_download_filename( resp, prefix=\"products\", suffix=\".odc-product.yaml\", ) return resp @bp.route(\"/metadata-types.odc-type.yaml\")", "__name__) @bp.route(\"/about.csv\") def legacy_about_csv(): return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def storage_csv(): \"\"\"Get", "for p in _model.STORE.index.products.get_all() if p.metadata_type.name == name ), key=lambda", "content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\") def metadata_type_list_text(): # This is useful for", "name in zip(h_m_s, [\"H\", \"M\", \"S\"]): if val: parts.append(f\"{val}{name}\") else:", "if any(h_m_s): parts.append(\"T\") if all_secs: for val, name in zip(h_m_s,", "metadata_type: abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting( metadata_type.definition,", "in _model.STORE.index.products.get_all() if p.metadata_type.name == name ), key=lambda p: p.name,", "= ( int(all_secs // 3600 % 24), int(all_secs // 60", "if 
all_secs: for val, name in zip(h_m_s, [\"H\", \"M\", \"S\"]):", "= utils.prepare_document_formatting( metadata_type.definition, \"Metadata Type\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\")", "products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def", "loop products :) return Response( \"\\n\".join(t.name for t in _model.STORE.all_dataset_types()),", "= utils.prepare_document_formatting(metadata_type.definition) products_using_it = sorted( ( p for p in", "_model.STORE.index.products.get_by_name(name) if not product: abort(404, f\"Unknown product {name!r}\") ordered_metadata =", "resp = utils.as_yaml( *( utils.prepare_document_formatting( product.definition, f\"Product {product.name}\", include_source_url=url_for( \".raw_product_doc\",", ") return resp @bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc(): resp = utils.as_yaml( *(", "[\"H\", \"M\", \"S\"]): if val: parts.append(f\"{val}{name}\") else: parts.append(\"T0S\") return \"\".join(parts)", "utils.prepare_document_formatting( product.definition, f\"Product {product.name}\", include_source_url=url_for( \".raw_product_doc\", name=product.name, _external=True ), )", "abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting(metadata_type.definition) products_using_it =", "product_locations = _model.STORE.products_location_samples_all() return utils.render( \"storage.html\", product_summary_and_location=[ (product, summary, (product_locations.get(product.name)", "), ) @bp.route(\"/products.txt\") def product_list_text(): # This is useful for", "= [\"P\"] days = int(all_secs // 86400) if days: parts.append(f\"{days}D\")", "return utils.as_csv( filename_prefix=\"product-information\", headers=( 
\"name\", \"count\", \"locations\", \"license\", \"definition\", \"summary_time\",", "product, summary in _model.get_products_with_summaries() ), ) @bp.route(\"/products.txt\") def product_list_text(): #", "a \"/product/<name>\" url, take them somewhere useful \"\"\" return redirect(url_for(\".products_page\"))", "Type {type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\", name=type_.name, _external=True ), ) for type_", "scripts when we want to loop them :) return Response(", "\".raw_product_doc\", name=product.name, _external=True ), ) for product in _model.STORE.all_dataset_types() )", "_model.get_products_with_summaries() ), ) @bp.route(\"/products.txt\") def product_list_text(): # This is useful", "when we want to loop them :) return Response( \"\\n\".join(t.name", "\"name\", \"count\", \"locations\", \"license\", \"definition\", \"summary_time\", \"metadata_type\", ), rows=( (", "if days: parts.append(f\"{days}D\") if any(h_m_s): parts.append(\"T\") if all_secs: for val,", "include_source_url=url_for( \".raw_product_doc\", name=product.name, _external=True ), ) for product in _model.STORE.all_dataset_types()", "ordered_metadata = utils.prepare_document_formatting( product.definition, \"Product\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\")", "), ) for type_ in _model.STORE.all_metadata_types() ), ) # Add", "metadata_doc=ordered_metadata, products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\")", "Blueprint, Response, abort, redirect, url_for from cubedash import _model, _utils,", "resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\", ) return resp def _iso8601_duration(tdelta: timedelta): \"\"\"", "storage_page(): product_locations = _model.STORE.products_location_samples_all() return utils.render( 
\"storage.html\", product_summary_and_location=[ (product, summary,", "name ), key=lambda p: p.name, ) return utils.render( \"metadata-type.html\", metadata_type=metadata_type,", "download filename if they have one. utils.suggest_download_filename( resp, prefix=\"products\", suffix=\".odc-product.yaml\",", "h_m_s = ( int(all_secs // 3600 % 24), int(all_secs //", "for product, summary in _model.get_products_with_summaries() ), ) @bp.route(\"/products.txt\") def product_list_text():", "*( utils.prepare_document_formatting( type_.definition, f\"Metadata Type {type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\", name=type_.name, _external=True", "product, summary in _model.get_products_with_summaries() ], ) @bp.route(\"/product\") def product_redirect(): \"\"\"", "timedelta from flask import Blueprint, Response, abort, redirect, url_for from", "an iso8601 duration >>> _iso8601_duration(timedelta(seconds=0)) 'PT0S' >>> _iso8601_duration(timedelta(seconds=1)) 'PT1S' >>>", "If people remove the name from a \"/product/<name>\" url, take", "we want to loop products :) return Response( \"\\n\".join(t.name for", "key=lambda p: p.name, ) return utils.render( \"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata, products_using_it=products_using_it,", "is useful for bash scripts when we want to loop", "not product: abort(404, f\"Unknown product {name!r}\") ordered_metadata = utils.prepare_document_formatting( product.definition,", "for bash scripts when we want to loop products :)", "( p for p in _model.STORE.index.products.get_all() if p.metadata_type.name == name", "redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def storage_csv(): \"\"\"Get the product storage table as", ") parts = [\"P\"] days = int(all_secs // 86400) if", "as an iso8601 duration >>> _iso8601_duration(timedelta(seconds=0)) 'PT0S' >>> _iso8601_duration(timedelta(seconds=1)) 'PT1S'", "_model.STORE.index.products.get_all() if p.metadata_type.name == 
name ), key=lambda p: p.name, )", "[]) ], _utils.product_license(product), url_for(\"product.raw_product_doc\", name=product.name, _external=True), summary.last_refresh_time, product.metadata_type.name, ) for", "@bp.route(\"/audit/storage\") def storage_page(): product_locations = _model.STORE.products_location_samples_all() return utils.render( \"storage.html\", product_summary_and_location=[", "// 60 % 60), secs if secs % 1 !=", "download filename if they have one. utils.suggest_download_filename( resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\",", "return resp def _iso8601_duration(tdelta: timedelta): \"\"\" Format a timedelta as", "( int(all_secs // 3600 % 24), int(all_secs // 60 %", "def raw_product_doc(name): product = _model.STORE.index.products.get_by_name(name) if not product: abort(404, f\"Unknown", "\"definition\", \"summary_time\", \"metadata_type\", ), rows=( ( product.name, summary.dataset_count, [ location.common_prefix", "def legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\") def metadata_type_page(name): metadata_type =", "(product_locations.get(product.name) or []) ], _utils.product_license(product), url_for(\"product.raw_product_doc\", name=product.name, _external=True), summary.last_refresh_time, product.metadata_type.name,", "want to loop them :) return Response( \"\\n\".join(t.name for t", "duration >>> _iso8601_duration(timedelta(seconds=0)) 'PT0S' >>> _iso8601_duration(timedelta(seconds=1)) 'PT1S' >>> _iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S'", "def _iso8601_duration(tdelta: timedelta): \"\"\" Format a timedelta as an iso8601", "*( utils.prepare_document_formatting( product.definition, f\"Product {product.name}\", include_source_url=url_for( \".raw_product_doc\", name=product.name, _external=True ),", "p.name, ) return utils.render( \"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata, 
products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\")", "_iso8601_duration(tdelta: timedelta): \"\"\" Format a timedelta as an iso8601 duration", ") ) # Add Explorer ID to the download filename", "ID to the download filename if they have one. utils.suggest_download_filename(", "name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type:", "int(all_secs // 60 % 60), secs if secs % 1", ") @bp.route(\"/metadata-types.txt\") def metadata_type_list_text(): # This is useful for bash", "f\"Product {product.name}\", include_source_url=url_for( \".raw_product_doc\", name=product.name, _external=True ), ) for product", "product_summary_and_location=[ (product, summary, (product_locations.get(product.name) or [])) for product, summary in", "@bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name): product", "= _model.STORE.index.products.get_by_name(name) if not product: abort(404, f\"Unknown product {name!r}\") ordered_metadata", "{product.name}\", include_source_url=url_for( \".raw_product_doc\", name=product.name, _external=True ), ) for product in", "@bp.route(\"/metadata-types\") def metadata_types_page(): return utils.render( \"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name):", "product {name!r}\") ordered_metadata = utils.prepare_document_formatting( product.definition, \"Product\", include_source_url=True ) return", "a timedelta as an iso8601 duration >>> _iso8601_duration(timedelta(seconds=0)) 'PT0S' >>>", "utils.prepare_document_formatting( type_.definition, f\"Metadata Type {type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\", name=type_.name, _external=True ),", 
"name=product.name, _external=True ), ) for product in _model.STORE.all_dataset_types() ) )", "for type_ in _model.STORE.all_metadata_types() ), ) # Add Explorer ID", "prefix=\"products\", suffix=\".odc-product.yaml\", ) return resp @bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc(): resp =", "product storage table as a CSV\"\"\" product_locations = _model.STORE.products_location_samples_all() return", "\"\"\" return redirect(url_for(\".products_page\")) @bp.route(\"/products\") def products_page(): return utils.render( \"products.html\", )", "take them somewhere useful \"\"\" return redirect(url_for(\".products_page\")) @bp.route(\"/products\") def products_page():", "_iso8601_duration(timedelta(seconds=1)) 'PT1S' >>> _iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\" all_secs", "from flask import Blueprint, Response, abort, redirect, url_for from cubedash", "_iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\" all_secs = tdelta.total_seconds() secs = int(all_secs %", "_model.STORE.products_location_samples_all() return utils.render( \"storage.html\", product_summary_and_location=[ (product, summary, (product_locations.get(product.name) or []))", ") return utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\")", "metadata_type_list_text(): # This is useful for bash scripts when we", "url, take them somewhere useful \"\"\" return redirect(url_for(\".products_page\")) @bp.route(\"/products\") def", "utils.as_csv( filename_prefix=\"product-information\", headers=( \"name\", \"count\", \"locations\", \"license\", \"definition\", \"summary_time\", \"metadata_type\",", "one. 
utils.suggest_download_filename( resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\", ) return resp def _iso8601_duration(tdelta:", "in zip(h_m_s, [\"H\", \"M\", \"S\"]): if val: parts.append(f\"{val}{name}\") else: parts.append(\"T0S\")", "Type\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc(): resp =", "redirect, url_for from cubedash import _model, _utils, _utils as utils", "we want to loop them :) return Response( \"\\n\".join(t.name for", "def raw_all_metadata_types_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( type_.definition, f\"Metadata Type", "product_redirect(): \"\"\" If people remove the name from a \"/product/<name>\"", "[])) for product, summary in _model.get_products_with_summaries() ], ) @bp.route(\"/product\") def", "name from a \"/product/<name>\" url, take them somewhere useful \"\"\"", "ordered_metadata = utils.prepare_document_formatting( metadata_type.definition, \"Metadata Type\", include_source_url=True ) return utils.as_yaml(ordered_metadata)", "ordered_metadata = utils.prepare_document_formatting(metadata_type.definition) products_using_it = sorted( ( p for p", "suffix=\".odc-type.yaml\", ) return resp def _iso8601_duration(tdelta: timedelta): \"\"\" Format a", ") for type_ in _model.STORE.all_metadata_types() ), ) # Add Explorer", "@bp.route(\"/metadata-types.txt\") def metadata_type_list_text(): # This is useful for bash scripts", "summary in _model.get_products_with_summaries() ), ) @bp.route(\"/products.txt\") def product_list_text(): # This", "product in _model.STORE.all_dataset_types() ) ) # Add Explorer ID to", "% 60), secs if secs % 1 != 0 else", ") @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name):", ">>> 
_iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\" all_secs = tdelta.total_seconds()", "if they have one. utils.suggest_download_filename( resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\", ) return", "# Add Explorer ID to the download filename if they", "return utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\") def", "'P52830DT14H35M56S' \"\"\" all_secs = tdelta.total_seconds() secs = int(all_secs % 60)", "as a CSV\"\"\" product_locations = _model.STORE.products_location_samples_all() return utils.as_csv( filename_prefix=\"product-information\", headers=(", "// 86400) if days: parts.append(f\"{days}D\") if any(h_m_s): parts.append(\"T\") if all_secs:", "_utils as utils _LOG = logging.getLogger(__name__) bp = Blueprint(\"product\", __name__)", "_LOG = logging.getLogger(__name__) bp = Blueprint(\"product\", __name__) @bp.route(\"/about.csv\") def legacy_about_csv():", "secs % 1 != 0 else int(secs), ) parts =", ") return utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc(): resp = utils.as_yaml( *(", "when we want to loop products :) return Response( \"\\n\".join(t.name", "useful \"\"\" return redirect(url_for(\".products_page\")) @bp.route(\"/products\") def products_page(): return utils.render( \"products.html\",", "logging.getLogger(__name__) bp = Blueprint(\"product\", __name__) @bp.route(\"/about.csv\") def legacy_about_csv(): return redirect(\".storage_csv\")", "type_.definition, f\"Metadata Type {type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\", name=type_.name, _external=True ), )", "headers=( \"name\", \"count\", \"locations\", \"license\", \"definition\", \"summary_time\", \"metadata_type\", ), rows=(", "\"products.html\", ) 
@bp.route(\"/metadata-types\") def metadata_types_page(): return utils.render( \"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\")", "utils.prepare_document_formatting(metadata_type.definition) products_using_it = sorted( ( p for p in _model.STORE.index.products.get_all()", "# This is useful for bash scripts when we want", "metadata_type_page(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404, f\"Unknown metadata", "from datetime import timedelta from flask import Blueprint, Response, abort,", "return redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name): product = _model.STORE.index.products.get_by_name(name) if", "utils.suggest_download_filename( resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\", ) return resp def _iso8601_duration(tdelta: timedelta):", "abort, redirect, url_for from cubedash import _model, _utils, _utils as", "timedelta as an iso8601 duration >>> _iso8601_duration(timedelta(seconds=0)) 'PT0S' >>> _iso8601_duration(timedelta(seconds=1))", "summary.dataset_count, [ location.common_prefix for location in (product_locations.get(product.name) or []) ],", "legacy_about_csv(): return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def storage_csv(): \"\"\"Get the product storage", "have one. 
utils.suggest_download_filename( resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\", ) return resp def", ">>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\" all_secs = tdelta.total_seconds() secs = int(all_secs", "def storage_csv(): \"\"\"Get the product storage table as a CSV\"\"\"", "metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting( metadata_type.definition, \"Metadata Type\", include_source_url=True", ") @bp.route(\"/metadata-types\") def metadata_types_page(): return utils.render( \"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\") def", "@bp.route(\"/about.csv\") def legacy_about_csv(): return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def storage_csv(): \"\"\"Get the", "), ) for product in _model.STORE.all_dataset_types() ) ) # Add", "\"license\", \"definition\", \"summary_time\", \"metadata_type\", ), rows=( ( product.name, summary.dataset_count, [", "they have one. utils.suggest_download_filename( resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\", ) return resp", "_utils.product_license(product), url_for(\"product.raw_product_doc\", name=product.name, _external=True), summary.last_refresh_time, product.metadata_type.name, ) for product, summary", "return Response( \"\\n\".join(t.name for t in _model.STORE.all_metadata_types()), content_type=\"text/plain\", ) @bp.route(\"/audit/storage\")", "int(all_secs // 86400) if days: parts.append(f\"{days}D\") if any(h_m_s): parts.append(\"T\") if", "somewhere useful \"\"\" return redirect(url_for(\".products_page\")) @bp.route(\"/products\") def products_page(): return utils.render(", "them :) return Response( \"\\n\".join(t.name for t in _model.STORE.all_metadata_types()), content_type=\"text/plain\",", "utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( product.definition,", "= 
_model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404, f\"Unknown metadata type {name!r}\")", "@bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( product.definition, f\"Product", "t in _model.STORE.all_dataset_types()), content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\") def metadata_type_list_text(): # This", "name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name): product = _model.STORE.index.products.get_by_name(name) if not product:", "@bp.route(\"/products\") def products_page(): return utils.render( \"products.html\", ) @bp.route(\"/metadata-types\") def metadata_types_page():", "return Response( \"\\n\".join(t.name for t in _model.STORE.all_dataset_types()), content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\")", "= sorted( ( p for p in _model.STORE.index.products.get_all() if p.metadata_type.name", "product.definition, \"Product\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name): return", "type {name!r}\") ordered_metadata = utils.prepare_document_formatting(metadata_type.definition) products_using_it = sorted( ( p", "_model.STORE.products_location_samples_all() return utils.as_csv( filename_prefix=\"product-information\", headers=( \"name\", \"count\", \"locations\", \"license\", \"definition\",", "resp, prefix=\"products\", suffix=\".odc-product.yaml\", ) return resp @bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc(): resp", "raw_all_metadata_types_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( type_.definition, f\"Metadata Type {type_.name}\",", "'PT1S' >>> _iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\" all_secs =", "redirect(url_for(\".products_page\")) 
@bp.route(\"/products\") def products_page(): return utils.render( \"products.html\", ) @bp.route(\"/metadata-types\") def", "all_secs = tdelta.total_seconds() secs = int(all_secs % 60) h_m_s =", "\"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def", "return utils.render( \"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\", name=name))", "val, name in zip(h_m_s, [\"H\", \"M\", \"S\"]): if val: parts.append(f\"{val}{name}\")", "int(all_secs // 3600 % 24), int(all_secs // 60 % 60),", "for product, summary in _model.get_products_with_summaries() ], ) @bp.route(\"/product\") def product_redirect():", "parts.append(\"T\") if all_secs: for val, name in zip(h_m_s, [\"H\", \"M\",", "to the download filename if they have one. utils.suggest_download_filename( resp,", "def raw_all_products_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting( product.definition, f\"Product {product.name}\",", "<reponame>vconrado/datacube-explorer import logging from datetime import timedelta from flask import", "], _utils.product_license(product), url_for(\"product.raw_product_doc\", name=product.name, _external=True), summary.last_refresh_time, product.metadata_type.name, ) for product,", "\".raw_metadata_type_doc\", name=type_.name, _external=True ), ) for type_ in _model.STORE.all_metadata_types() ),", "any(h_m_s): parts.append(\"T\") if all_secs: for val, name in zip(h_m_s, [\"H\",", "raw_product_doc(name): product = _model.STORE.index.products.get_by_name(name) if not product: abort(404, f\"Unknown product", "{name!r}\") ordered_metadata = utils.prepare_document_formatting( metadata_type.definition, \"Metadata Type\", include_source_url=True ) return", "datetime import timedelta from flask import Blueprint, 
Response, abort, redirect,", "days: parts.append(f\"{days}D\") if any(h_m_s): parts.append(\"T\") if all_secs: for val, name", "= _model.STORE.products_location_samples_all() return utils.as_csv( filename_prefix=\"product-information\", headers=( \"name\", \"count\", \"locations\", \"license\",", "as utils _LOG = logging.getLogger(__name__) bp = Blueprint(\"product\", __name__) @bp.route(\"/about.csv\")", "_external=True ), ) for type_ in _model.STORE.all_metadata_types() ), ) #", "product.metadata_type.name, ) for product, summary in _model.get_products_with_summaries() ), ) @bp.route(\"/products.txt\")", "useful for bash scripts when we want to loop products", "), key=lambda p: p.name, ) return utils.render( \"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata,", "location in (product_locations.get(product.name) or []) ], _utils.product_license(product), url_for(\"product.raw_product_doc\", name=product.name, _external=True),", "p in _model.STORE.index.products.get_all() if p.metadata_type.name == name ), key=lambda p:", "= utils.as_yaml( *( utils.prepare_document_formatting( product.definition, f\"Product {product.name}\", include_source_url=url_for( \".raw_product_doc\", name=product.name,", "storage table as a CSV\"\"\" product_locations = _model.STORE.products_location_samples_all() return utils.as_csv(", "loop them :) return Response( \"\\n\".join(t.name for t in _model.STORE.all_metadata_types()),", "useful for bash scripts when we want to loop them", "\"Metadata Type\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc(): resp", "products_using_it = sorted( ( p for p in _model.STORE.index.products.get_all() if", "_model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata", "secs = int(all_secs % 60) h_m_s = ( int(all_secs //", "include_source_url=url_for( 
\".raw_metadata_type_doc\", name=type_.name, _external=True ), ) for type_ in _model.STORE.all_metadata_types()", "!= 0 else int(secs), ) parts = [\"P\"] days =", "include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc(): resp = utils.as_yaml(", "import Blueprint, Response, abort, redirect, url_for from cubedash import _model,", "utils _LOG = logging.getLogger(__name__) bp = Blueprint(\"product\", __name__) @bp.route(\"/about.csv\") def", ") for product, summary in _model.get_products_with_summaries() ), ) @bp.route(\"/products.txt\") def", "\"locations\", \"license\", \"definition\", \"summary_time\", \"metadata_type\", ), rows=( ( product.name, summary.dataset_count,", "], ) @bp.route(\"/product\") def product_redirect(): \"\"\" If people remove the", "for product in _model.STORE.all_dataset_types() ) ) # Add Explorer ID", "suffix=\".odc-product.yaml\", ) return resp @bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc(): resp = utils.as_yaml(", "60), secs if secs % 1 != 0 else int(secs),", "location.common_prefix for location in (product_locations.get(product.name) or []) ], _utils.product_license(product), url_for(\"product.raw_product_doc\",", "product: abort(404, f\"Unknown product {name!r}\") ordered_metadata = utils.prepare_document_formatting( product.definition, \"Product\",", "in _model.STORE.all_metadata_types()), content_type=\"text/plain\", ) @bp.route(\"/audit/storage\") def storage_page(): product_locations = _model.STORE.products_location_samples_all()", "CSV\"\"\" product_locations = _model.STORE.products_location_samples_all() return utils.as_csv( filename_prefix=\"product-information\", headers=( \"name\", \"count\",", "legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name): product = _model.STORE.index.products.get_by_name(name)", 
"@bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name): product = _model.STORE.index.products.get_by_name(name) if not product: abort(404,", "\"/product/<name>\" url, take them somewhere useful \"\"\" return redirect(url_for(\".products_page\")) @bp.route(\"/products\")", "not metadata_type: abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting(", "24), int(all_secs // 60 % 60), secs if secs %", "prefix=\"metadata-types\", suffix=\".odc-type.yaml\", ) return resp def _iso8601_duration(tdelta: timedelta): \"\"\" Format", "the product storage table as a CSV\"\"\" product_locations = _model.STORE.products_location_samples_all()", "product.definition, f\"Product {product.name}\", include_source_url=url_for( \".raw_product_doc\", name=product.name, _external=True ), ) for", "0 else int(secs), ) parts = [\"P\"] days = int(all_secs", "raw_metadata_type_doc(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404, f\"Unknown metadata", "@bp.route(\"/audit/storage.csv\") def storage_csv(): \"\"\"Get the product storage table as a", "metadata_types_page(): return utils.render( \"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\",", "the download filename if they have one. 
utils.suggest_download_filename( resp, prefix=\"metadata-types\",", "f\"Unknown metadata type {name!r}\") ordered_metadata = utils.prepare_document_formatting(metadata_type.definition) products_using_it = sorted(", "Format a timedelta as an iso8601 duration >>> _iso8601_duration(timedelta(seconds=0)) 'PT0S'", "@bp.route(\"/metadata-types/<name>\") def metadata_type_page(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404,", "{name!r}\") ordered_metadata = utils.prepare_document_formatting(metadata_type.definition) products_using_it = sorted( ( p for", "legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name)", "def metadata_types_page(): return utils.render( \"metadata-types.html\", ) @bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name): return", "filename_prefix=\"product-information\", headers=( \"name\", \"count\", \"locations\", \"license\", \"definition\", \"summary_time\", \"metadata_type\", ),", "@bp.route(\"/products.txt\") def product_list_text(): # This is useful for bash scripts", "metadata_type=metadata_type, metadata_doc=ordered_metadata, products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\", name=name))", "utils.render( \"products.html\", ) @bp.route(\"/metadata-types\") def metadata_types_page(): return utils.render( \"metadata-types.html\", )", "url_for from cubedash import _model, _utils, _utils as utils _LOG", "from a \"/product/<name>\" url, take them somewhere useful \"\"\" return", "bp = Blueprint(\"product\", __name__) @bp.route(\"/about.csv\") def legacy_about_csv(): return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\")", "secs if secs % 
1 != 0 else int(secs), )", "bash scripts when we want to loop products :) return", "Response( \"\\n\".join(t.name for t in _model.STORE.all_metadata_types()), content_type=\"text/plain\", ) @bp.route(\"/audit/storage\") def", "product_list_text(): # This is useful for bash scripts when we", "@bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404,", "@bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name): return redirect(url_for(\".raw_metadata_type_doc\", name=name)) @bp.route(\"/metadata-types/<name>.odc-type.yaml\") def raw_metadata_type_doc(name): metadata_type", "@bp.route(\"/metadata-type/<name>\") def legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\") def metadata_type_page(name): metadata_type", "if p.metadata_type.name == name ), key=lambda p: p.name, ) return", "utils.prepare_document_formatting( metadata_type.definition, \"Metadata Type\", include_source_url=True ) return utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def", ") @bp.route(\"/product\") def product_redirect(): \"\"\" If people remove the name", "filename if they have one. 
utils.suggest_download_filename( resp, prefix=\"metadata-types\", suffix=\".odc-type.yaml\", )", "the name from a \"/product/<name>\" url, take them somewhere useful", "utils.as_yaml( *( utils.prepare_document_formatting( product.definition, f\"Product {product.name}\", include_source_url=url_for( \".raw_product_doc\", name=product.name, _external=True", "_model.get_products_with_summaries() ], ) @bp.route(\"/product\") def product_redirect(): \"\"\" If people remove", "p.metadata_type.name == name ), key=lambda p: p.name, ) return utils.render(", "import logging from datetime import timedelta from flask import Blueprint,", "summary in _model.get_products_with_summaries() ], ) @bp.route(\"/product\") def product_redirect(): \"\"\" If", "return utils.as_yaml(ordered_metadata) @bp.route(\"/products.odc-product.yaml\") def raw_all_products_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting(", "people remove the name from a \"/product/<name>\" url, take them", "if not metadata_type: abort(404, f\"Unknown metadata type {name!r}\") ordered_metadata =", "f\"Unknown product {name!r}\") ordered_metadata = utils.prepare_document_formatting( product.definition, \"Product\", include_source_url=True )", "Add Explorer ID to the download filename if they have", "bash scripts when we want to loop them :) return", "legacy_metadata_type_page(name): return redirect(url_for(\".metadata_type_page\", name=name)) @bp.route(\"/metadata-types/<name>\") def metadata_type_page(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name)", "\"\"\" If people remove the name from a \"/product/<name>\" url,", "url_for(\"product.raw_product_doc\", name=product.name, _external=True), summary.last_refresh_time, product.metadata_type.name, ) for product, summary in", "def storage_page(): product_locations = _model.STORE.products_location_samples_all() return utils.render( \"storage.html\", product_summary_and_location=[ (product,", ") # Add Explorer ID to the download 
filename if", ") @bp.route(\"/audit/storage\") def storage_page(): product_locations = _model.STORE.products_location_samples_all() return utils.render( \"storage.html\",", "= int(all_secs % 60) h_m_s = ( int(all_secs // 3600", "type_ in _model.STORE.all_metadata_types() ), ) # Add Explorer ID to", "utils.render( \"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata, products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name): return", "int(secs), ) parts = [\"P\"] days = int(all_secs // 86400)", "parts.append(f\"{days}D\") if any(h_m_s): parts.append(\"T\") if all_secs: for val, name in", "t in _model.STORE.all_metadata_types()), content_type=\"text/plain\", ) @bp.route(\"/audit/storage\") def storage_page(): product_locations =", "product = _model.STORE.index.products.get_by_name(name) if not product: abort(404, f\"Unknown product {name!r}\")", ":) return Response( \"\\n\".join(t.name for t in _model.STORE.all_metadata_types()), content_type=\"text/plain\", )", "\"\\n\".join(t.name for t in _model.STORE.all_metadata_types()), content_type=\"text/plain\", ) @bp.route(\"/audit/storage\") def storage_page():", "Response, abort, redirect, url_for from cubedash import _model, _utils, _utils", "\"\"\" Format a timedelta as an iso8601 duration >>> _iso8601_duration(timedelta(seconds=0))", "_external=True), summary.last_refresh_time, product.metadata_type.name, ) for product, summary in _model.get_products_with_summaries() ),", "\"summary_time\", \"metadata_type\", ), rows=( ( product.name, summary.dataset_count, [ location.common_prefix for", ">>> _iso8601_duration(timedelta(seconds=1)) 'PT1S' >>> _iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S' \"\"\"", "(product, summary, (product_locations.get(product.name) or [])) for product, summary in _model.get_products_with_summaries()", "scripts when we 
want to loop products :) return Response(", "60 % 60), secs if secs % 1 != 0", "utils.render( \"storage.html\", product_summary_and_location=[ (product, summary, (product_locations.get(product.name) or [])) for product,", "def product_list_text(): # This is useful for bash scripts when", ") return utils.render( \"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata, products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def", "name=name)) @bp.route(\"/metadata-types/<name>\") def metadata_type_page(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type:", "def metadata_type_page(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404, f\"Unknown", "else int(secs), ) parts = [\"P\"] days = int(all_secs //", "_model.STORE.all_dataset_types()), content_type=\"text/plain\", ) @bp.route(\"/metadata-types.txt\") def metadata_type_list_text(): # This is useful", "to loop products :) return Response( \"\\n\".join(t.name for t in", "products :) return Response( \"\\n\".join(t.name for t in _model.STORE.all_dataset_types()), content_type=\"text/plain\",", "), rows=( ( product.name, summary.dataset_count, [ location.common_prefix for location in", "@bp.route(\"/product\") def product_redirect(): \"\"\" If people remove the name from", "if not product: abort(404, f\"Unknown product {name!r}\") ordered_metadata = utils.prepare_document_formatting(", "f\"Metadata Type {type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\", name=type_.name, _external=True ), ) for", "utils.as_yaml( *( utils.prepare_document_formatting( type_.definition, f\"Metadata Type {type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\", name=type_.name,", "products_page(): return utils.render( \"products.html\", ) @bp.route(\"/metadata-types\") def metadata_types_page(): return utils.render(", "return utils.render( 
\"metadata-type.html\", metadata_type=metadata_type, metadata_doc=ordered_metadata, products_using_it=products_using_it, ) @bp.route(\"/metadata-type/<name>.odc-type.yaml\") def legacy_metadata_type_doc(name):", "def raw_metadata_type_doc(name): metadata_type = _model.STORE.index.metadata_types.get_by_name(name) if not metadata_type: abort(404, f\"Unknown", "{type_.name}\", include_source_url=url_for( \".raw_metadata_type_doc\", name=type_.name, _external=True ), ) for type_ in", "sorted( ( p for p in _model.STORE.index.products.get_all() if p.metadata_type.name ==", "% 60) h_m_s = ( int(all_secs // 3600 % 24),", "return redirect(\".storage_csv\") @bp.route(\"/audit/storage.csv\") def storage_csv(): \"\"\"Get the product storage table", ") @bp.route(\"/product/<name>.odc-product.yaml\") def legacy_raw_product_doc(name): return redirect(url_for(\".raw_product_doc\", name=name)) @bp.route(\"/products/<name>.odc-product.yaml\") def raw_product_doc(name):", "product.name, summary.dataset_count, [ location.common_prefix for location in (product_locations.get(product.name) or [])", "86400) if days: parts.append(f\"{days}D\") if any(h_m_s): parts.append(\"T\") if all_secs: for", "return utils.render( \"storage.html\", product_summary_and_location=[ (product, summary, (product_locations.get(product.name) or [])) for", "return resp @bp.route(\"/metadata-types.odc-type.yaml\") def raw_all_metadata_types_doc(): resp = utils.as_yaml( *( utils.prepare_document_formatting(", "a CSV\"\"\" product_locations = _model.STORE.products_location_samples_all() return utils.as_csv( filename_prefix=\"product-information\", headers=( \"name\",", "_model, _utils, _utils as utils _LOG = logging.getLogger(__name__) bp =", ":) return Response( \"\\n\".join(t.name for t in _model.STORE.all_dataset_types()), content_type=\"text/plain\", )", "for val, name in zip(h_m_s, [\"H\", \"M\", \"S\"]): if val:", ") @bp.route(\"/products.txt\") def product_list_text(): # This is useful for bash", "resp = 
utils.as_yaml( *( utils.prepare_document_formatting( type_.definition, f\"Metadata Type {type_.name}\", include_source_url=url_for(", "if secs % 1 != 0 else int(secs), ) parts", "{name!r}\") ordered_metadata = utils.prepare_document_formatting( product.definition, \"Product\", include_source_url=True ) return utils.as_yaml(ordered_metadata)", "'PT0S' >>> _iso8601_duration(timedelta(seconds=1)) 'PT1S' >>> _iso8601_duration(timedelta(seconds=23423)) 'PT6H30M23S' >>> _iso8601_duration(timedelta(seconds=4564564556)) 'P52830DT14H35M56S'", "or []) ], _utils.product_license(product), url_for(\"product.raw_product_doc\", name=product.name, _external=True), summary.last_refresh_time, product.metadata_type.name, )", "= _model.STORE.products_location_samples_all() return utils.render( \"storage.html\", product_summary_and_location=[ (product, summary, (product_locations.get(product.name) or" ]
[ "* from litex.build.generic_platform import * from litex.build.gowin.platform import GowinPlatform from", "Rst (\"clk24\", 0, Pins(\"35\"), IOStandard(\"LVCMOS33\")), # Leds (\"user_led\", 0, Pins(\"16\"),", "IOStandard(\"LVCMOS33\")), # Buttons. (\"user_btn\", 0, Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\", 0, Pins(\"14\"),", "] # Connectors --------------------------------------------------------------------------------------- _connectors = [] # Platform -----------------------------------------------------------------------------------------", "Platform ----------------------------------------------------------------------------------------- class Platform(GowinPlatform): default_clk_name = \"clk24\" default_clk_period = 1e9/24e6", "Copyright (c) 2021 <NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause # Board", "diagram/pinout: # https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen import * from", "), ] # Connectors --------------------------------------------------------------------------------------- _connectors = [] # Platform", "# Connectors --------------------------------------------------------------------------------------- _connectors = [] # Platform ----------------------------------------------------------------------------------------- class", "part of LiteX-Boards. # # Copyright (c) 2021 <NAME> <<EMAIL>>", "----------------------------------------------------------------------------------------- class Platform(GowinPlatform): default_clk_name = \"clk24\" default_clk_period = 1e9/24e6 def", "litex.build.openfpgaloader import OpenFPGALoader # IOs ---------------------------------------------------------------------------------------------- _io = [ #", "Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 2, Pins(\"18\"), IOStandard(\"LVCMOS33\")), # Buttons. 
(\"user_btn\", 0,", "= \"clk24\" default_clk_period = 1e9/24e6 def __init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io,", "# Leds (\"user_led\", 0, Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 1, Pins(\"17\"), IOStandard(\"LVCMOS33\")),", "0, Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\") ), ] # Connectors", "1, Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 2, Pins(\"18\"), IOStandard(\"LVCMOS33\")), # Buttons. (\"user_btn\",", "0, Pins(\"35\"), IOStandard(\"LVCMOS33\")), # Leds (\"user_led\", 0, Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\",", "Leds (\"user_led\", 0, Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 1, Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\",", "# # This file is part of LiteX-Boards. # #", "# Buttons. (\"user_btn\", 0, Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\", 0, Pins(\"14\"), IOStandard(\"LVCMOS33\")),", "toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] = 1 def create_programmer(self): return OpenFPGALoader(\"tangnano\") def", "GowinPlatform from litex.build.openfpgaloader import OpenFPGALoader # IOs ---------------------------------------------------------------------------------------------- _io =", "* from litex.build.gowin.platform import GowinPlatform from litex.build.openfpgaloader import OpenFPGALoader #", "default_clk_period = 1e9/24e6 def __init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io, _connectors, toolchain=\"gowin\",", "IOStandard(\"LVCMOS33\")), # Serial (\"serial\", 0, Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\")", "def create_programmer(self): return OpenFPGALoader(\"tangnano\") def do_finalize(self, fragment): GowinPlatform.do_finalize(self, fragment) self.add_period_constraint(self.lookup_request(\"clk24\",", "BSD-2-Clause # Board diagram/pinout: # 
https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen", "(\"user_led\", 0, Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 1, Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 2,", "Pins(\"35\"), IOStandard(\"LVCMOS33\")), # Leds (\"user_led\", 0, Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 1,", "0, Pins(\"14\"), IOStandard(\"LVCMOS33\")), # Serial (\"serial\", 0, Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\",", "(\"user_btn\", 0, Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\", 0, Pins(\"14\"), IOStandard(\"LVCMOS33\")), # Serial", "from migen import * from litex.build.generic_platform import * from litex.build.gowin.platform", "\"GW1N-LV1QN48C6/I5\", _io, _connectors, toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] = 1 def create_programmer(self):", "LiteX-Boards. # # Copyright (c) 2021 <NAME> <<EMAIL>> # SPDX-License-Identifier:", "= 1 def create_programmer(self): return OpenFPGALoader(\"tangnano\") def do_finalize(self, fragment): GowinPlatform.do_finalize(self,", "create_programmer(self): return OpenFPGALoader(\"tangnano\") def do_finalize(self, fragment): GowinPlatform.do_finalize(self, fragment) self.add_period_constraint(self.lookup_request(\"clk24\", loose=True),", "# SPDX-License-Identifier: BSD-2-Clause # Board diagram/pinout: # https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf", "IOStandard(\"LVCMOS33\") ), ] # Connectors --------------------------------------------------------------------------------------- _connectors = [] #", "Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 1, Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 2, Pins(\"18\"), IOStandard(\"LVCMOS33\")),", 
"--------------------------------------------------------------------------------------- _connectors = [] # Platform ----------------------------------------------------------------------------------------- class Platform(GowinPlatform): default_clk_name", "def __init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io, _connectors, toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] =", "Pins(\"18\"), IOStandard(\"LVCMOS33\")), # Buttons. (\"user_btn\", 0, Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\", 0,", "# Board diagram/pinout: # https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen import", "# Clk / Rst (\"clk24\", 0, Pins(\"35\"), IOStandard(\"LVCMOS33\")), # Leds", "file is part of LiteX-Boards. # # Copyright (c) 2021", "from litex.build.generic_platform import * from litex.build.gowin.platform import GowinPlatform from litex.build.openfpgaloader", "Connectors --------------------------------------------------------------------------------------- _connectors = [] # Platform ----------------------------------------------------------------------------------------- class Platform(GowinPlatform):", "of LiteX-Boards. 
# # Copyright (c) 2021 <NAME> <<EMAIL>> #", "IOStandard(\"LVCMOS33\")), (\"user_led\", 1, Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 2, Pins(\"18\"), IOStandard(\"LVCMOS33\")), #", "https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen import * from litex.build.generic_platform import", "[ # Clk / Rst (\"clk24\", 0, Pins(\"35\"), IOStandard(\"LVCMOS33\")), #", "from litex.build.openfpgaloader import OpenFPGALoader # IOs ---------------------------------------------------------------------------------------------- _io = [", "\"clk24\" default_clk_period = 1e9/24e6 def __init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io, _connectors,", "return OpenFPGALoader(\"tangnano\") def do_finalize(self, fragment): GowinPlatform.do_finalize(self, fragment) self.add_period_constraint(self.lookup_request(\"clk24\", loose=True), 1e9/24e6)", "Serial (\"serial\", 0, Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\") ), ]", "GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io, _connectors, toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] = 1 def", "Platform(GowinPlatform): default_clk_name = \"clk24\" default_clk_period = 1e9/24e6 def __init__(self): GowinPlatform.__init__(self,", "import OpenFPGALoader # IOs ---------------------------------------------------------------------------------------------- _io = [ # Clk", "# IOs ---------------------------------------------------------------------------------------------- _io = [ # Clk / Rst", "Buttons. 
(\"user_btn\", 0, Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\", 0, Pins(\"14\"), IOStandard(\"LVCMOS33\")), #", "<NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause # Board diagram/pinout: # https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png", "litex.build.generic_platform import * from litex.build.gowin.platform import GowinPlatform from litex.build.openfpgaloader import", "Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\") ), ] # Connectors --------------------------------------------------------------------------------------- _connectors =", "litex.build.gowin.platform import GowinPlatform from litex.build.openfpgaloader import OpenFPGALoader # IOs ----------------------------------------------------------------------------------------------", "(\"user_led\", 1, Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 2, Pins(\"18\"), IOStandard(\"LVCMOS33\")), # Buttons.", "(\"user_led\", 2, Pins(\"18\"), IOStandard(\"LVCMOS33\")), # Buttons. (\"user_btn\", 0, Pins(\"15\"), IOStandard(\"LVCMOS33\")),", "0, Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\", 0, Pins(\"14\"), IOStandard(\"LVCMOS33\")), # Serial (\"serial\",", "IOStandard(\"LVCMOS33\")), (\"user_led\", 2, Pins(\"18\"), IOStandard(\"LVCMOS33\")), # Buttons. (\"user_btn\", 0, Pins(\"15\"),", "# https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen import * from litex.build.generic_platform", "is part of LiteX-Boards. 
# # Copyright (c) 2021 <NAME>", "Pins(\"14\"), IOStandard(\"LVCMOS33\")), # Serial (\"serial\", 0, Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")),", "# Serial (\"serial\", 0, Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\") ),", "<<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause # Board diagram/pinout: # https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png #", "IOs ---------------------------------------------------------------------------------------------- _io = [ # Clk / Rst (\"clk24\",", "= [ # Clk / Rst (\"clk24\", 0, Pins(\"35\"), IOStandard(\"LVCMOS33\")),", "2021 <NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause # Board diagram/pinout: #", "Clk / Rst (\"clk24\", 0, Pins(\"35\"), IOStandard(\"LVCMOS33\")), # Leds (\"user_led\",", "import * from litex.build.generic_platform import * from litex.build.gowin.platform import GowinPlatform", "IOStandard(\"LVCMOS33\")), # Leds (\"user_led\", 0, Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 1, Pins(\"17\"),", "migen import * from litex.build.generic_platform import * from litex.build.gowin.platform import", "import GowinPlatform from litex.build.openfpgaloader import OpenFPGALoader # IOs ---------------------------------------------------------------------------------------------- _io", "= 1e9/24e6 def __init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io, _connectors, toolchain=\"gowin\", devicename=\"GW1N-1\")", "_io = [ # Clk / Rst (\"clk24\", 0, Pins(\"35\"),", "Pins(\"9\")), IOStandard(\"LVCMOS33\") ), ] # Connectors --------------------------------------------------------------------------------------- _connectors = []", "Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\") ), ] # Connectors --------------------------------------------------------------------------------------- _connectors", "Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\", 0, 
Pins(\"14\"), IOStandard(\"LVCMOS33\")), # Serial (\"serial\", 0,", "(c) 2021 <NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause # Board diagram/pinout:", "devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] = 1 def create_programmer(self): return OpenFPGALoader(\"tangnano\") def do_finalize(self,", "__init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io, _connectors, toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] = 1", "# Copyright (c) 2021 <NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause #", "# Platform ----------------------------------------------------------------------------------------- class Platform(GowinPlatform): default_clk_name = \"clk24\" default_clk_period =", "Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\") ), ] # Connectors ---------------------------------------------------------------------------------------", "# http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen import * from litex.build.generic_platform import *", "1 def create_programmer(self): return OpenFPGALoader(\"tangnano\") def do_finalize(self, fragment): GowinPlatform.do_finalize(self, fragment)", "_connectors = [] # Platform ----------------------------------------------------------------------------------------- class Platform(GowinPlatform): default_clk_name =", "# This file is part of LiteX-Boards. 
# # Copyright", "0, Pins(\"16\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 1, Pins(\"17\"), IOStandard(\"LVCMOS33\")), (\"user_led\", 2, Pins(\"18\"),", "Board diagram/pinout: # https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen import *", "class Platform(GowinPlatform): default_clk_name = \"clk24\" default_clk_period = 1e9/24e6 def __init__(self):", "[] # Platform ----------------------------------------------------------------------------------------- class Platform(GowinPlatform): default_clk_name = \"clk24\" default_clk_period", "(\"serial\", 0, Subsignal(\"tx\", Pins(\"8\")), Subsignal(\"rx\", Pins(\"9\")), IOStandard(\"LVCMOS33\") ), ] #", "from litex.build.gowin.platform import GowinPlatform from litex.build.openfpgaloader import OpenFPGALoader # IOs", "/ Rst (\"clk24\", 0, Pins(\"35\"), IOStandard(\"LVCMOS33\")), # Leds (\"user_led\", 0,", "= [] # Platform ----------------------------------------------------------------------------------------- class Platform(GowinPlatform): default_clk_name = \"clk24\"", "self.toolchain.options[\"use_done_as_gpio\"] = 1 def create_programmer(self): return OpenFPGALoader(\"tangnano\") def do_finalize(self, fragment):", "This file is part of LiteX-Boards. # # Copyright (c)", "http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from migen import * from litex.build.generic_platform import * from", "IOStandard(\"LVCMOS33\")), (\"user_btn\", 0, Pins(\"14\"), IOStandard(\"LVCMOS33\")), # Serial (\"serial\", 0, Subsignal(\"tx\",", "2, Pins(\"18\"), IOStandard(\"LVCMOS33\")), # Buttons. 
(\"user_btn\", 0, Pins(\"15\"), IOStandard(\"LVCMOS33\")), (\"user_btn\",", "import * from litex.build.gowin.platform import GowinPlatform from litex.build.openfpgaloader import OpenFPGALoader", "_io, _connectors, toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] = 1 def create_programmer(self): return", "1e9/24e6 def __init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\", _io, _connectors, toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"]", "(\"clk24\", 0, Pins(\"35\"), IOStandard(\"LVCMOS33\")), # Leds (\"user_led\", 0, Pins(\"16\"), IOStandard(\"LVCMOS33\")),", "_connectors, toolchain=\"gowin\", devicename=\"GW1N-1\") self.toolchain.options[\"use_done_as_gpio\"] = 1 def create_programmer(self): return OpenFPGALoader(\"tangnano\")", "OpenFPGALoader # IOs ---------------------------------------------------------------------------------------------- _io = [ # Clk /", "SPDX-License-Identifier: BSD-2-Clause # Board diagram/pinout: # https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png # http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf from", "---------------------------------------------------------------------------------------------- _io = [ # Clk / Rst (\"clk24\", 0,", "(\"user_btn\", 0, Pins(\"14\"), IOStandard(\"LVCMOS33\")), # Serial (\"serial\", 0, Subsignal(\"tx\", Pins(\"8\")),", "# # Copyright (c) 2021 <NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause", "default_clk_name = \"clk24\" default_clk_period = 1e9/24e6 def __init__(self): GowinPlatform.__init__(self, \"GW1N-LV1QN48C6/I5\"," ]
[ "baseline [2], computed on advantages estimated with Generalized Advantage Estimation", "[] losses = [] for task in tasks: self.sampler.reset_task(task) self.policy.reset_context()", "described in their paper) if i == 1 and halve_lr:", "mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(kls,", "weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(kls, dim=0)) def hessian_vector_product(self,", "update [1]. \"\"\" if lr is None: lr = self.fast_lr", "all the tasks `tasks`.batchsize \"\"\" episodes_per_task = [] for task", "weights=mask) kls.append(kl) return torch.mean(torch.stack(kls, dim=0)) def hessian_vector_product(self, episodes, damping=1e-2): \"\"\"Hessian-vector", "Compute the step direction with Conjugate Gradient hessian_vector_product = self.hessian_vector_product(episodes,", "Control Using Generalized Advantage Estimation\", 2016 (https://arxiv.org/abs/1506.02438) [4] <NAME>, <NAME>,", "_ in range(ls_max_steps): vector_to_parameters(old_params - step_size * step, self.policy.parameters()) loss,", "/ max_kl) step = stepdir / lagrange_multiplier # Save the", "Optimization\", 2015 (https://arxiv.org/abs/1502.05477) \"\"\" def __init__(self, sampler, policy, baseline, gamma=0.95,", "log_probs = torch.sum(log_probs, dim=2) loss = -weighted_mean(log_probs * advantages, dim=0,", "= weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio = (pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim()", "def test(self, tasks, num_steps, batch_size, halve_lr): \"\"\"Sample trajectories (before and", "Conjugate Gradient hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping) stepdir = conjugate_gradient(hessian_vector_product, grads,", "the tasks `tasks`.batchsize \"\"\" episodes_per_task = [] for task in", "the Perlmutter method.\"\"\" def 
_product(vector): kl = self.kl_divergence(episodes) grads =", "[], [], [] if old_pis is None: old_pis = [None]", "2: mask = mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask)", "tasks `tasks`.batchsize \"\"\" episodes_per_task = [] for task in tasks:", "None): # get action values after inner-loop update pi =", "self.sampler.sample(self.policy, gamma=self.gamma) # inner loop (for CAVIA, this only updates", "parameters based on the inner-loss, and perform the meta-update. [1]", "halve_lr): \"\"\"Sample trajectories (before and after the update of the", "fast_lr=0.5, tau=1.0, device='cpu'): self.sampler = sampler self.policy = policy self.baseline", "mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(losses,", "step, self.policy.parameters()) loss, kl, _ = self.surrogate_loss(episodes, old_pis=old_pis) improve =", "= -weighted_mean(ratio * advantages, dim=0, weights=valid_episodes.mask) losses.append(loss) mask = valid_episodes.mask", "the new parameters after a one-step gradient update params =", "the meta-update. [1] <NAME>, <NAME>, <NAME>, \"Model-Agnostic Meta-Learning for Fast", "the inner loss for the one-step gradient update. 
The inner", "episodes = [] losses = [] for task in tasks:", "self.sampler.reset_task(task) self.policy.reset_context() train_episodes = self.sampler.sample(self.policy, gamma=self.gamma) # inner loop (for", "= torch.autograd.grad(kl, self.policy.parameters(), create_graph=True) flat_grad_kl = parameters_to_vector(grads) grad_kl_v = torch.dot(flat_grad_kl,", "dim=2) ratio = torch.exp(log_ratio) loss = -weighted_mean(ratio * advantages, dim=0,", "detach_distribution, weighted_normalize) class MetaLearner(object): \"\"\"Meta-learner The meta-learner is responsible for", "cg_iters=cg_iters) # Compute the Lagrange multiplier shs = 0.5 *", "torch.sum(log_ratio, dim=2) ratio = torch.exp(log_ratio) loss = -weighted_mean(ratio * advantages,", "is REINFORCE with baseline [2], computed on advantages estimated with", "parameters_to_vector(self.policy.parameters()) print() # Line search step_size = 1.0 for _", "1): # lower learning rate after first update (for MAML,", "ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step (ie. 
update of the initial parameters),", "self.fast_lr # Fit the baseline to the training episodes self.baseline.fit(episodes)", "ratio = torch.exp(log_ratio) loss = -weighted_mean(ratio * advantages, dim=0, weights=valid_episodes.mask)", "CAVIA, this only updates the context parameters) params, loss =", "the Lagrange multiplier shs = 0.5 * torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier", "# Line search step_size = 1.0 for _ in range(ls_max_steps):", "vector) grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl = parameters_to_vector(grad2s) return flat_grad2_kl", "trajectories (before and after the update of the parameters) for", "= 0.5 * torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier = torch.sqrt(shs / max_kl)", "old_loss if (improve.item() < 0.0) and (kl.item() < max_kl): break", "Meta-Learning for Fast Adaptation of Deep Networks\", 2017 (https://arxiv.org/abs/1703.03400) [2]", "log_probs = pi.log_prob(episodes.actions) if log_probs.dim() > 2: log_probs = torch.sum(log_probs,", "<NAME>, \"Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks\", 2017", "= self.surrogate_loss(episodes) # this part will take higher order gradients", "1 and halve_lr: lr = self.fast_lr / 2 else: lr", "self.policy.parameters(), create_graph=True) flat_grad_kl = parameters_to_vector(grads) grad_kl_v = torch.dot(flat_grad_kl, vector) grad2s", "params, loss = self.adapt(test_episodes, first_order=True, params=params, lr=lr) # get new", "vector return _product def surrogate_loss(self, episodes, old_pis=None): losses, kls, pis", "Estimation\", 2016 (https://arxiv.org/abs/1506.02438) [4] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"Trust", "reset context params (for cavia) and task self.policy.reset_context() self.sampler.reset_task(task) #", "perform the meta-update. 
[1] <NAME>, <NAME>, <NAME>, \"Model-Agnostic Meta-Learning for", "initial experience and log performance test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params,", "self.policy(valid_episodes.observations, params=params) if old_pi is None: old_pi = detach_distribution(pi) mask", "first_order=True, params=params, lr=lr) # get new rollouts test_episodes = self.sampler.sample(self.policy,", "for all the tasks `tasks`.batchsize \"\"\" episodes_per_task = [] for", "torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl = parameters_to_vector(grad2s) return flat_grad2_kl + damping *", "= self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) # initialise list which will", "-weighted_mean(ratio * advantages, dim=0, weights=valid_episodes.mask) losses.append(loss) mask = valid_episodes.mask if", "parameters), based on Trust Region Policy Optimization (TRPO, [4]). \"\"\"", "[1]. \"\"\" if lr is None: lr = self.fast_lr #", "loss = -weighted_mean(ratio * advantages, dim=0, weights=valid_episodes.mask) losses.append(loss) mask =", "step(self, episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step (ie.", "params=None): \"\"\"Compute the inner loss for the one-step gradient update.", "# start with blank params params = None # gather", "loss is REINFORCE with baseline [2], computed on advantages estimated", "= self.policy(valid_episodes.observations, params=params) if old_pi is None: old_pi = detach_distribution(pi)", "<NAME>, \"Reinforcement learning: An introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>, <NAME>,", "torch.distributions.kl import kl_divergence from torch.nn.utils.convert_parameters import (vector_to_parameters, parameters_to_vector) from rl_utils.optimization", "meta-update. 
[1] <NAME>, <NAME>, <NAME>, \"Model-Agnostic Meta-Learning for Fast Adaptation", "episodes, params=None): \"\"\"Compute the inner loss for the one-step gradient", "zip(episodes, old_pis): # do inner-loop update self.policy.reset_context() params, _ =", "tau=1.0, device='cpu'): self.sampler = sampler self.policy = policy self.baseline =", "< max_kl): break step_size *= ls_backtrack_ratio else: print('no update?') vector_to_parameters(old_params,", "old_pi in zip(episodes, old_pis): # do inner-loop update self.policy.reset_context() params,", "method.\"\"\" def _product(vector): kl = self.kl_divergence(episodes) grads = torch.autograd.grad(kl, self.policy.parameters(),", "if log_ratio.dim() > 2: log_ratio = torch.sum(log_ratio, dim=2) ratio =", "episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=episodes.mask) pi = self.policy(episodes.observations, params=params)", "is the inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes) pi", "start with blank params params = None # gather some", "self.adapt(train_episodes) with torch.set_grad_enabled(old_pi is None): # get action values after", "= gamma self.fast_lr = fast_lr self.tau = tau self.to(device) def", "# lower learning rate after first update (for MAML, as", "> 2: mask = mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi, old_pi), dim=0,", "* torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier = torch.sqrt(shs / max_kl) step =", "params, _ = self.adapt(train_episodes) with torch.set_grad_enabled(old_pi is None): # get", "step_size) return loss def to(self, device, **kwargs): self.policy.to(device, **kwargs) self.baseline.to(device,", "print('kl:', kl.item()) print('step_size:', step_size) return loss def to(self, device, **kwargs):", "rl_utils.optimization import conjugate_gradient from rl_utils.torch_utils import (weighted_mean, detach_distribution, weighted_normalize) class", "task curr_episodes = [test_episodes] for i in 
range(1, num_steps +", "update of the initial parameters), based on Trust Region Policy", "valid_episodes = self.sampler.sample(self.policy, params=params, gamma=self.gamma) episodes.append((train_episodes, valid_episodes)) losses.append(loss.item()) return episodes,", "create_graph=True) flat_grad_kl = parameters_to_vector(grads) grad_kl_v = torch.dot(flat_grad_kl, vector) grad2s =", "i in range(1, num_steps + 1): # lower learning rate", "with blank params params = None # gather some initial", "\"Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks\", 2017 (https://arxiv.org/abs/1703.03400)", "self.surrogate_loss(episodes) # this part will take higher order gradients through", "kls = [] if old_pis is None: old_pis = [None]", "max_kl) step = stepdir / lagrange_multiplier # Save the old", "training episodes loss = self.inner_loss(episodes, params=params) # Get the new", "improve = loss - old_loss if (improve.item() < 0.0) and", "An introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,", "loss for the one-step gradient update. 
The inner loss is", "experience and log performance test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size)", "# initialise list which will log all rollouts for the", "detach_distribution(pi) values = self.baseline(valid_episodes) advantages = valid_episodes.gae(values, tau=self.tau) advantages =", "self.baseline(valid_episodes) advantages = valid_episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio", "self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context() return episodes_per_task def", "# inner-loop update params, loss = self.adapt(test_episodes, first_order=True, params=params, lr=lr)", "halve_lr: lr = self.fast_lr / 2 else: lr = self.fast_lr", "loop update valid_episodes = self.sampler.sample(self.policy, params=params, gamma=self.gamma) episodes.append((train_episodes, valid_episodes)) losses.append(loss.item())", "higher order gradients through the inner loop: grads = torch.autograd.grad(old_loss,", "old_pi = detach_distribution(pi) mask = valid_episodes.mask if valid_episodes.actions.dim() > 2:", "self.fast_lr = fast_lr self.tau = tau self.to(device) def inner_loss(self, episodes,", "grads = torch.autograd.grad(kl, self.policy.parameters(), create_graph=True) flat_grad_kl = parameters_to_vector(grads) grad_kl_v =", "old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim() > 2: log_ratio = torch.sum(log_ratio, dim=2) ratio", "`episodes`, with a one-step gradient update [1]. 
\"\"\" if lr", "params=params) log_probs = pi.log_prob(episodes.actions) if log_probs.dim() > 2: log_probs =", "self.tau = tau self.to(device) def inner_loss(self, episodes, params=None): \"\"\"Compute the", "old_loss, _, old_pis = self.surrogate_loss(episodes) # this part will take", "# Save the old parameters old_params = parameters_to_vector(self.policy.parameters()) print() #", "* step, self.policy.parameters()) loss, kl, _ = self.surrogate_loss(episodes, old_pis=old_pis) improve", "inner loss is REINFORCE with baseline [2], computed on advantages", "hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping) stepdir = conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters) #", "def kl_divergence(self, episodes, old_pis=None): kls = [] if old_pis is", "old_pi is None: old_pi = detach_distribution(pi) values = self.baseline(valid_episodes) advantages", "\"\"\" if lr is None: lr = self.fast_lr # Fit", "(GAE, [3]). \"\"\" values = self.baseline(episodes) advantages = episodes.gae(values, tau=self.tau)", "new rollouts test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes)", "return loss def adapt(self, episodes, first_order=False, params=None, lr=None): \"\"\"Adapt the", "def surrogate_loss(self, episodes, old_pis=None): losses, kls, pis = [], [],", "old_pi = detach_distribution(pi) values = self.baseline(valid_episodes) advantages = valid_episodes.gae(values, tau=self.tau)", "in tasks: # reset context params (for cavia) and task", "= self.surrogate_loss(episodes, old_pis=old_pis) improve = loss - old_loss if (improve.item()", "to a new task, from sampled trajectories `episodes`, with a", "lagrange_multiplier # Save the old parameters old_params = parameters_to_vector(self.policy.parameters()) print()", "self.surrogate_loss(episodes, old_pis=old_pis) improve = loss - old_loss if (improve.item() <", "if 
(improve.item() < 0.0) and (kl.item() < max_kl): break step_size", "params=params) if old_pi is None: old_pi = detach_distribution(pi) mask =", "weights=mask) kls.append(kl) return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)), pis def step(self,", "lr=lr) # get new rollouts test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params,", "for task in tasks: # reset context params (for cavia)", "torch.dot(flat_grad_kl, vector) grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl = parameters_to_vector(grad2s) return", "torch.mean(torch.stack(kls, dim=0)), pis def step(self, episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2, ls_max_steps=10,", "old_pi is None: old_pi = detach_distribution(pi) mask = valid_episodes.mask if", "self.fast_lr / 2 else: lr = self.fast_lr # inner-loop update", "inner loss for the one-step gradient update. The inner loss", "for Fast Adaptation of Deep Networks\", 2017 (https://arxiv.org/abs/1703.03400) [2] <NAME>,", "advantages = episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=episodes.mask) pi =", "the loss on the training episodes loss = self.inner_loss(episodes, params=params)", "len(episodes) for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis): # this", "= self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi)) if old_pi is None: old_pi =", "vector_to_parameters(old_params, self.policy.parameters()) print('improve:', improve.item()) print('kl:', kl.item()) print('step_size:', step_size) return loss", "get action values after inner-loop update pi = self.policy(valid_episodes.observations, params=params)", "old_pis): # do inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes)", "= 1.0 for _ in range(ls_max_steps): vector_to_parameters(old_params - step_size *", "= self.baseline(valid_episodes) advantages = valid_episodes.gae(values, 
tau=self.tau) advantages = weighted_normalize(advantages, weights=valid_episodes.mask)", "from rl_utils.optimization import conjugate_gradient from rl_utils.torch_utils import (weighted_mean, detach_distribution, weighted_normalize)", "with Conjugate Gradient hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping) stepdir = conjugate_gradient(hessian_vector_product,", "self.to(device) def inner_loss(self, episodes, params=None): \"\"\"Compute the inner loss for", "= parameters_to_vector(grads) grad_kl_v = torch.dot(flat_grad_kl, vector) grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters())", "episodes.append((train_episodes, valid_episodes)) losses.append(loss.item()) return episodes, losses def test(self, tasks, num_steps,", "the inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes) pi =", "self.adapt(train_episodes, first_order=first_order) # rollouts after inner loop update valid_episodes =", "to(self, device, **kwargs): self.policy.to(device, **kwargs) self.baseline.to(device, **kwargs) self.device = device", "old_pi in zip(episodes, old_pis): # this is the inner-loop update", "Fast Adaptation of Deep Networks\", 2017 (https://arxiv.org/abs/1703.03400) [2] <NAME>, <NAME>,", "<NAME>, <NAME>, <NAME>, \"Trust Region Policy Optimization\", 2015 (https://arxiv.org/abs/1502.05477) \"\"\"", "[2], computed on advantages estimated with Generalized Advantage Estimation (GAE,", "compute the updated parameters based on the inner-loss, and perform", "self.sampler = sampler self.policy = policy self.baseline = baseline self.gamma", "Networks\", 2017 (https://arxiv.org/abs/1703.03400) [2] <NAME>, <NAME>, \"Reinforcement learning: An introduction\",", "self.policy.reset_context() params, _ = self.adapt(train_episodes) pi = self.policy(valid_episodes.observations, params=params) if", "on the training episodes loss = self.inner_loss(episodes, params=params) # Get", "Generalized Advantage Estimation (GAE, [3]). 
\"\"\" values = self.baseline(episodes) advantages", "parameters) for all the tasks `tasks`. \"\"\" episodes = []", "if old_pis is None: old_pis = [None] * len(episodes) for", "self.baseline = baseline self.gamma = gamma self.fast_lr = fast_lr self.tau", "introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"High-Dimensional", "in zip(episodes, old_pis): # this is the inner-loop update self.policy.reset_context()", "lr is None: lr = self.fast_lr # Fit the baseline", "+ damping * vector return _product def surrogate_loss(self, episodes, old_pis=None):", "old parameters old_params = parameters_to_vector(self.policy.parameters()) print() # Line search step_size", "_, old_pis = self.surrogate_loss(episodes) # this part will take higher", "damping=1e-2): \"\"\"Hessian-vector product, based on the Perlmutter method.\"\"\" def _product(vector):", "MetaLearner(object): \"\"\"Meta-learner The meta-learner is responsible for sampling the trajectories/episodes", "def step(self, episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step", "def hessian_vector_product(self, episodes, damping=1e-2): \"\"\"Hessian-vector product, based on the Perlmutter", "one-step adaptation), compute the inner loss, compute the updated parameters", "of the initial parameters), based on Trust Region Policy Optimization", "after the one-step adaptation), compute the inner loss, compute the", "old_pis): # this is the inner-loop update self.policy.reset_context() params, _", "loss def adapt(self, episodes, first_order=False, params=None, lr=None): \"\"\"Adapt the parameters", "dim=0, weights=valid_episodes.mask) losses.append(loss) mask = valid_episodes.mask if valid_episodes.actions.dim() > 2:", "step_size *= ls_backtrack_ratio else: print('no update?') vector_to_parameters(old_params, self.policy.parameters()) print('improve:', improve.item())", "None: old_pis = [None] 
* len(episodes) for (train_episodes, valid_episodes), old_pi", "print('step_size:', step_size) return loss def to(self, device, **kwargs): self.policy.to(device, **kwargs)", "inner loop (for CAVIA, this only updates the context parameters)", "in range(ls_max_steps): vector_to_parameters(old_params - step_size * step, self.policy.parameters()) loss, kl,", "weighted_normalize(advantages, weights=episodes.mask) pi = self.policy(episodes.observations, params=params) log_probs = pi.log_prob(episodes.actions) if", "lr = self.fast_lr # Fit the baseline to the training", "# Compute the Lagrange multiplier shs = 0.5 * torch.dot(stepdir,", "after the update of the parameters) for all the tasks", "mask = valid_episodes.mask if valid_episodes.actions.dim() > 2: mask = mask.unsqueeze(2)", "the old parameters old_params = parameters_to_vector(self.policy.parameters()) print() # Line search", "improve.item()) print('kl:', kl.item()) print('step_size:', step_size) return loss def to(self, device,", "\"\"\" def __init__(self, sampler, policy, baseline, gamma=0.95, fast_lr=0.5, tau=1.0, device='cpu'):", "2015 (https://arxiv.org/abs/1502.05477) \"\"\" def __init__(self, sampler, policy, baseline, gamma=0.95, fast_lr=0.5,", "<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"High-Dimensional Continuous Control Using Generalized", "<NAME>, <NAME>, <NAME>, <NAME>, \"High-Dimensional Continuous Control Using Generalized Advantage", "import (weighted_mean, detach_distribution, weighted_normalize) class MetaLearner(object): \"\"\"Meta-learner The meta-learner is", "responsible for sampling the trajectories/episodes (before and after the one-step", "Perlmutter method.\"\"\" def _product(vector): kl = self.kl_divergence(episodes) grads = torch.autograd.grad(kl,", "adaptation), compute the inner loss, compute the updated parameters based", "with a one-step gradient update [1]. 
\"\"\" if lr is", "tasks, first_order=False): \"\"\"Sample trajectories (before and after the update of", "_ = self.adapt(train_episodes) with torch.set_grad_enabled(old_pi is None): # get action", "params=params) return params, loss def sample(self, tasks, first_order=False): \"\"\"Sample trajectories", "pis def step(self, episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization", "valid_episodes), old_pi in zip(episodes, old_pis): # do inner-loop update self.policy.reset_context()", "damping=cg_damping) stepdir = conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters) # Compute the Lagrange", "= (pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim() > 2: log_ratio =", "torch.autograd.grad(kl, self.policy.parameters(), create_graph=True) flat_grad_kl = parameters_to_vector(grads) grad_kl_v = torch.dot(flat_grad_kl, vector)", "[4]). \"\"\" old_loss, _, old_pis = self.surrogate_loss(episodes) # this part", "a one-step gradient update params = self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params)", "self.policy.parameters()) flat_grad2_kl = parameters_to_vector(grad2s) return flat_grad2_kl + damping * vector", "torch.set_grad_enabled(old_pi is None): # get action values after inner-loop update", "# Compute the step direction with Conjugate Gradient hessian_vector_product =", "= self.adapt(test_episodes, first_order=True, params=params, lr=lr) # get new rollouts test_episodes", "task in tasks: # reset context params (for cavia) and", "print('improve:', improve.item()) print('kl:', kl.item()) print('step_size:', step_size) return loss def to(self,", "# reset context params (for cavia) and task self.policy.reset_context() self.sampler.reset_task(task)", "tau=self.tau) advantages = weighted_normalize(advantages, weights=episodes.mask) pi = self.policy(episodes.observations, params=params) log_probs", "and after the update 
of the parameters) for all the", "*= ls_backtrack_ratio else: print('no update?') vector_to_parameters(old_params, self.policy.parameters()) print('improve:', improve.item()) print('kl:',", "of the parameters) for all the tasks `tasks`. \"\"\" episodes", "inner loop update valid_episodes = self.sampler.sample(self.policy, params=params, gamma=self.gamma) episodes.append((train_episodes, valid_episodes))", "= self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params) return params, loss def sample(self,", "from rl_utils.torch_utils import (weighted_mean, detach_distribution, weighted_normalize) class MetaLearner(object): \"\"\"Meta-learner The", "[] for task in tasks: self.sampler.reset_task(task) self.policy.reset_context() train_episodes = self.sampler.sample(self.policy,", "torch.sum(log_probs, dim=2) loss = -weighted_mean(log_probs * advantages, dim=0, weights=episodes.mask) return", "[1] <NAME>, <NAME>, <NAME>, \"Model-Agnostic Meta-Learning for Fast Adaptation of", "losses.append(loss) mask = valid_episodes.mask if valid_episodes.actions.dim() > 2: mask =", "trajectories `episodes`, with a one-step gradient update [1]. \"\"\" if", "rate after first update (for MAML, as described in their", "inner-loop update params, loss = self.adapt(test_episodes, first_order=True, params=params, lr=lr) #", "self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi)) if old_pi is None: old_pi = detach_distribution(pi)", "Region Policy Optimization (TRPO, [4]). 
\"\"\" old_loss, _, old_pis =", "torch.nn.utils.convert_parameters import (vector_to_parameters, parameters_to_vector) from rl_utils.optimization import conjugate_gradient from rl_utils.torch_utils", "first update (for MAML, as described in their paper) if", "new parameters after a one-step gradient update params = self.policy.update_params(loss,", "values = self.baseline(valid_episodes) advantages = valid_episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages,", "Gradient hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping) stepdir = conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters)", "surrogate_loss(self, episodes, old_pis=None): losses, kls, pis = [], [], []", "Policy Optimization\", 2015 (https://arxiv.org/abs/1502.05477) \"\"\" def __init__(self, sampler, policy, baseline,", "task in tasks: self.sampler.reset_task(task) self.policy.reset_context() train_episodes = self.sampler.sample(self.policy, gamma=self.gamma) #", "first_order=first_order, params=params) return params, loss def sample(self, tasks, first_order=False): \"\"\"Sample", "the trajectories/episodes (before and after the one-step adaptation), compute the", "gradient update params = self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params) return params,", "(improve.item() < 0.0) and (kl.item() < max_kl): break step_size *=", "(before and after the one-step adaptation), compute the inner loss,", "inner loop: grads = torch.autograd.grad(old_loss, self.policy.parameters()) grads = parameters_to_vector(grads) #", "hessian_vector_product(self, episodes, damping=1e-2): \"\"\"Hessian-vector product, based on the Perlmutter method.\"\"\"", "test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) # initialise list which", "for sampling the trajectories/episodes (before and after the one-step adaptation),", "params = None # gather some initial experience and 
log", "with torch.set_grad_enabled(old_pi is None): # get action values after inner-loop", "torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier = torch.sqrt(shs / max_kl) step = stepdir", "Get the new parameters after a one-step gradient update params", "Continuous Control Using Generalized Advantage Estimation\", 2016 (https://arxiv.org/abs/1506.02438) [4] <NAME>,", "[], [] if old_pis is None: old_pis = [None] *", "test(self, tasks, num_steps, batch_size, halve_lr): \"\"\"Sample trajectories (before and after", "= self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context() return episodes_per_task", "dim=0)), pis def step(self, episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5):", "all the tasks `tasks`. \"\"\" episodes = [] losses =", "blank params params = None # gather some initial experience", "2: log_ratio = torch.sum(log_ratio, dim=2) ratio = torch.exp(log_ratio) loss =", "num_steps, batch_size, halve_lr): \"\"\"Sample trajectories (before and after the update", "and log performance test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) #", "grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl = parameters_to_vector(grad2s) return flat_grad2_kl +", "<NAME>, <NAME>, \"Reinforcement learning: An introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>,", "def adapt(self, episodes, first_order=False, params=None, lr=None): \"\"\"Adapt the parameters of", "episodes self.baseline.fit(episodes) # Get the loss on the training episodes", "kls.append(kl) return torch.mean(torch.stack(kls, dim=0)) def hessian_vector_product(self, episodes, damping=1e-2): \"\"\"Hessian-vector product,", "episodes_per_task def kl_divergence(self, episodes, old_pis=None): kls = [] if old_pis", 
"kl_divergence(self, episodes, old_pis=None): kls = [] if old_pis is None:", "loss def to(self, device, **kwargs): self.policy.to(device, **kwargs) self.baseline.to(device, **kwargs) self.device", "= self.kl_divergence(episodes) grads = torch.autograd.grad(kl, self.policy.parameters(), create_graph=True) flat_grad_kl = parameters_to_vector(grads)", "range(1, num_steps + 1): # lower learning rate after first", "gradient update. The inner loss is REINFORCE with baseline [2],", "update params = self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params) return params, loss", "flat_grad2_kl + damping * vector return _product def surrogate_loss(self, episodes,", "episodes, old_pis=None): losses, kls, pis = [], [], [] if", "inner_loss(self, episodes, params=None): \"\"\"Compute the inner loss for the one-step", "if valid_episodes.actions.dim() > 2: mask = mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi,", "/ lagrange_multiplier # Save the old parameters old_params = parameters_to_vector(self.policy.parameters())", "= episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=episodes.mask) pi = self.policy(episodes.observations,", "return params, loss def sample(self, tasks, first_order=False): \"\"\"Sample trajectories (before", "(train_episodes, valid_episodes), old_pi in zip(episodes, old_pis): # this is the", "of Deep Networks\", 2017 (https://arxiv.org/abs/1703.03400) [2] <NAME>, <NAME>, \"Reinforcement learning:", "Fit the baseline to the training episodes self.baseline.fit(episodes) # Get", "for i in range(1, num_steps + 1): # lower learning", "== 1 and halve_lr: lr = self.fast_lr / 2 else:", "this only updates the context parameters) params, loss = self.adapt(train_episodes,", "= valid_episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio = (pi.log_prob(valid_episodes.actions)", "tasks: # reset context params (for cavia) and task 
self.policy.reset_context()", "self.policy.parameters()) grads = parameters_to_vector(grads) # Compute the step direction with", "from torch.distributions.kl import kl_divergence from torch.nn.utils.convert_parameters import (vector_to_parameters, parameters_to_vector) from", "new task, from sampled trajectories `episodes`, with a one-step gradient", "= self.policy(episodes.observations, params=params) log_probs = pi.log_prob(episodes.actions) if log_probs.dim() > 2:", "params=params, batch_size=batch_size) # initialise list which will log all rollouts", "the policy network to a new task, from sampled trajectories", "Optimization (TRPO, [4]). \"\"\" old_loss, _, old_pis = self.surrogate_loss(episodes) #", "search step_size = 1.0 for _ in range(ls_max_steps): vector_to_parameters(old_params -", "mask = mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl)", "some initial experience and log performance test_episodes = self.sampler.sample(self.policy, gamma=self.gamma,", "loop (for CAVIA, this only updates the context parameters) params,", "old_pis = [None] * len(episodes) for (train_episodes, valid_episodes), old_pi in", "step_size=lr, first_order=first_order, params=params) return params, loss def sample(self, tasks, first_order=False):", "(TRPO, [4]). \"\"\" old_loss, _, old_pis = self.surrogate_loss(episodes) # this", "2017 (https://arxiv.org/abs/1703.03400) [2] <NAME>, <NAME>, \"Reinforcement learning: An introduction\", 2018", "inner-loop update pi = self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi)) if old_pi is", "gradient update [1]. \"\"\" if lr is None: lr =", "\"\"\"Compute the inner loss for the one-step gradient update. 
The", "valid_episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio = (pi.log_prob(valid_episodes.actions) -", "grad_kl_v = torch.dot(flat_grad_kl, vector) grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl =", "is None: old_pis = [None] * len(episodes) for (train_episodes, valid_episodes),", "`tasks`. \"\"\" episodes = [] losses = [] for task", "\"\"\" episodes = [] losses = [] for task in", "a new task, from sampled trajectories `episodes`, with a one-step", "= pi.log_prob(episodes.actions) if log_probs.dim() > 2: log_probs = torch.sum(log_probs, dim=2)", "(for CAVIA, this only updates the context parameters) params, loss", "= loss - old_loss if (improve.item() < 0.0) and (kl.item()", "update self.policy.reset_context() params, _ = self.adapt(train_episodes) with torch.set_grad_enabled(old_pi is None):", "action values after inner-loop update pi = self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi))", "weights=episodes.mask) pi = self.policy(episodes.observations, params=params) log_probs = pi.log_prob(episodes.actions) if log_probs.dim()", "direction with Conjugate Gradient hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping) stepdir =", "old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(kls, dim=0)) def hessian_vector_product(self, episodes,", "* vector return _product def surrogate_loss(self, episodes, old_pis=None): losses, kls,", "and task self.policy.reset_context() self.sampler.reset_task(task) # start with blank params params", "training episodes self.baseline.fit(episodes) # Get the loss on the training", "dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)), pis def", "gamma=self.gamma, params=params, batch_size=batch_size) # initialise list which will log all", "[3] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, 
\"High-Dimensional Continuous Control Using", "log_ratio = (pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim() > 2: log_ratio", "pi.log_prob(episodes.actions) if log_probs.dim() > 2: log_probs = torch.sum(log_probs, dim=2) loss", "\"\"\"Adapt the parameters of the policy network to a new", "Policy Optimization (TRPO, [4]). \"\"\" old_loss, _, old_pis = self.surrogate_loss(episodes)", "# Fit the baseline to the training episodes self.baseline.fit(episodes) #", "params=params) pis.append(detach_distribution(pi)) if old_pi is None: old_pi = detach_distribution(pi) values", "dim=2) loss = -weighted_mean(log_probs * advantages, dim=0, weights=episodes.mask) return loss", "(vector_to_parameters, parameters_to_vector) from rl_utils.optimization import conjugate_gradient from rl_utils.torch_utils import (weighted_mean,", "weights=episodes.mask) return loss def adapt(self, episodes, first_order=False, params=None, lr=None): \"\"\"Adapt", "dim=0)) def hessian_vector_product(self, episodes, damping=1e-2): \"\"\"Hessian-vector product, based on the", "= stepdir / lagrange_multiplier # Save the old parameters old_params", "for task in tasks: self.sampler.reset_task(task) self.policy.reset_context() train_episodes = self.sampler.sample(self.policy, gamma=self.gamma)", "losses def test(self, tasks, num_steps, batch_size, halve_lr): \"\"\"Sample trajectories (before", "> 2: log_ratio = torch.sum(log_ratio, dim=2) ratio = torch.exp(log_ratio) loss", "this is the inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes)", "i == 1 and halve_lr: lr = self.fast_lr / 2", "`tasks`.batchsize \"\"\" episodes_per_task = [] for task in tasks: #", "pis = [], [], [] if old_pis is None: old_pis", "parameters_to_vector(grads) grad_kl_v = torch.dot(flat_grad_kl, vector) grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl", "if old_pi is None: old_pi = detach_distribution(pi) values = 
self.baseline(valid_episodes)", "= [] for task in tasks: self.sampler.reset_task(task) self.policy.reset_context() train_episodes =", "params, loss def sample(self, tasks, first_order=False): \"\"\"Sample trajectories (before and", "tasks `tasks`. \"\"\" episodes = [] losses = [] for", "baseline, gamma=0.95, fast_lr=0.5, tau=1.0, device='cpu'): self.sampler = sampler self.policy =", "= torch.autograd.grad(old_loss, self.policy.parameters()) grads = parameters_to_vector(grads) # Compute the step", "[None] * len(episodes) for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis):", "advantages, dim=0, weights=episodes.mask) return loss def adapt(self, episodes, first_order=False, params=None,", "current task curr_episodes = [test_episodes] for i in range(1, num_steps", "[3]). \"\"\" values = self.baseline(episodes) advantages = episodes.gae(values, tau=self.tau) advantages", "= parameters_to_vector(grad2s) return flat_grad2_kl + damping * vector return _product", "max_kl): break step_size *= ls_backtrack_ratio else: print('no update?') vector_to_parameters(old_params, self.policy.parameters())", "episodes, old_pis=None): kls = [] if old_pis is None: old_pis", "# inner loop (for CAVIA, this only updates the context", "rollouts for the current task curr_episodes = [test_episodes] for i", "for _ in range(ls_max_steps): vector_to_parameters(old_params - step_size * step, self.policy.parameters())", "with baseline [2], computed on advantages estimated with Generalized Advantage", "parameters) for all the tasks `tasks`.batchsize \"\"\" episodes_per_task = []", "advantages, dim=0, weights=valid_episodes.mask) losses.append(loss) mask = valid_episodes.mask if valid_episodes.actions.dim() >", "flat_grad2_kl = parameters_to_vector(grad2s) return flat_grad2_kl + damping * vector return", "torch.exp(log_ratio) loss = -weighted_mean(ratio * advantages, dim=0, weights=valid_episodes.mask) losses.append(loss) mask", "self.baseline.fit(episodes) # Get the loss on the 
training episodes loss", "= self.adapt(train_episodes, first_order=first_order) # rollouts after inner loop update valid_episodes", "\"\"\" episodes_per_task = [] for task in tasks: # reset", "* len(episodes) for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis): #", "params (for cavia) and task self.policy.reset_context() self.sampler.reset_task(task) # start with", "update?') vector_to_parameters(old_params, self.policy.parameters()) print('improve:', improve.item()) print('kl:', kl.item()) print('step_size:', step_size) return", "grads = torch.autograd.grad(old_loss, self.policy.parameters()) grads = parameters_to_vector(grads) # Compute the", "\"\"\"Sample trajectories (before and after the update of the parameters)", "ls_backtrack_ratio else: print('no update?') vector_to_parameters(old_params, self.policy.parameters()) print('improve:', improve.item()) print('kl:', kl.item())", "advantages = weighted_normalize(advantages, weights=episodes.mask) pi = self.policy(episodes.observations, params=params) log_probs =", "parameters after a one-step gradient update params = self.policy.update_params(loss, step_size=lr,", "- step_size * step, self.policy.parameters()) loss, kl, _ = self.surrogate_loss(episodes,", "valid_episodes)) losses.append(loss.item()) return episodes, losses def test(self, tasks, num_steps, batch_size,", "list which will log all rollouts for the current task", "def __init__(self, sampler, policy, baseline, gamma=0.95, fast_lr=0.5, tau=1.0, device='cpu'): self.sampler", "<NAME>, <NAME>, <NAME>, <NAME>, \"Trust Region Policy Optimization\", 2015 (https://arxiv.org/abs/1502.05477)", "paper) if i == 1 and halve_lr: lr = self.fast_lr", "valid_episodes.mask if valid_episodes.actions.dim() > 2: mask = mask.unsqueeze(2) kl =", "Line search step_size = 1.0 for _ in range(ls_max_steps): vector_to_parameters(old_params", "based on Trust Region Policy Optimization (TRPO, [4]). 
\"\"\" old_loss,", "the updated parameters based on the inner-loss, and perform the", "= self.baseline(episodes) advantages = episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=episodes.mask)", "their paper) if i == 1 and halve_lr: lr =", "self.gamma = gamma self.fast_lr = fast_lr self.tau = tau self.to(device)", "self.hessian_vector_product(episodes, damping=cg_damping) stepdir = conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters) # Compute the", "<NAME>, <NAME>, <NAME>, \"High-Dimensional Continuous Control Using Generalized Advantage Estimation\",", "\"Trust Region Policy Optimization\", 2015 (https://arxiv.org/abs/1502.05477) \"\"\" def __init__(self, sampler,", "adapt(self, episodes, first_order=False, params=None, lr=None): \"\"\"Adapt the parameters of the", "self.policy.reset_context() params, _ = self.adapt(train_episodes) with torch.set_grad_enabled(old_pi is None): #", "\"\"\" old_loss, _, old_pis = self.surrogate_loss(episodes) # this part will", "damping * vector return _product def surrogate_loss(self, episodes, old_pis=None): losses,", "<NAME>, <NAME>, \"High-Dimensional Continuous Control Using Generalized Advantage Estimation\", 2016", "= [] for task in tasks: # reset context params", "= weighted_normalize(advantages, weights=episodes.mask) pi = self.policy(episodes.observations, params=params) log_probs = pi.log_prob(episodes.actions)", "import torch from torch.distributions.kl import kl_divergence from torch.nn.utils.convert_parameters import (vector_to_parameters,", "if lr is None: lr = self.fast_lr # Fit the", "num_steps + 1): # lower learning rate after first update", "self.baseline(episodes) advantages = episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=episodes.mask) pi", "= None # gather some initial experience and log performance", "valid_episodes), old_pi in zip(episodes, old_pis): # this is the inner-loop", "lr=None): \"\"\"Adapt the parameters of 
the policy network to a", "= policy self.baseline = baseline self.gamma = gamma self.fast_lr =", "cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step (ie. update of the initial", "= torch.sum(log_probs, dim=2) loss = -weighted_mean(log_probs * advantages, dim=0, weights=episodes.mask)", "the training episodes loss = self.inner_loss(episodes, params=params) # Get the", "params = self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params) return params, loss def", "if i == 1 and halve_lr: lr = self.fast_lr /", "on the Perlmutter method.\"\"\" def _product(vector): kl = self.kl_divergence(episodes) grads", "hessian_vector_product(stepdir)) lagrange_multiplier = torch.sqrt(shs / max_kl) step = stepdir /", "network to a new task, from sampled trajectories `episodes`, with", "loss = self.inner_loss(episodes, params=params) # Get the new parameters after", "of the policy network to a new task, from sampled", "\"\"\" values = self.baseline(episodes) advantages = episodes.gae(values, tau=self.tau) advantages =", "torch.mean(torch.stack(kls, dim=0)) def hessian_vector_product(self, episodes, damping=1e-2): \"\"\"Hessian-vector product, based on", "(train_episodes, valid_episodes), old_pi in zip(episodes, old_pis): # do inner-loop update", "step_size = 1.0 for _ in range(ls_max_steps): vector_to_parameters(old_params - step_size", "def inner_loss(self, episodes, params=None): \"\"\"Compute the inner loss for the", "old_pis=None): losses, kls, pis = [], [], [] if old_pis", "policy network to a new task, from sampled trajectories `episodes`,", "else: lr = self.fast_lr # inner-loop update params, loss =", "= baseline self.gamma = gamma self.fast_lr = fast_lr self.tau =", "parameters) params, loss = self.adapt(train_episodes, first_order=first_order) # rollouts after inner", "learning rate after first update (for MAML, as described in", "log performance test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, 
params=params, batch_size=batch_size) # initialise", "MAML, as described in their paper) if i == 1", "return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)), pis def step(self, episodes, max_kl=1e-3,", "the baseline to the training episodes self.baseline.fit(episodes) # Get the", "after a one-step gradient update params = self.policy.update_params(loss, step_size=lr, first_order=first_order,", "conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters) # Compute the Lagrange multiplier shs =", "# do inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes) with", "= weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(kls, dim=0)) def", "the inner loop: grads = torch.autograd.grad(old_loss, self.policy.parameters()) grads = parameters_to_vector(grads)", "self.policy.reset_context() self.sampler.reset_task(task) # start with blank params params = None", "print('no update?') vector_to_parameters(old_params, self.policy.parameters()) print('improve:', improve.item()) print('kl:', kl.item()) print('step_size:', step_size)", "gradients through the inner loop: grads = torch.autograd.grad(old_loss, self.policy.parameters()) grads", "rollouts after inner loop update valid_episodes = self.sampler.sample(self.policy, params=params, gamma=self.gamma)", "test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context() return", "\"\"\"Hessian-vector product, based on the Perlmutter method.\"\"\" def _product(vector): kl", "vector_to_parameters(old_params - step_size * step, self.policy.parameters()) loss, kl, _ =", "cavia) and task self.policy.reset_context() self.sampler.reset_task(task) # start with blank params", "first_order=first_order) # rollouts after inner loop update valid_episodes = 
self.sampler.sample(self.policy,", "on the inner-loss, and perform the meta-update. [1] <NAME>, <NAME>,", "# Get the loss on the training episodes loss =", "= conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters) # Compute the Lagrange multiplier shs", "is responsible for sampling the trajectories/episodes (before and after the", "parameters of the policy network to a new task, from", "the inner loss, compute the updated parameters based on the", "= self.inner_loss(episodes, params=params) # Get the new parameters after a", "[] for task in tasks: # reset context params (for", "parameters_to_vector(grad2s) return flat_grad2_kl + damping * vector return _product def", "sample(self, tasks, first_order=False): \"\"\"Sample trajectories (before and after the update", "import kl_divergence from torch.nn.utils.convert_parameters import (vector_to_parameters, parameters_to_vector) from rl_utils.optimization import", "old_pis is None: old_pis = [None] * len(episodes) for (train_episodes,", "inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes) with torch.set_grad_enabled(old_pi is", "params params = None # gather some initial experience and", "as described in their paper) if i == 1 and", "# this is the inner-loop update self.policy.reset_context() params, _ =", "is None: old_pi = detach_distribution(pi) mask = valid_episodes.mask if valid_episodes.actions.dim()", "return episodes_per_task def kl_divergence(self, episodes, old_pis=None): kls = [] if", "losses.append(loss.item()) return episodes, losses def test(self, tasks, num_steps, batch_size, halve_lr):", "kls, pis = [], [], [] if old_pis is None:", "= torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl = parameters_to_vector(grad2s) return flat_grad2_kl + damping", "policy self.baseline = baseline self.gamma = gamma self.fast_lr = fast_lr", "lagrange_multiplier = torch.sqrt(shs / max_kl) step = stepdir / lagrange_multiplier", "= torch.dot(flat_grad_kl, 
vector) grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters()) flat_grad2_kl = parameters_to_vector(grad2s)", "dim=0)), torch.mean(torch.stack(kls, dim=0)), pis def step(self, episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2,", "pi = self.policy(episodes.observations, params=params) log_probs = pi.log_prob(episodes.actions) if log_probs.dim() >", "weights=valid_episodes.mask) log_ratio = (pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim() > 2:", "task self.policy.reset_context() self.sampler.reset_task(task) # start with blank params params =", "after first update (for MAML, as described in their paper)", "< 0.0) and (kl.item() < max_kl): break step_size *= ls_backtrack_ratio", "Using Generalized Advantage Estimation\", 2016 (https://arxiv.org/abs/1506.02438) [4] <NAME>, <NAME>, <NAME>,", "__init__(self, sampler, policy, baseline, gamma=0.95, fast_lr=0.5, tau=1.0, device='cpu'): self.sampler =", "one-step gradient update. The inner loss is REINFORCE with baseline", "is None: lr = self.fast_lr # Fit the baseline to", "params=None, lr=None): \"\"\"Adapt the parameters of the policy network to", "= detach_distribution(pi) mask = valid_episodes.mask if valid_episodes.actions.dim() > 2: mask", "<NAME>, <NAME>, \"Trust Region Policy Optimization\", 2015 (https://arxiv.org/abs/1502.05477) \"\"\" def", "* advantages, dim=0, weights=episodes.mask) return loss def adapt(self, episodes, first_order=False,", "Adaptation of Deep Networks\", 2017 (https://arxiv.org/abs/1703.03400) [2] <NAME>, <NAME>, \"Reinforcement", "gamma=self.gamma) # inner loop (for CAVIA, this only updates the", "(https://arxiv.org/abs/1502.05477) \"\"\" def __init__(self, sampler, policy, baseline, gamma=0.95, fast_lr=0.5, tau=1.0,", "one-step gradient update params = self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params) return", "= detach_distribution(pi) values = self.baseline(valid_episodes) advantages = 
valid_episodes.gae(values, tau=self.tau) advantages", "tasks, num_steps, batch_size, halve_lr): \"\"\"Sample trajectories (before and after the", "only updates the context parameters) params, loss = self.adapt(train_episodes, first_order=first_order)", "(https://arxiv.org/abs/1703.03400) [2] <NAME>, <NAME>, \"Reinforcement learning: An introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html)", "import (vector_to_parameters, parameters_to_vector) from rl_utils.optimization import conjugate_gradient from rl_utils.torch_utils import", "loss = self.adapt(train_episodes, first_order=first_order) # rollouts after inner loop update", "lr = self.fast_lr # inner-loop update params, loss = self.adapt(test_episodes,", "advantages estimated with Generalized Advantage Estimation (GAE, [3]). \"\"\" values", "- old_loss if (improve.item() < 0.0) and (kl.item() < max_kl):", "REINFORCE with baseline [2], computed on advantages estimated with Generalized", "[2] <NAME>, <NAME>, \"Reinforcement learning: An introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html) [3]", "params, _ = self.adapt(train_episodes) pi = self.policy(valid_episodes.observations, params=params) if old_pi", "tau=self.tau) advantages = weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio = (pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions))", "Lagrange multiplier shs = 0.5 * torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier =", "inner loss, compute the updated parameters based on the inner-loss,", "2016 (https://arxiv.org/abs/1506.02438) [4] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"Trust Region", "initial parameters), based on Trust Region Policy Optimization (TRPO, [4]).", "= weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls,", "policy, baseline, gamma=0.95, fast_lr=0.5, tau=1.0, device='cpu'): self.sampler = 
sampler self.policy", "The meta-learner is responsible for sampling the trajectories/episodes (before and", "the one-step gradient update. The inner loss is REINFORCE with", "tasks: self.sampler.reset_task(task) self.policy.reset_context() train_episodes = self.sampler.sample(self.policy, gamma=self.gamma) # inner loop", "lower learning rate after first update (for MAML, as described", "Compute the Lagrange multiplier shs = 0.5 * torch.dot(stepdir, hessian_vector_product(stepdir))", "sampling the trajectories/episodes (before and after the one-step adaptation), compute", "trajectories/episodes (before and after the one-step adaptation), compute the inner", "[] if old_pis is None: old_pis = [None] * len(episodes)", "old_params = parameters_to_vector(self.policy.parameters()) print() # Line search step_size = 1.0", "the update of the parameters) for all the tasks `tasks`.", "the current task curr_episodes = [test_episodes] for i in range(1,", "kl.item()) print('step_size:', step_size) return loss def to(self, device, **kwargs): self.policy.to(device,", "None: old_pi = detach_distribution(pi) mask = valid_episodes.mask if valid_episodes.actions.dim() >", "updates the context parameters) params, loss = self.adapt(train_episodes, first_order=first_order) #", "log all rollouts for the current task curr_episodes = [test_episodes]", "old_pis=old_pis) improve = loss - old_loss if (improve.item() < 0.0)", "Advantage Estimation (GAE, [3]). 
\"\"\" values = self.baseline(episodes) advantages =", "detach_distribution(pi) mask = valid_episodes.mask if valid_episodes.actions.dim() > 2: mask =", "do inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes) with torch.set_grad_enabled(old_pi", "meta-learner is responsible for sampling the trajectories/episodes (before and after", "update params, loss = self.adapt(test_episodes, first_order=True, params=params, lr=lr) # get", "the training episodes self.baseline.fit(episodes) # Get the loss on the", "grads = parameters_to_vector(grads) # Compute the step direction with Conjugate", "the update of the parameters) for all the tasks `tasks`.batchsize", "sampler self.policy = policy self.baseline = baseline self.gamma = gamma", "episodes, damping=1e-2): \"\"\"Hessian-vector product, based on the Perlmutter method.\"\"\" def", "in tasks: self.sampler.reset_task(task) self.policy.reset_context() train_episodes = self.sampler.sample(self.policy, gamma=self.gamma) # inner", "self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) # initialise list which will log", "= [test_episodes] for i in range(1, num_steps + 1): #", "# Get the new parameters after a one-step gradient update", "gamma self.fast_lr = fast_lr self.tau = tau self.to(device) def inner_loss(self,", "curr_episodes = [test_episodes] for i in range(1, num_steps + 1):", "from sampled trajectories `episodes`, with a one-step gradient update [1].", "log_probs.dim() > 2: log_probs = torch.sum(log_probs, dim=2) loss = -weighted_mean(log_probs", "old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)), pis", "on Trust Region Policy Optimization (TRPO, [4]). 
\"\"\" old_loss, _,", "return episodes, losses def test(self, tasks, num_steps, batch_size, halve_lr): \"\"\"Sample", "in range(1, num_steps + 1): # lower learning rate after", "\"Reinforcement learning: An introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>, <NAME>, <NAME>,", "self.policy.parameters()) print('improve:', improve.item()) print('kl:', kl.item()) print('step_size:', step_size) return loss def", "device='cpu'): self.sampler = sampler self.policy = policy self.baseline = baseline", "= self.adapt(train_episodes) with torch.set_grad_enabled(old_pi is None): # get action values", "= torch.sum(log_ratio, dim=2) ratio = torch.exp(log_ratio) loss = -weighted_mean(ratio *", "weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)),", "def _product(vector): kl = self.kl_divergence(episodes) grads = torch.autograd.grad(kl, self.policy.parameters(), create_graph=True)", "one-step gradient update [1]. \"\"\" if lr is None: lr", "_ = self.adapt(train_episodes) pi = self.policy(valid_episodes.observations, params=params) if old_pi is", "if old_pi is None: old_pi = detach_distribution(pi) mask = valid_episodes.mask", "weighted_normalize) class MetaLearner(object): \"\"\"Meta-learner The meta-learner is responsible for sampling", "initialise list which will log all rollouts for the current", "after inner loop update valid_episodes = self.sampler.sample(self.policy, params=params, gamma=self.gamma) episodes.append((train_episodes,", "ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step (ie. 
update of the initial parameters), based", "self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params) return params, loss def sample(self, tasks,", "= self.fast_lr # inner-loop update params, loss = self.adapt(test_episodes, first_order=True,", "<NAME>, \"High-Dimensional Continuous Control Using Generalized Advantage Estimation\", 2016 (https://arxiv.org/abs/1506.02438)", "and perform the meta-update. [1] <NAME>, <NAME>, <NAME>, \"Model-Agnostic Meta-Learning", "update of the parameters) for all the tasks `tasks`.batchsize \"\"\"", "to the training episodes self.baseline.fit(episodes) # Get the loss on", "grads, cg_iters=cg_iters) # Compute the Lagrange multiplier shs = 0.5", "inner-loss, and perform the meta-update. [1] <NAME>, <NAME>, <NAME>, \"Model-Agnostic", "stepdir = conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters) # Compute the Lagrange multiplier", "which will log all rollouts for the current task curr_episodes", "the inner-loss, and perform the meta-update. [1] <NAME>, <NAME>, <NAME>,", "= self.adapt(train_episodes) pi = self.policy(valid_episodes.observations, params=params) if old_pi is None:", "old_pis = self.surrogate_loss(episodes) # this part will take higher order", "sampler, policy, baseline, gamma=0.95, fast_lr=0.5, tau=1.0, device='cpu'): self.sampler = sampler", "weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio = (pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim() >", "\"High-Dimensional Continuous Control Using Generalized Advantage Estimation\", 2016 (https://arxiv.org/abs/1506.02438) [4]", "part will take higher order gradients through the inner loop:", "update of the parameters) for all the tasks `tasks`. 
\"\"\"", "self.inner_loss(episodes, params=params) # Get the new parameters after a one-step", "order gradients through the inner loop: grads = torch.autograd.grad(old_loss, self.policy.parameters())", "gamma=self.gamma) episodes.append((train_episodes, valid_episodes)) losses.append(loss.item()) return episodes, losses def test(self, tasks,", "computed on advantages estimated with Generalized Advantage Estimation (GAE, [3]).", "= parameters_to_vector(self.policy.parameters()) print() # Line search step_size = 1.0 for", "= [] losses = [] for task in tasks: self.sampler.reset_task(task)", "the parameters of the policy network to a new task,", "Generalized Advantage Estimation\", 2016 (https://arxiv.org/abs/1506.02438) [4] <NAME>, <NAME>, <NAME>, <NAME>,", "a one-step gradient update [1]. \"\"\" if lr is None:", "will take higher order gradients through the inner loop: grads", "the step direction with Conjugate Gradient hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping)", "= valid_episodes.mask if valid_episodes.actions.dim() > 2: mask = mask.unsqueeze(2) kl", "curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context() return episodes_per_task def kl_divergence(self, episodes, old_pis=None): kls", "params=params, lr=lr) # get new rollouts test_episodes = self.sampler.sample(self.policy, gamma=self.gamma,", "self.adapt(train_episodes) pi = self.policy(valid_episodes.observations, params=params) if old_pi is None: old_pi", "and (kl.item() < max_kl): break step_size *= ls_backtrack_ratio else: print('no", "<NAME>, <NAME>, <NAME>, \"Model-Agnostic Meta-Learning for Fast Adaptation of Deep", "(for cavia) and task self.policy.reset_context() self.sampler.reset_task(task) # start with blank", "(pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim() > 2: log_ratio = torch.sum(log_ratio,", "(https://arxiv.org/abs/1506.02438) [4] <NAME>, <NAME>, <NAME>, 
<NAME>, <NAME>, \"Trust Region Policy", "for all the tasks `tasks`. \"\"\" episodes = [] losses", "the one-step adaptation), compute the inner loss, compute the updated", "update self.policy.reset_context() params, _ = self.adapt(train_episodes) pi = self.policy(valid_episodes.observations, params=params)", "= [], [], [] if old_pis is None: old_pis =", "= sampler self.policy = policy self.baseline = baseline self.gamma =", "context params (for cavia) and task self.policy.reset_context() self.sampler.reset_task(task) # start", "from torch.nn.utils.convert_parameters import (vector_to_parameters, parameters_to_vector) from rl_utils.optimization import conjugate_gradient from", "return _product def surrogate_loss(self, episodes, old_pis=None): losses, kls, pis =", "self.policy = policy self.baseline = baseline self.gamma = gamma self.fast_lr", "stepdir / lagrange_multiplier # Save the old parameters old_params =", "based on the inner-loss, and perform the meta-update. [1] <NAME>,", "all rollouts for the current task curr_episodes = [test_episodes] for", "Save the old parameters old_params = parameters_to_vector(self.policy.parameters()) print() # Line", "values after inner-loop update pi = self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi)) if", "if log_probs.dim() > 2: log_probs = torch.sum(log_probs, dim=2) loss =", "this part will take higher order gradients through the inner", "the parameters) for all the tasks `tasks`. 
\"\"\" episodes =", "weights=valid_episodes.mask) losses.append(loss) mask = valid_episodes.mask if valid_episodes.actions.dim() > 2: mask", "self.policy.reset_context() return episodes_per_task def kl_divergence(self, episodes, old_pis=None): kls = []", "= [None] * len(episodes) for (train_episodes, valid_episodes), old_pi in zip(episodes,", "= self.fast_lr / 2 else: lr = self.fast_lr # inner-loop", "self.fast_lr # inner-loop update params, loss = self.adapt(test_episodes, first_order=True, params=params,", "episodes_per_task.append(curr_episodes) self.policy.reset_context() return episodes_per_task def kl_divergence(self, episodes, old_pis=None): kls =", "dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(kls, dim=0)) def hessian_vector_product(self, episodes, damping=1e-2):", "= -weighted_mean(log_probs * advantages, dim=0, weights=episodes.mask) return loss def adapt(self,", "is None): # get action values after inner-loop update pi", "context parameters) params, loss = self.adapt(train_episodes, first_order=first_order) # rollouts after", "log_ratio.dim() > 2: log_ratio = torch.sum(log_ratio, dim=2) ratio = torch.exp(log_ratio)", "self.sampler.sample(self.policy, params=params, gamma=self.gamma) episodes.append((train_episodes, valid_episodes)) losses.append(loss.item()) return episodes, losses def", "parameters old_params = parameters_to_vector(self.policy.parameters()) print() # Line search step_size =", "step_size * step, self.policy.parameters()) loss, kl, _ = self.surrogate_loss(episodes, old_pis=old_pis)", "params=params) # Get the new parameters after a one-step gradient", "loss, compute the updated parameters based on the inner-loss, and", "gamma=self.gamma, params=params, batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context() return episodes_per_task def kl_divergence(self,", "Advantage Estimation\", 2016 (https://arxiv.org/abs/1506.02438) [4] <NAME>, <NAME>, <NAME>, 
<NAME>, <NAME>,", "* advantages, dim=0, weights=valid_episodes.mask) losses.append(loss) mask = valid_episodes.mask if valid_episodes.actions.dim()", "self.sampler.reset_task(task) # start with blank params params = None #", "is None: old_pi = detach_distribution(pi) values = self.baseline(valid_episodes) advantages =", "for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis): # this is", "/ 2 else: lr = self.fast_lr # inner-loop update params,", "episodes_per_task = [] for task in tasks: # reset context", "[4] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"Trust Region Policy Optimization\",", "and halve_lr: lr = self.fast_lr / 2 else: lr =", "_product def surrogate_loss(self, episodes, old_pis=None): losses, kls, pis = [],", "2 else: lr = self.fast_lr # inner-loop update params, loss", "= parameters_to_vector(grads) # Compute the step direction with Conjugate Gradient", "on advantages estimated with Generalized Advantage Estimation (GAE, [3]). \"\"\"", "# rollouts after inner loop update valid_episodes = self.sampler.sample(self.policy, params=params,", "max_kl=1e-3, cg_iters=10, cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step (ie. update of", "product, based on the Perlmutter method.\"\"\" def _product(vector): kl =", "loss = self.adapt(test_episodes, first_order=True, params=params, lr=lr) # get new rollouts", "(before and after the update of the parameters) for all", "Region Policy Optimization\", 2015 (https://arxiv.org/abs/1502.05477) \"\"\" def __init__(self, sampler, policy,", "zip(episodes, old_pis): # this is the inner-loop update self.policy.reset_context() params,", "first_order=False, params=None, lr=None): \"\"\"Adapt the parameters of the policy network", "updated parameters based on the inner-loss, and perform the meta-update.", "Trust Region Policy Optimization (TRPO, [4]). 
\"\"\" old_loss, _, old_pis", "step = stepdir / lagrange_multiplier # Save the old parameters", "= fast_lr self.tau = tau self.to(device) def inner_loss(self, episodes, params=None):", "_ = self.surrogate_loss(episodes, old_pis=old_pis) improve = loss - old_loss if", "None # gather some initial experience and log performance test_episodes", "log_ratio = torch.sum(log_ratio, dim=2) ratio = torch.exp(log_ratio) loss = -weighted_mean(ratio", "update pi = self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi)) if old_pi is None:", "= torch.exp(log_ratio) loss = -weighted_mean(ratio * advantages, dim=0, weights=valid_episodes.mask) losses.append(loss)", "# gather some initial experience and log performance test_episodes =", "take higher order gradients through the inner loop: grads =", "parameters_to_vector) from rl_utils.optimization import conjugate_gradient from rl_utils.torch_utils import (weighted_mean, detach_distribution,", "baseline to the training episodes self.baseline.fit(episodes) # Get the loss", "Estimation (GAE, [3]). \"\"\" values = self.baseline(episodes) advantages = episodes.gae(values,", "Get the loss on the training episodes loss = self.inner_loss(episodes,", "print() # Line search step_size = 1.0 for _ in", "kl = self.kl_divergence(episodes) grads = torch.autograd.grad(kl, self.policy.parameters(), create_graph=True) flat_grad_kl =", "pi = self.policy(valid_episodes.observations, params=params) if old_pi is None: old_pi =", "in zip(episodes, old_pis): # do inner-loop update self.policy.reset_context() params, _", "class MetaLearner(object): \"\"\"Meta-learner The meta-learner is responsible for sampling the", "0.5 * torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier = torch.sqrt(shs / max_kl) step", "estimated with Generalized Advantage Estimation (GAE, [3]). 
\"\"\" values =", "= torch.sqrt(shs / max_kl) step = stepdir / lagrange_multiplier #", "through the inner loop: grads = torch.autograd.grad(old_loss, self.policy.parameters()) grads =", "losses = [] for task in tasks: self.sampler.reset_task(task) self.policy.reset_context() train_episodes", "<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"Trust Region Policy Optimization\", 2015", "in their paper) if i == 1 and halve_lr: lr", "flat_grad_kl = parameters_to_vector(grads) grad_kl_v = torch.dot(flat_grad_kl, vector) grad2s = torch.autograd.grad(grad_kl_v,", "based on the Perlmutter method.\"\"\" def _product(vector): kl = self.kl_divergence(episodes)", "loop: grads = torch.autograd.grad(old_loss, self.policy.parameters()) grads = parameters_to_vector(grads) # Compute", "advantages = weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio = (pi.log_prob(valid_episodes.actions) - old_pi.log_prob(valid_episodes.actions)) if", "for the current task curr_episodes = [test_episodes] for i in", "kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(kls, dim=0))", "multiplier shs = 0.5 * torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier = torch.sqrt(shs", "-weighted_mean(log_probs * advantages, dim=0, weights=episodes.mask) return loss def adapt(self, episodes,", "the tasks `tasks`. 
\"\"\" episodes = [] losses = []", "[test_episodes] for i in range(1, num_steps + 1): # lower", "loss, kl, _ = self.surrogate_loss(episodes, old_pis=old_pis) improve = loss -", "the parameters) for all the tasks `tasks`.batchsize \"\"\" episodes_per_task =", "batch_size=batch_size) # initialise list which will log all rollouts for", "0.0) and (kl.item() < max_kl): break step_size *= ls_backtrack_ratio else:", "conjugate_gradient from rl_utils.torch_utils import (weighted_mean, detach_distribution, weighted_normalize) class MetaLearner(object): \"\"\"Meta-learner", "will log all rollouts for the current task curr_episodes =", "= self.hessian_vector_product(episodes, damping=cg_damping) stepdir = conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters) # Compute", "gather some initial experience and log performance test_episodes = self.sampler.sample(self.policy,", "self.adapt(test_episodes, first_order=True, params=params, lr=lr) # get new rollouts test_episodes =", "# get new rollouts test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size)", "= self.fast_lr # Fit the baseline to the training episodes", "sampled trajectories `episodes`, with a one-step gradient update [1]. \"\"\"", "step (ie. update of the initial parameters), based on Trust", "loss = -weighted_mean(log_probs * advantages, dim=0, weights=episodes.mask) return loss def", "the initial parameters), based on Trust Region Policy Optimization (TRPO,", "torch.autograd.grad(old_loss, self.policy.parameters()) grads = parameters_to_vector(grads) # Compute the step direction", "cg_iters=10, cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step (ie. 
update of the", "2: log_probs = torch.sum(log_probs, dim=2) loss = -weighted_mean(log_probs * advantages,", "self.kl_divergence(episodes) grads = torch.autograd.grad(kl, self.policy.parameters(), create_graph=True) flat_grad_kl = parameters_to_vector(grads) grad_kl_v", "advantages = valid_episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages, weights=valid_episodes.mask) log_ratio =", "episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2, ls_max_steps=10, ls_backtrack_ratio=0.5): \"\"\"Meta-optimization step (ie. update", "def sample(self, tasks, first_order=False): \"\"\"Sample trajectories (before and after the", "import conjugate_gradient from rl_utils.torch_utils import (weighted_mean, detach_distribution, weighted_normalize) class MetaLearner(object):", "update (for MAML, as described in their paper) if i", "dim=0, weights=episodes.mask) return loss def adapt(self, episodes, first_order=False, params=None, lr=None):", "compute the inner loss, compute the updated parameters based on", "rollouts test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context()", "baseline self.gamma = gamma self.fast_lr = fast_lr self.tau = tau", "train_episodes = self.sampler.sample(self.policy, gamma=self.gamma) # inner loop (for CAVIA, this", "for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis): # do inner-loop", "= [] if old_pis is None: old_pis = [None] *", "inner-loop update self.policy.reset_context() params, _ = self.adapt(train_episodes) pi = self.policy(valid_episodes.observations,", "kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return torch.mean(torch.stack(losses, dim=0)),", "episodes, first_order=False, params=None, lr=None): \"\"\"Adapt the parameters of the policy", "update valid_episodes = self.sampler.sample(self.policy, params=params, 
gamma=self.gamma) episodes.append((train_episodes, valid_episodes)) losses.append(loss.item()) return", "torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)), pis def step(self, episodes, max_kl=1e-3, cg_iters=10,", "params=params, gamma=self.gamma) episodes.append((train_episodes, valid_episodes)) losses.append(loss.item()) return episodes, losses def test(self,", "# this part will take higher order gradients through the", "self.policy.parameters()) loss, kl, _ = self.surrogate_loss(episodes, old_pis=old_pis) improve = loss", "return torch.mean(torch.stack(kls, dim=0)) def hessian_vector_product(self, episodes, damping=1e-2): \"\"\"Hessian-vector product, based", "step direction with Conjugate Gradient hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping) stepdir", "with Generalized Advantage Estimation (GAE, [3]). \"\"\" values = self.baseline(episodes)", "None: lr = self.fast_lr # Fit the baseline to the", "get new rollouts test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) curr_episodes.append(test_episodes)", "_product(vector): kl = self.kl_divergence(episodes) grads = torch.autograd.grad(kl, self.policy.parameters(), create_graph=True) flat_grad_kl", "- old_pi.log_prob(valid_episodes.actions)) if log_ratio.dim() > 2: log_ratio = torch.sum(log_ratio, dim=2)", "values = self.baseline(episodes) advantages = episodes.gae(values, tau=self.tau) advantages = weighted_normalize(advantages,", "losses, kls, pis = [], [], [] if old_pis is", "loss - old_loss if (improve.item() < 0.0) and (kl.item() <", "kl_divergence from torch.nn.utils.convert_parameters import (vector_to_parameters, parameters_to_vector) from rl_utils.optimization import conjugate_gradient", "(kl.item() < max_kl): break step_size *= ls_backtrack_ratio else: print('no update?')", "fast_lr self.tau = tau self.to(device) def inner_loss(self, episodes, params=None): \"\"\"Compute", 
"range(ls_max_steps): vector_to_parameters(old_params - step_size * step, self.policy.parameters()) loss, kl, _", "return loss def to(self, device, **kwargs): self.policy.to(device, **kwargs) self.baseline.to(device, **kwargs)", "pis.append(detach_distribution(pi)) if old_pi is None: old_pi = detach_distribution(pi) values =", "shs = 0.5 * torch.dot(stepdir, hessian_vector_product(stepdir)) lagrange_multiplier = torch.sqrt(shs /", "The inner loss is REINFORCE with baseline [2], computed on", "None: old_pi = detach_distribution(pi) values = self.baseline(valid_episodes) advantages = valid_episodes.gae(values,", "> 2: log_probs = torch.sum(log_probs, dim=2) loss = -weighted_mean(log_probs *", "params, loss = self.adapt(train_episodes, first_order=first_order) # rollouts after inner loop", "episodes, losses def test(self, tasks, num_steps, batch_size, halve_lr): \"\"\"Sample trajectories", "learning: An introduction\", 2018 (http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>, <NAME>, <NAME>, <NAME>,", "\"\"\"Meta-optimization step (ie. 
update of the initial parameters), based on", "and after the one-step adaptation), compute the inner loss, compute", "the context parameters) params, loss = self.adapt(train_episodes, first_order=first_order) # rollouts", "tau self.to(device) def inner_loss(self, episodes, params=None): \"\"\"Compute the inner loss", "2018 (http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"High-Dimensional Continuous", "self.policy(episodes.observations, params=params) log_probs = pi.log_prob(episodes.actions) if log_probs.dim() > 2: log_probs", "1.0 for _ in range(ls_max_steps): vector_to_parameters(old_params - step_size * step,", "of the parameters) for all the tasks `tasks`.batchsize \"\"\" episodes_per_task", "+ 1): # lower learning rate after first update (for", "performance test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size) # initialise list", "rl_utils.torch_utils import (weighted_mean, detach_distribution, weighted_normalize) class MetaLearner(object): \"\"\"Meta-learner The meta-learner", "\"\"\"Meta-learner The meta-learner is responsible for sampling the trajectories/episodes (before", "<NAME>, <NAME>, \"Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks\",", "Deep Networks\", 2017 (https://arxiv.org/abs/1703.03400) [2] <NAME>, <NAME>, \"Reinforcement learning: An", "(for MAML, as described in their paper) if i ==", "first_order=False): \"\"\"Sample trajectories (before and after the update of the", "valid_episodes.actions.dim() > 2: mask = mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi, old_pi),", "update. 
The inner loss is REINFORCE with baseline [2], computed", "params=params, batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context() return episodes_per_task def kl_divergence(self, episodes,", "pi = self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi)) if old_pi is None: old_pi", "def to(self, device, **kwargs): self.policy.to(device, **kwargs) self.baseline.to(device, **kwargs) self.device =", "lr = self.fast_lr / 2 else: lr = self.fast_lr #", "= mask.unsqueeze(2) kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask) kls.append(kl) return", "loss on the training episodes loss = self.inner_loss(episodes, params=params) #", "torch from torch.distributions.kl import kl_divergence from torch.nn.utils.convert_parameters import (vector_to_parameters, parameters_to_vector)", "return flat_grad2_kl + damping * vector return _product def surrogate_loss(self,", "torch.sqrt(shs / max_kl) step = stepdir / lagrange_multiplier # Save", "<NAME>, \"Trust Region Policy Optimization\", 2015 (https://arxiv.org/abs/1502.05477) \"\"\" def __init__(self,", "parameters_to_vector(grads) # Compute the step direction with Conjugate Gradient hessian_vector_product", "len(episodes) for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis): # do", "task, from sampled trajectories `episodes`, with a one-step gradient update", "= self.sampler.sample(self.policy, gamma=self.gamma) # inner loop (for CAVIA, this only", "= self.sampler.sample(self.policy, params=params, gamma=self.gamma) episodes.append((train_episodes, valid_episodes)) losses.append(loss.item()) return episodes, losses", "(ie. 
update of the initial parameters), based on Trust Region", "after inner-loop update pi = self.policy(valid_episodes.observations, params=params) pis.append(detach_distribution(pi)) if old_pi", "(weighted_mean, detach_distribution, weighted_normalize) class MetaLearner(object): \"\"\"Meta-learner The meta-learner is responsible", "= tau self.to(device) def inner_loss(self, episodes, params=None): \"\"\"Compute the inner", "kl, _ = self.surrogate_loss(episodes, old_pis=old_pis) improve = loss - old_loss", "gamma=0.95, fast_lr=0.5, tau=1.0, device='cpu'): self.sampler = sampler self.policy = policy", "for the one-step gradient update. The inner loss is REINFORCE", "old_pis=None): kls = [] if old_pis is None: old_pis =", "episodes loss = self.inner_loss(episodes, params=params) # Get the new parameters", "batch_size=batch_size) curr_episodes.append(test_episodes) episodes_per_task.append(curr_episodes) self.policy.reset_context() return episodes_per_task def kl_divergence(self, episodes, old_pis=None):", "loss def sample(self, tasks, first_order=False): \"\"\"Sample trajectories (before and after", "else: print('no update?') vector_to_parameters(old_params, self.policy.parameters()) print('improve:', improve.item()) print('kl:', kl.item()) print('step_size:',", "# get action values after inner-loop update pi = self.policy(valid_episodes.observations,", "kls.append(kl) return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)), pis def step(self, episodes,", "self.policy.reset_context() train_episodes = self.sampler.sample(self.policy, gamma=self.gamma) # inner loop (for CAVIA,", "break step_size *= ls_backtrack_ratio else: print('no update?') vector_to_parameters(old_params, self.policy.parameters()) print('improve:',", "(http://incompleteideas.net/book/the-book-2nd.html) [3] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, \"High-Dimensional Continuous Control", "batch_size, halve_lr): \"\"\"Sample trajectories (before and after the update of" ]
[ "from django.utils import timezone from ...models import Request DURATION_OPTIONS =", "duration = options['duration'] # Check we have the correct values", "it plural duration_plural = '{0}s'.format(duration) else: duration_plural = duration if", ") def handle(self, *args, **options): amount = options['amount'] duration =", "lambda amount: timezone.now() - timedelta(days=amount), 'weeks': lambda amount: timezone.now() -", "Request DURATION_OPTIONS = { 'hours': lambda amount: timezone.now() - timedelta(hours=amount),", "lambda amount: timezone.now() - timedelta(weeks=amount), 'months': lambda amount: timezone.now() +", "timezone.now() + relativedelta(years=-amount), } try: # to keep backward Python", "any requests created before {0} {1} ago. That is a", "django.core.management.base import BaseCommand, CommandError from django.utils import timezone from ...models", "not in DURATION_OPTIONS: raise CommandError('Amount must be {0}'.format(', '.join(DURATION_OPTIONS))) qs", "must be {0}'.format(', '.join(DURATION_OPTIONS))) qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count = qs.count()", "requested a database reset. This will IRREVERSIBLY DESTROY any requests", "input(''' You have requested a database reset. This will IRREVERSIBLY", "not plural, make it plural duration_plural = '{0}s'.format(duration) else: duration_plural", "{0} {1} ago. That is a total of {2} requests.", "pass class Command(BaseCommand): help = 'Purge old requests.' def add_arguments(self,", "amount: timezone.now() - timedelta(weeks=amount), 'months': lambda amount: timezone.now() + relativedelta(months=-amount),", "any kind.' ) def handle(self, *args, **options): amount = options['amount']", "its not plural, make it plural duration_plural = '{0}s'.format(duration) else:", "you sure you want to do this? Type 'yes' to", "do this? 
Type 'yes' to continue, or 'no' to cancel:'''.format(amount,", "will IRREVERSIBLY DESTROY any requests created before {0} {1} ago.", "def handle(self, *args, **options): amount = options['amount'] duration = options['duration']", "options.get('interactive'): confirm = input(''' You have requested a database reset.", "sure you want to do this? Type 'yes' to continue,", "input of any kind.' ) def handle(self, *args, **options): amount", "correct values if duration[-1] != 's': # If its not", "the correct values if duration[-1] != 's': # If its", "parser.add_argument( '--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt", "prompt the user for input of any kind.' ) def", "or 'no' to cancel:'''.format(amount, duration, count)) else: confirm = 'yes'", "'--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the", "= raw_input except NameError: pass class Command(BaseCommand): help = 'Purge", "kind.' ) def handle(self, *args, **options): amount = options['amount'] duration", "DURATION_OPTIONS: raise CommandError('Amount must be {0}'.format(', '.join(DURATION_OPTIONS))) qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount))", "relativedelta from django.core.management.base import BaseCommand, CommandError from django.utils import timezone", "are no requests to delete.') return if options.get('interactive'): confirm =", "{2} requests. 
Are you sure you want to do this?", "from dateutil.relativedelta import relativedelta from django.core.management.base import BaseCommand, CommandError from", "= 'yes' if confirm == 'yes': qs.delete() else: print('Purge cancelled')", "parser): parser.add_argument( 'amount', type=int, ) parser.add_argument('duration') parser.add_argument( '--noinput', action='store_false', dest='interactive',", "amount: timezone.now() + relativedelta(years=-amount), } try: # to keep backward", "to NOT prompt the user for input of any kind.'", "requests to delete.') return if options.get('interactive'): confirm = input(''' You", "Command(BaseCommand): help = 'Purge old requests.' def add_arguments(self, parser): parser.add_argument(", "DESTROY any requests created before {0} {1} ago. That is", "+ relativedelta(years=-amount), } try: # to keep backward Python 2", "for input of any kind.' ) def handle(self, *args, **options):", "a total of {2} requests. Are you sure you want", "of any kind.' ) def handle(self, *args, **options): amount =", "timezone.now() + relativedelta(months=-amount), 'years': lambda amount: timezone.now() + relativedelta(years=-amount), }", ") parser.add_argument('duration') parser.add_argument( '--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to", "continue, or 'no' to cancel:'''.format(amount, duration, count)) else: confirm =", "dateutil.relativedelta import relativedelta from django.core.management.base import BaseCommand, CommandError from django.utils", "parser.add_argument('duration') parser.add_argument( '--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT", "} try: # to keep backward Python 2 compatibility input", "'days': lambda amount: timezone.now() - timedelta(days=amount), 'weeks': lambda amount: timezone.now()", "total of {2} requests. 
Are you sure you want to", "keep backward Python 2 compatibility input = raw_input except NameError:", "requests created before {0} {1} ago. That is a total", "{0}'.format(', '.join(DURATION_OPTIONS))) qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count = qs.count() if count", "'months': lambda amount: timezone.now() + relativedelta(months=-amount), 'years': lambda amount: timezone.now()", "if duration[-1] != 's': # If its not plural, make", "have the correct values if duration[-1] != 's': # If", "'amount', type=int, ) parser.add_argument('duration') parser.add_argument( '--noinput', action='store_false', dest='interactive', default=True, help='Tells", "plural duration_plural = '{0}s'.format(duration) else: duration_plural = duration if duration_plural", "import timezone from ...models import Request DURATION_OPTIONS = { 'hours':", "to cancel:'''.format(amount, duration, count)) else: confirm = 'yes' if confirm", "qs.count() if count == 0: print('There are no requests to", "That is a total of {2} requests. Are you sure", "old requests.' def add_arguments(self, parser): parser.add_argument( 'amount', type=int, ) parser.add_argument('duration')", "to delete.') return if options.get('interactive'): confirm = input(''' You have", "timedelta(weeks=amount), 'months': lambda amount: timezone.now() + relativedelta(months=-amount), 'years': lambda amount:", "we have the correct values if duration[-1] != 's': #", "raw_input except NameError: pass class Command(BaseCommand): help = 'Purge old", "the user for input of any kind.' 
) def handle(self,", "If its not plural, make it plural duration_plural = '{0}s'.format(duration)", "from django.core.management.base import BaseCommand, CommandError from django.utils import timezone from", "CommandError('Amount must be {0}'.format(', '.join(DURATION_OPTIONS))) qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count =", "# to keep backward Python 2 compatibility input = raw_input", "{ 'hours': lambda amount: timezone.now() - timedelta(hours=amount), 'days': lambda amount:", "timedelta(hours=amount), 'days': lambda amount: timezone.now() - timedelta(days=amount), 'weeks': lambda amount:", "= 'Purge old requests.' def add_arguments(self, parser): parser.add_argument( 'amount', type=int,", "options['duration'] # Check we have the correct values if duration[-1]", "'s': # If its not plural, make it plural duration_plural", "timezone from ...models import Request DURATION_OPTIONS = { 'hours': lambda", "== 0: print('There are no requests to delete.') return if", "lambda amount: timezone.now() + relativedelta(years=-amount), } try: # to keep", "add_arguments(self, parser): parser.add_argument( 'amount', type=int, ) parser.add_argument('duration') parser.add_argument( '--noinput', action='store_false',", "duration if duration_plural not in DURATION_OPTIONS: raise CommandError('Amount must be", "except NameError: pass class Command(BaseCommand): help = 'Purge old requests.'", "= qs.count() if count == 0: print('There are no requests", "'weeks': lambda amount: timezone.now() - timedelta(weeks=amount), 'months': lambda amount: timezone.now()", "'Purge old requests.' def add_arguments(self, parser): parser.add_argument( 'amount', type=int, )", "database reset. This will IRREVERSIBLY DESTROY any requests created before", "IRREVERSIBLY DESTROY any requests created before {0} {1} ago. 
That", "try: # to keep backward Python 2 compatibility input =", "if duration_plural not in DURATION_OPTIONS: raise CommandError('Amount must be {0}'.format(',", "amount: timezone.now() + relativedelta(months=-amount), 'years': lambda amount: timezone.now() + relativedelta(years=-amount),", "to keep backward Python 2 compatibility input = raw_input except", "import timedelta from dateutil.relativedelta import relativedelta from django.core.management.base import BaseCommand,", "confirm = input(''' You have requested a database reset. This", "lambda amount: timezone.now() + relativedelta(months=-amount), 'years': lambda amount: timezone.now() +", "Python 2 compatibility input = raw_input except NameError: pass class", "duration, count)) else: confirm = 'yes' if confirm == 'yes':", "count)) else: confirm = 'yes' if confirm == 'yes': qs.delete()", "BaseCommand, CommandError from django.utils import timezone from ...models import Request", "ago. That is a total of {2} requests. Are you", "input = raw_input except NameError: pass class Command(BaseCommand): help =", "duration[-1] != 's': # If its not plural, make it", "!= 's': # If its not plural, make it plural", "- timedelta(weeks=amount), 'months': lambda amount: timezone.now() + relativedelta(months=-amount), 'years': lambda", "from datetime import timedelta from dateutil.relativedelta import relativedelta from django.core.management.base", "DURATION_OPTIONS = { 'hours': lambda amount: timezone.now() - timedelta(hours=amount), 'days':", "count == 0: print('There are no requests to delete.') return", "Check we have the correct values if duration[-1] != 's':", "Are you sure you want to do this? 
Type 'yes'", "amount: timezone.now() - timedelta(days=amount), 'weeks': lambda amount: timezone.now() - timedelta(weeks=amount),", "dest='interactive', default=True, help='Tells Django to NOT prompt the user for", "default=True, help='Tells Django to NOT prompt the user for input", "import BaseCommand, CommandError from django.utils import timezone from ...models import", "= duration if duration_plural not in DURATION_OPTIONS: raise CommandError('Amount must", "from ...models import Request DURATION_OPTIONS = { 'hours': lambda amount:", "return if options.get('interactive'): confirm = input(''' You have requested a", "requests.' def add_arguments(self, parser): parser.add_argument( 'amount', type=int, ) parser.add_argument('duration') parser.add_argument(", "*args, **options): amount = options['amount'] duration = options['duration'] # Check", "**options): amount = options['amount'] duration = options['duration'] # Check we", "'no' to cancel:'''.format(amount, duration, count)) else: confirm = 'yes' if", "import relativedelta from django.core.management.base import BaseCommand, CommandError from django.utils import", "+ relativedelta(months=-amount), 'years': lambda amount: timezone.now() + relativedelta(years=-amount), } try:", "'hours': lambda amount: timezone.now() - timedelta(hours=amount), 'days': lambda amount: timezone.now()", "timezone.now() - timedelta(days=amount), 'weeks': lambda amount: timezone.now() - timedelta(weeks=amount), 'months':", "- timedelta(days=amount), 'weeks': lambda amount: timezone.now() - timedelta(weeks=amount), 'months': lambda", "raise CommandError('Amount must be {0}'.format(', '.join(DURATION_OPTIONS))) qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count", "def add_arguments(self, parser): parser.add_argument( 'amount', type=int, ) parser.add_argument('duration') parser.add_argument( '--noinput',", "duration_plural = duration if duration_plural not in DURATION_OPTIONS: raise 
CommandError('Amount", "print('There are no requests to delete.') return if options.get('interactive'): confirm", "of {2} requests. Are you sure you want to do", "handle(self, *args, **options): amount = options['amount'] duration = options['duration'] #", "'.join(DURATION_OPTIONS))) qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count = qs.count() if count ==", "amount: timezone.now() - timedelta(hours=amount), 'days': lambda amount: timezone.now() - timedelta(days=amount),", "make it plural duration_plural = '{0}s'.format(duration) else: duration_plural = duration", "= input(''' You have requested a database reset. This will", "relativedelta(years=-amount), } try: # to keep backward Python 2 compatibility", "compatibility input = raw_input except NameError: pass class Command(BaseCommand): help", "django.utils import timezone from ...models import Request DURATION_OPTIONS = {", "class Command(BaseCommand): help = 'Purge old requests.' def add_arguments(self, parser):", "= options['amount'] duration = options['duration'] # Check we have the", "CommandError from django.utils import timezone from ...models import Request DURATION_OPTIONS", "reset. 
This will IRREVERSIBLY DESTROY any requests created before {0}", "Django to NOT prompt the user for input of any", "timezone.now() - timedelta(weeks=amount), 'months': lambda amount: timezone.now() + relativedelta(months=-amount), 'years':", "relativedelta(months=-amount), 'years': lambda amount: timezone.now() + relativedelta(years=-amount), } try: #", "This will IRREVERSIBLY DESTROY any requests created before {0} {1}", "- timedelta(hours=amount), 'days': lambda amount: timezone.now() - timedelta(days=amount), 'weeks': lambda", "2 compatibility input = raw_input except NameError: pass class Command(BaseCommand):", "amount = options['amount'] duration = options['duration'] # Check we have", "if options.get('interactive'): confirm = input(''' You have requested a database", "want to do this? Type 'yes' to continue, or 'no'", "is a total of {2} requests. Are you sure you", "delete.') return if options.get('interactive'): confirm = input(''' You have requested", "import Request DURATION_OPTIONS = { 'hours': lambda amount: timezone.now() -", "timedelta from dateutil.relativedelta import relativedelta from django.core.management.base import BaseCommand, CommandError", "values if duration[-1] != 's': # If its not plural,", "# If its not plural, make it plural duration_plural =", "NameError: pass class Command(BaseCommand): help = 'Purge old requests.' def", "requests. Are you sure you want to do this? Type", "type=int, ) parser.add_argument('duration') parser.add_argument( '--noinput', action='store_false', dest='interactive', default=True, help='Tells Django", "= options['duration'] # Check we have the correct values if", "else: confirm = 'yes' if confirm == 'yes': qs.delete() else:", "Type 'yes' to continue, or 'no' to cancel:'''.format(amount, duration, count))", "help = 'Purge old requests.' 
def add_arguments(self, parser): parser.add_argument( 'amount',", "= Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count = qs.count() if count == 0: print('There", "if count == 0: print('There are no requests to delete.')", "'yes' to continue, or 'no' to cancel:'''.format(amount, duration, count)) else:", "# Check we have the correct values if duration[-1] !=", "NOT prompt the user for input of any kind.' )", "'{0}s'.format(duration) else: duration_plural = duration if duration_plural not in DURATION_OPTIONS:", "options['amount'] duration = options['duration'] # Check we have the correct", "before {0} {1} ago. That is a total of {2}", "have requested a database reset. This will IRREVERSIBLY DESTROY any", "count = qs.count() if count == 0: print('There are no", "timedelta(days=amount), 'weeks': lambda amount: timezone.now() - timedelta(weeks=amount), 'months': lambda amount:", "'years': lambda amount: timezone.now() + relativedelta(years=-amount), } try: # to", "plural, make it plural duration_plural = '{0}s'.format(duration) else: duration_plural =", "confirm = 'yes' if confirm == 'yes': qs.delete() else: print('Purge", "Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count = qs.count() if count == 0: print('There are", "datetime import timedelta from dateutil.relativedelta import relativedelta from django.core.management.base import", "lambda amount: timezone.now() - timedelta(hours=amount), 'days': lambda amount: timezone.now() -", "duration_plural = '{0}s'.format(duration) else: duration_plural = duration if duration_plural not", "you want to do this? Type 'yes' to continue, or", "You have requested a database reset. This will IRREVERSIBLY DESTROY", "duration_plural not in DURATION_OPTIONS: raise CommandError('Amount must be {0}'.format(', '.join(DURATION_OPTIONS)))", "to do this? 
Type 'yes' to continue, or 'no' to", "action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the user", "user for input of any kind.' ) def handle(self, *args,", "qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count = qs.count() if count == 0:", "created before {0} {1} ago. That is a total of", "0: print('There are no requests to delete.') return if options.get('interactive'):", "a database reset. This will IRREVERSIBLY DESTROY any requests created", "cancel:'''.format(amount, duration, count)) else: confirm = 'yes' if confirm ==", "...models import Request DURATION_OPTIONS = { 'hours': lambda amount: timezone.now()", "{1} ago. That is a total of {2} requests. Are", "help='Tells Django to NOT prompt the user for input of", "in DURATION_OPTIONS: raise CommandError('Amount must be {0}'.format(', '.join(DURATION_OPTIONS))) qs =", "no requests to delete.') return if options.get('interactive'): confirm = input('''", "parser.add_argument( 'amount', type=int, ) parser.add_argument('duration') parser.add_argument( '--noinput', action='store_false', dest='interactive', default=True,", "backward Python 2 compatibility input = raw_input except NameError: pass", "= '{0}s'.format(duration) else: duration_plural = duration if duration_plural not in", "= { 'hours': lambda amount: timezone.now() - timedelta(hours=amount), 'days': lambda", "be {0}'.format(', '.join(DURATION_OPTIONS))) qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount)) count = qs.count() if", "to continue, or 'no' to cancel:'''.format(amount, duration, count)) else: confirm", "timezone.now() - timedelta(hours=amount), 'days': lambda amount: timezone.now() - timedelta(days=amount), 'weeks':", "this? Type 'yes' to continue, or 'no' to cancel:'''.format(amount, duration,", "else: duration_plural = duration if duration_plural not in DURATION_OPTIONS: raise" ]
[ "f3 c9 09 b1 6b 46' TEST_PFX_PASSWORD = \"<PASSWORD>\" TEST_PFX", "cert_store.add_certificate(certif) # Generate a pfx from the TMP cert-store pfx", "0C A8 C9 F9 E0 96 AF 74 18 56", "= b\"\"\" <KEY> \"\"\" @pytest.fixture() def rawcert(): return b64decode(TEST_CERT) @pytest.fixture()", "randrawpfx = randomkeypair randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Receiver 1:", "# Generate a self-signed certificate based on the given key-container", "del cert assert message_to_encrypt not in res # Open pfx", "# Tamper the signed mesasge content signed_blob = signed_blob.replace(b\"message\", b\"massage\")", "windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign) assert", "assert cert.issuer == b'PythonForWindowsTest' assert cert.thumbprint == 'EF 0C A8", "= windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Receiver 1: PFW-test-keypair pfx = windows.crypto.import_pfx(rawpfx,", "= b\"Testing message \\xff\\x01\" # Load PFX (priv+pub key) &", "craft a certificate with a chain for test purpose cert.store.certs", "RANDOM_PFX_PASSWORD) # Receiver 1: PFW-test-keypair pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert", "signature_algo=crypt_algo) # Add the newly created certificate to our TMP", "# Decrypt with each PFX and check the result is", "key handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key) # Descrption of the key-container", "(pubkey only) pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) signed_blob", "windows.crypto.encrypt([cert, cert], message_to_encrypt) del cert assert message_to_encrypt not in res", "= windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial == '1b 8e 94 cb 0b", "decrypt2 def test_randomkeypair(randomkeypair): randcert, randrawpfx 
= randomkeypair assert randcert.name ==", "signed_blob.replace(b\"message\", b\"massage\") with pytest.raises(windows.winproxy.WinproxyError) as excinfo: decoded_blob = windows.crypto.verify_signature(cert, signed_blob)", "message_to_sign = b\"Testing message \\xff\\x01\" # Load PFX (priv+pub key)", "signed_blob decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert decoded_blob == message_to_sign def", "1: PFW-test-keypair pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) assert", "= windows.crypto.verify_signature(cert, signed_blob) assert decoded_blob == message_to_sign def test_sign_verify_fail(rawcert, rawpfx):", "?) # This is fucking dumb, there is no .format", "on bytes object... certif_name = b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) # Generate a", "on samples\\crypto\\encryption_demo.py\"\"\" cert_store = windows.crypto.CertificateStore.new_in_memory() # Create a TMP context", "Load PFX (priv+pub key) & certif (pubkey only) pfx =", "cert = windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in", "None #KeyProvInfo.dwKeySpec = AT_SIGNATURE KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER()", "windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield certif, pfx # Destroy the TMP key", "windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial == '1b 8e 94 cb 0b 3e", "our TMP cert-store cert_store.add_certificate(certif) # Generate a pfx from the", "b\"Testing message \\xff\\x01\" cert = windows.crypto.Certificate.from_buffer(rawcert) # encrypt should accept", "the key-container that will be used to generate the certificate", "res2 = windows.crypto.encrypt([cert, cert], message_to_encrypt) del cert assert message_to_encrypt not", "= gdef.AT_KEYEXCHANGE crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId = 
gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do", "pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT = b\"\"\" <KEY>\"\"\" ## Cert info: # Name:", "cert / pfx. Based on samples\\crypto\\encryption_demo.py\"\"\" cert_store = windows.crypto.CertificateStore.new_in_memory() #", "else (bytes in generated ctypes ?) # This is fucking", "randcert.name == RANDOM_CERTIF_NAME randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check password", "assert cert.encoded != randcert.encoded # Encrypt the message with 2", "x = windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs # TODO: Need some", "94 cb 0b 3e eb b6 41 39 f3 c9", "(priv+pub key) & certif (pubkey only) pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)", "import windows.crypto import windows.generated_def as gdef import windows.crypto.generation from .pfwtest", "windows.crypto.decrypt(randpfx, encrypted) assert decrypted == decrypted2 == message_to_encrypt def test_crypt_obj():", "path = r\"C:\\windows\\system32\\kernel32.dll\" x = windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs #", "\"<PASSWORD>\" @pytest.fixture() def randomkeypair(keysize=1024): r\"\"\"Generate a cert / pfx. 
Based", "# https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key) # Descrption of the key-container that will", "signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in signed_blob # Tamper", "and decrypt pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt = windows.crypto.decrypt(pfx, res)", "rawpfx): message_to_encrypt = b\"Testing message \\xff\\x01\" cert = windows.crypto.Certificate.from_buffer(rawcert) #", "windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo) # Add the newly created certificate to", "res = windows.crypto.encrypt(cert, message_to_encrypt) res2 = windows.crypto.encrypt([cert, cert], message_to_encrypt) del", "do something else (bytes in generated ctypes ?) # This", "It does NOT destroy the key-pair from the container, #", "windows.generated_def as gdef import windows.crypto.generation from .pfwtest import * pytestmark", "import windows.crypto.generation from .pfwtest import * pytestmark = pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT", "6A' assert cert.encoded == rawcert assert cert.version == 2 assert", "Encrypt the message with 2 differents certificates encrypted = windows.crypto.encrypt([cert,", "'1b 8e 94 cb 0b 3e eb b6 41 39", "message_to_encrypt) res2 = windows.crypto.encrypt([cert, cert], message_to_encrypt) del cert assert message_to_encrypt", "f3 c9 09 b1 6b 46' assert cert.name == b'PythonForWindowsTest'", "dumb, there is no .format on bytes object... certif_name =", "is no .format on bytes object... 
certif_name = b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME))", "# Open pfx and decrypt pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt", "context that will hold our newly generated key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER,", "created certificate to our TMP cert-store cert_store.add_certificate(certif) # Generate a", "Create a TMP context that will hold our newly generated", "release the key handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key) # Descrption of", "gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT, \"Hello crypto\")).encode(\"base64\") # Target serial == TEST_CERT.Serial", "assert cert.name != randcert.name assert cert.encoded != randcert.encoded # Encrypt", "cert comparaison assert certs[0] == orig_cert def test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError)", "is good too def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair): message_to_encrypt = b\"\\xff\\x00", "= windows.crypto.decrypt(randpfx, encrypted) assert decrypted == decrypted2 == message_to_encrypt def", "password is good too def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair): message_to_encrypt =", "bytes object... 
certif_name = b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) # Generate a self-signed", "windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0, retrycreate=True) as ctx: key = gdef.HCRYPTKEY()", "the result is valid/the same decrypted = windows.crypto.decrypt(pfx, encrypted) decrypted2", "certif_name = b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) # Generate a self-signed certificate based", "the key-pair from the container, # It only release the", "b6 41 39 f3 c9 09 b1 6b 46 TEST_CRYPTMSG", "key-pair that is exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags, key)", "pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt = windows.crypto.decrypt(pfx, res) decrypt2 =", "def rawpfx(): return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\"", "= windows.crypto.decrypt(pfx, res2) assert message_to_encrypt == decrypt assert decrypt ==", "= windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in signed_blob # Tamper the", "None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert): cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial", "decrypt = windows.crypto.decrypt(pfx, res) decrypt2 = windows.crypto.decrypt(pfx, res2) assert message_to_encrypt", "of the key-container that will be used to generate the", "TEST_CERT = b\"\"\" <KEY>\"\"\" ## Cert info: # Name: PythonForWindowsTest", "Destroy the TMP key container prov = gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER,", "assert message_to_encrypt not in res # Open pfx and decrypt", "res # Open pfx and decrypt pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)", "will be used to generate the certificate KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO()", "def test_pfx(rawcert, rawpfx): pfx = windows.crypto.import_pfx(rawpfx, 
TEST_PFX_PASSWORD) orig_cert = windows.crypto.Certificate.from_buffer(rawcert)", "windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags, key) # It does NOT", "as ctx: key = gdef.HCRYPTKEY() keysize_flags = keysize << 16", "39 f3 c9 09 b1 6b 46' TEST_PFX_PASSWORD = \"<PASSWORD>\"", "windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check password is good too def test_encrypt_decrypt_multiple_receivers(rawcert,", "0, retrycreate=True) as ctx: key = gdef.HCRYPTKEY() keysize_flags = keysize", "KeyProvInfo.dwFlags = 0 KeyProvInfo.cProvParam = 0 KeyProvInfo.rgProvParam = None #KeyProvInfo.dwKeySpec", "RANDOM_PFX_PASSWORD = \"<PASSWORD>\" @pytest.fixture() def randomkeypair(keysize=1024): r\"\"\"Generate a cert /", "\"<PASSWORD>\" TEST_PFX = b\"\"\" <KEY> \"\"\" @pytest.fixture() def rawcert(): return", "TEST_PFX_PASSWORD) decrypt = windows.crypto.decrypt(pfx, res) decrypt2 = windows.crypto.decrypt(pfx, res2) assert", "decoded_blob == message_to_sign def test_sign_verify_fail(rawcert, rawpfx): message_to_sign = b\"Testing message", "<KEY>\"\"\" ## Cert info: # Name: PythonForWindowsTest # Serial: '1b", "or iterable of cert res = windows.crypto.encrypt(cert, message_to_encrypt) res2 =", "b64decode(TEST_CERT) @pytest.fixture() def rawpfx(): return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME", "r\"\"\"Generate a cert / pfx. 
Based on samples\\crypto\\encryption_demo.py\"\"\" cert_store =", "https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key) # Descrption of the key-container that will be", "rawpfx, randomkeypair): message_to_encrypt = b\"\\xff\\x00 Testing message \\xff\\x01\" # Receiver", "gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName = None KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL", "KeyProvInfo.cProvParam = 0 KeyProvInfo.rgProvParam = None #KeyProvInfo.dwKeySpec = AT_SIGNATURE KeyProvInfo.dwKeySpec", "RANDOM_PFX_PASSWORD) # Check password is good too def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx,", "iterable of cert res = windows.crypto.encrypt(cert, message_to_encrypt) res2 = windows.crypto.encrypt([cert,", "= \"<PASSWORD>\" @pytest.fixture() def randomkeypair(keysize=1024): r\"\"\"Generate a cert / pfx.", "TODO: craft a certificate with a chain for test purpose", "Decrypt with each PFX and check the result is valid/the", "purpose cert.store.certs cert.properties def test_pfx(rawcert, rawpfx): pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)", "= windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign)", "pfx. 
Based on samples\\crypto\\encryption_demo.py\"\"\" cert_store = windows.crypto.CertificateStore.new_in_memory() # Create a", "randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Receiver 1: PFW-test-keypair pfx =", "= windows.crypto.encrypt(cert, message_to_encrypt) res2 = windows.crypto.encrypt([cert, cert], message_to_encrypt) del cert", "= randomkeypair assert randcert.name == RANDOM_CERTIF_NAME randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD)", "cert.version == 2 assert cert == cert assert cert is", "windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs # TODO: Need some better ideas", "str(windows.crypto.encrypt(TEST_CERT, \"Hello crypto\")).encode(\"base64\") # Target serial == TEST_CERT.Serial == 1b", "= gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName = None KeyProvInfo.dwProvType =", "0 KeyProvInfo.cProvParam = 0 KeyProvInfo.rgProvParam = None #KeyProvInfo.dwKeySpec = AT_SIGNATURE", "message_to_encrypt not in res # Open pfx and decrypt pfx", "signature algorithme certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo) # Add the", "pfx and decrypt pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt = windows.crypto.decrypt(pfx,", "the TMP key container prov = gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None,", "res2) assert message_to_encrypt == decrypt assert decrypt == decrypt2 def", "& certif (pubkey only) pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert =", "cert == cert assert cert is cert.duplicate() cert.chains # TODO:", "windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.name != randcert.name assert", "with 2 differents certificates encrypted = windows.crypto.encrypt([cert, randcert], 
message_to_encrypt) #", "# Check password is good too def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair):", "randcert, randrawpfx = randomkeypair randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Receiver", "a pfx from the TMP cert-store pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD)", "from .pfwtest import * pytestmark = pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT = b\"\"\"", "certificate based on the given key-container and signature algorithme certif", "excinfo: decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE #", "# str(windows.crypto.encrypt(TEST_CERT, \"Hello crypto\")).encode(\"base64\") # Target serial == TEST_CERT.Serial ==", "c9 09 b1 6b 46 TEST_CRYPTMSG = b\"\"\"<KEY>\"\" def test_cryptmsg_from_data():", "encrypted) assert decrypted == decrypted2 == message_to_encrypt def test_crypt_obj(): path", "def test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt = b\"Testing message \\xff\\x01\" cert =", "certificate KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName = None", "TEST_CERT.Serial == 1b 8e 94 cb 0b 3e eb b6", "mesasge content signed_blob = signed_blob.replace(b\"message\", b\"massage\") with pytest.raises(windows.winproxy.WinproxyError) as excinfo:", "rawpfx(): return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD", "r\"C:\\windows\\system32\\kernel32.dll\" x = windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs # TODO: Need", "message \\xff\\x01\" cert = windows.crypto.Certificate.from_buffer(rawcert) # encrypt should accept a", "= b\"\\xff\\x00 Testing message \\xff\\x01\" # Receiver 1: random key", "= windows.crypto.sign(pfx.certs[0], 
message_to_sign) assert message_to_sign in signed_blob decoded_blob = windows.crypto.verify_signature(cert,", "a certificate with a chain for test purpose cert.store.certs cert.properties", "\\xff\\x01\" # Receiver 1: random key pair randcert, randrawpfx =", "message_to_sign def test_sign_verify_fail(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\" #", "be used to generate the certificate KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName", "signed_blob # Tamper the signed mesasge content signed_blob = signed_blob.replace(b\"message\",", "== TEST_CERT.Serial == 1b 8e 94 cb 0b 3e eb", "None KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL KeyProvInfo.dwFlags = 0 KeyProvInfo.cProvParam = 0", "that will hold our newly generated key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None,", "cert.chains # TODO: craft a certificate with a chain for", "decrypt2 = windows.crypto.decrypt(pfx, res2) assert message_to_encrypt == decrypt assert decrypt", "windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert): cert = windows.crypto.Certificate.from_buffer(rawcert)", "Cert info: # Name: PythonForWindowsTest # Serial: '1b 8e 94", "windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\" #", "message_to_encrypt = b\"\\xff\\x00 Testing message \\xff\\x01\" # Receiver 1: random", "TMP cert-store cert_store.add_certificate(certif) # Generate a pfx from the TMP", "# It does NOT destroy the key-pair from the container,", "encrypt should accept a cert or iterable of cert res", "PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD = \"<PASSWORD>\" @pytest.fixture()", "ctx: key = gdef.HCRYPTKEY() keysize_flags = keysize << 16 #", "fucking dumb, there is 
no .format on bytes object... certif_name", "# Create a TMP context that will hold our newly", "signed_blob) assert decoded_blob == message_to_sign def test_sign_verify_fail(rawcert, rawpfx): message_to_sign =", "randomkeypair): message_to_encrypt = b\"\\xff\\x00 Testing message \\xff\\x01\" # Receiver 1:", "= gdef.HCRYPTKEY() keysize_flags = keysize << 16 # Generate a", "gdef.AT_KEYEXCHANGE crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do something", "= windows.crypto.CertificateStore.new_in_memory() # Create a TMP context that will hold", "the newly created certificate to our TMP cert-store cert_store.add_certificate(certif) #", "= 0 KeyProvInfo.rgProvParam = None #KeyProvInfo.dwKeySpec = AT_SIGNATURE KeyProvInfo.dwKeySpec =", "a TMP context that will hold our newly generated key-pair", "as gdef import windows.crypto.generation from .pfwtest import * pytestmark =", "decrypted == decrypted2 == message_to_encrypt def test_crypt_obj(): path = r\"C:\\windows\\system32\\kernel32.dll\"", "message_to_encrypt) del cert assert message_to_encrypt not in res # Open", "import windows.generated_def as gdef import windows.crypto.generation from .pfwtest import *", "= b\"\"\" <KEY>\"\"\" ## Cert info: # Name: PythonForWindowsTest #", "gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert): cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial ==", "cert.store.certs cert.properties def test_pfx(rawcert, rawpfx): pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) orig_cert", "check the result is valid/the same decrypted = windows.crypto.decrypt(pfx, encrypted)", "windows.crypto.verify_signature(cert, signed_blob) assert decoded_blob == message_to_sign def test_sign_verify_fail(rawcert, rawpfx): message_to_sign", "# Receiver 1: random key pair randcert, randrawpfx = randomkeypair", "== gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT, \"Hello 
crypto\")).encode(\"base64\") # Target serial ==", "serial == TEST_CERT.Serial == 1b 8e 94 cb 0b 3e", "3e eb b6 41 39 f3 c9 09 b1 6b", "== 1 # Test cert comparaison assert certs[0] == orig_cert", "= \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD = \"<PASSWORD>\" @pytest.fixture() def", "cert_store = windows.crypto.CertificateStore.new_in_memory() # Create a TMP context that will", "= windows.crypto.verify_signature(cert, signed_blob) assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT, \"Hello", "# Serial: '1b 8e 94 cb 0b 3e eb b6", "gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert): cert =", "a key-pair that is exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags,", "generate the certificate KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName", "gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do something else (bytes in generated ctypes ?)", "retrycreate=True) as ctx: key = gdef.HCRYPTKEY() keysize_flags = keysize <<", "8e 94 cb 0b 3e eb b6 41 39 f3", "TMP cert-store pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield certif, pfx #", "= windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt = windows.crypto.decrypt(pfx, res) decrypt2 = windows.crypto.decrypt(pfx,", "def test_sign_verify(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\" # Load", "= b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) # Generate a self-signed certificate based on", "= windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs # TODO: Need some better", "in res # Open pfx and decrypt pfx = windows.crypto.import_pfx(rawpfx,", "pytest import 
windows.crypto import windows.generated_def as gdef import windows.crypto.generation from", "def test_cryptmsg_from_data(): rawdata = b64decode(TEST_CRYPTMSG) cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata) rawtarget =", "import pytest import windows.crypto import windows.generated_def as gdef import windows.crypto.generation", "(bytes in generated ctypes ?) # This is fucking dumb,", "!= randcert.name assert cert.encoded != randcert.encoded # Encrypt the message", "b6 41 39 f3 c9 09 b1 6b 46' assert", "def test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert, rawpfx): message_to_sign = b\"Testing", "= gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do something else (bytes", "certificates encrypted = windows.crypto.encrypt([cert, randcert], message_to_encrypt) # Decrypt with each", "as ar: pfx = windows.crypto.import_pfx(rawpfx, \"BadPassword\") def test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt", "@pytest.fixture() def randomkeypair(keysize=1024): r\"\"\"Generate a cert / pfx. 
Based on", "windows.crypto.Certificate.from_buffer(rawcert) assert cert.name != randcert.name assert cert.encoded != randcert.encoded #", "| keysize_flags, key) # It does NOT destroy the key-pair", "= pfx.certs assert len(certs) == 1 # Test cert comparaison", "decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT,", "TMP key container prov = gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL,", "PFX and check the result is valid/the same decrypted =", "generated key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0, retrycreate=True) as ctx:", "# It only release the key handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key)", "better ideas def test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert, rawpfx): message_to_sign", "no .format on bytes object... 
certif_name = b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) #", "= signed_blob.replace(b\"message\", b\"massage\") with pytest.raises(windows.winproxy.WinproxyError) as excinfo: decoded_blob = windows.crypto.verify_signature(cert,", "= windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check password is good too def", "ideas def test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert, rawpfx): message_to_sign =", "content signed_blob = signed_blob.replace(b\"message\", b\"massage\") with pytest.raises(windows.winproxy.WinproxyError) as excinfo: decoded_blob", "algorithme certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo) # Add the newly", "= windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.name != randcert.name", "== b'PythonForWindowsTest' assert cert.issuer == b'PythonForWindowsTest' assert cert.thumbprint == 'EF", "signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in signed_blob decoded_blob =", "message_to_sign) assert message_to_sign in signed_blob decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert", "def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair): message_to_encrypt = b\"\\xff\\x00 Testing message \\xff\\x01\"", "== 1b 8e 94 cb 0b 3e eb b6 41", "* pytestmark = pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT = b\"\"\" <KEY>\"\"\" ## Cert", "pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) orig_cert = windows.crypto.Certificate.from_buffer(rawcert) certs = pfx.certs", "to our TMP cert-store cert_store.add_certificate(certif) # Generate a pfx from", "= None #KeyProvInfo.dwKeySpec = AT_SIGNATURE KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE crypt_algo =", "= b\"Testing message \\xff\\x01\" cert = 
windows.crypto.Certificate.from_buffer(rawcert) # encrypt should", "# TODO: craft a certificate with a chain for test", "= \"<PASSWORD>\" TEST_PFX = b\"\"\" <KEY> \"\"\" @pytest.fixture() def rawcert():", "2 assert cert == cert assert cert is cert.duplicate() cert.chains", "0 KeyProvInfo.rgProvParam = None #KeyProvInfo.dwKeySpec = AT_SIGNATURE KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE", "cert.thumbprint == 'EF 0C A8 C9 F9 E0 96 AF", "pytestmark = pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT = b\"\"\" <KEY>\"\"\" ## Cert info:", "18 56 8B C1 C9 57 27 A0 89 29", "randomkeypair randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Receiver 1: PFW-test-keypair pfx", "== message_to_sign def test_sign_verify_fail(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\"", "# Descrption of the key-container that will be used to", "b1 6b 46' assert cert.name == b'PythonForWindowsTest' assert cert.issuer ==", "cert is cert.duplicate() cert.chains # TODO: craft a certificate with", "41 39 f3 c9 09 b1 6b 46' TEST_PFX_PASSWORD =", "= r\"C:\\windows\\system32\\kernel32.dll\" x = windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs # TODO:", "96 AF 74 18 56 8B C1 C9 57 27", "good too def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair): message_to_encrypt = b\"\\xff\\x00 Testing", "rawcert assert cert.version == 2 assert cert == cert assert", "# Generate a pfx from the TMP cert-store pfx =", "return windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\"", "09 b1 6b 46' assert cert.name == b'PythonForWindowsTest' assert cert.issuer", "b\"\"\"<KEY>\"\" def test_cryptmsg_from_data(): rawdata = b64decode(TEST_CRYPTMSG) cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata) rawtarget", "A8 C9 F9 E0 96 AF 74 18 56 8B", "keysize_flags = keysize << 16 # Generate a key-pair that", "assert 
cert.name == b'PythonForWindowsTest' assert cert.issuer == b'PythonForWindowsTest' assert cert.thumbprint", "assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT, \"Hello crypto\")).encode(\"base64\") # Target", "39 f3 c9 09 b1 6b 46 TEST_CRYPTMSG = b\"\"\"<KEY>\"\"", "1b 8e 94 cb 0b 3e eb b6 41 39", "crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do something else (bytes in generated", "handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key) # Descrption of the key-container that", "result is valid/the same decrypted = windows.crypto.decrypt(pfx, encrypted) decrypted2 =", "cert or iterable of cert res = windows.crypto.encrypt(cert, message_to_encrypt) res2", "test_randomkeypair(randomkeypair): randcert, randrawpfx = randomkeypair assert randcert.name == RANDOM_CERTIF_NAME randpfx", "= windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield certif, pfx # Destroy the TMP", "\"Hello crypto\")).encode(\"base64\") # Target serial == TEST_CERT.Serial == 1b 8e", "test_crypt_obj(): path = r\"C:\\windows\\system32\\kernel32.dll\" x = windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs", "there is no .format on bytes object... 
certif_name = b\"\".join((b\"CN=\",", "randrawpfx = randomkeypair assert randcert.name == RANDOM_CERTIF_NAME randpfx = windows.crypto.import_pfx(randrawpfx,", "= windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) orig_cert = windows.crypto.Certificate.from_buffer(rawcert) certs = pfx.certs assert", "== '1b 8e 94 cb 0b 3e eb b6 41", "PFX (priv+pub key) & certif (pubkey only) pfx = windows.crypto.import_pfx(rawpfx,", "with pytest.raises(windows.winproxy.WinproxyError) as excinfo: decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert excinfo.value.winerror", "rawdata = b64decode(TEST_CRYPTMSG) cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata) rawtarget = b\"\\x1b\\x8e\\x94\\xcb\\x0b>\\xeb\\xb6A9\\xf3\\xc9\\t\\xb1kF\" assert", "ar: pfx = windows.crypto.import_pfx(rawpfx, \"BadPassword\") def test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt =", "def randomkeypair(keysize=1024): r\"\"\"Generate a cert / pfx. Based on samples\\crypto\\encryption_demo.py\"\"\"", "TEST_PFX_PASSWORD = \"<PASSWORD>\" TEST_PFX = b\"\"\" <KEY> \"\"\" @pytest.fixture() def", ".format on bytes object... certif_name = b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) # Generate", "ctypes ?) # This is fucking dumb, there is no", "object... 
certif_name = b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) # Generate a self-signed certificate", "cert.encoded == rawcert assert cert.version == 2 assert cert ==", "= keysize << 16 # Generate a key-pair that is", "rawpfx): pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) orig_cert = windows.crypto.Certificate.from_buffer(rawcert) certs =", "randomkeypair assert randcert.name == RANDOM_CERTIF_NAME randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) #", "f3 c9 09 b1 6b 46 TEST_CRYPTMSG = b\"\"\"<KEY>\"\" def", "is fucking dumb, there is no .format on bytes object...", "for test purpose cert.store.certs cert.properties def test_pfx(rawcert, rawpfx): pfx =", "= windows.crypto.decrypt(pfx, encrypted) decrypted2 = windows.crypto.decrypt(randpfx, encrypted) assert decrypted ==", "in signed_blob # Tamper the signed mesasge content signed_blob =", "#KeyProvInfo.dwKeySpec = AT_SIGNATURE KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId", "cert.name != randcert.name assert cert.encoded != randcert.encoded # Encrypt the", "assert cert.serial == '1b 8e 94 cb 0b 3e eb", "windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in signed_blob decoded_blob = windows.crypto.verify_signature(cert, signed_blob)", "gdef.HCRYPTKEY() keysize_flags = keysize << 16 # Generate a key-pair", "= PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName = None KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL KeyProvInfo.dwFlags =", "74 18 56 8B C1 C9 57 27 A0 89", "randcert, randrawpfx = randomkeypair assert randcert.name == RANDOM_CERTIF_NAME randpfx =", "AF 74 18 56 8B C1 C9 57 27 A0", "x.crypt_msg.certs x.crypt_msg.signers x.signers_and_certs # TODO: Need some better ideas def", "A0 89 29 6A' assert cert.encoded == rawcert assert cert.version", "key-container and signature algorithme certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, 
signature_algo=crypt_algo) #", "from the container, # It only release the key handle", "8B C1 C9 57 27 A0 89 29 6A' assert", "16 # Generate a key-pair that is exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE,", "\"BadPassword\") def test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt = b\"Testing message \\xff\\x01\" cert", "only) pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) signed_blob =", "windows.crypto import windows.generated_def as gdef import windows.crypto.generation from .pfwtest import", "# TODO: Need some better ideas def test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\")", "test_pfx(rawcert, rawpfx): pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) orig_cert = windows.crypto.Certificate.from_buffer(rawcert) certs", "assert len(certs) == 1 # Test cert comparaison assert certs[0]", "cert.duplicate() cert.chains # TODO: craft a certificate with a chain", "self-signed certificate based on the given key-container and signature algorithme", "signed_blob) assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT, \"Hello crypto\")).encode(\"base64\") #", "# Load PFX (priv+pub key) & certif (pubkey only) pfx", "differents certificates encrypted = windows.crypto.encrypt([cert, randcert], message_to_encrypt) # Decrypt with", "\"\"\" @pytest.fixture() def rawcert(): return b64decode(TEST_CERT) @pytest.fixture() def rawpfx(): return", "== decrypted2 == message_to_encrypt def test_crypt_obj(): path = r\"C:\\windows\\system32\\kernel32.dll\" x", "<KEY> \"\"\" @pytest.fixture() def rawcert(): return b64decode(TEST_CERT) @pytest.fixture() def rawpfx():", "message_to_sign) assert message_to_sign in signed_blob # Tamper the signed mesasge", "cert-store pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield certif, pfx # Destroy", "TEST_PFX = b\"\"\" 
<KEY> \"\"\" @pytest.fixture() def rawcert(): return b64decode(TEST_CERT)", "Add the newly created certificate to our TMP cert-store cert_store.add_certificate(certif)", "the given key-container and signature algorithme certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo,", "each PFX and check the result is valid/the same decrypted", "assert certs[0] == orig_cert def test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError) as ar:", "# Add the newly created certificate to our TMP cert-store", "TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign", "b64decode(TEST_CRYPTMSG) cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata) rawtarget = b\"\\x1b\\x8e\\x94\\xcb\\x0b>\\xeb\\xb6A9\\xf3\\xc9\\t\\xb1kF\" assert cryptmsg.get_recipient_data(0).SerialNumber.data[::-1] ==", "key_info=KeyProvInfo, signature_algo=crypt_algo) # Add the newly created certificate to our", "= b64decode(TEST_CRYPTMSG) cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata) rawtarget = b\"\\x1b\\x8e\\x94\\xcb\\x0b>\\xeb\\xb6A9\\xf3\\xc9\\t\\xb1kF\" assert cryptmsg.get_recipient_data(0).SerialNumber.data[::-1]", "eb b6 41 39 f3 c9 09 b1 6b 46", "= windows.crypto.encrypt([cert, cert], message_to_encrypt) del cert assert message_to_encrypt not in", "decrypt assert decrypt == decrypt2 def test_randomkeypair(randomkeypair): randcert, randrawpfx =", "the key handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key) # Descrption of the", "certs[0] == orig_cert def test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError) as ar: pfx", "PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName = None KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL KeyProvInfo.dwFlags = 0", "gdef.PROV_RSA_FULL, 0, retrycreate=True) as ctx: key = gdef.HCRYPTKEY() keysize_flags =", 
"== rawcert assert cert.version == 2 assert cert == cert", "import * pytestmark = pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT = b\"\"\" <KEY>\"\"\" ##", "message with 2 differents certificates encrypted = windows.crypto.encrypt([cert, randcert], message_to_encrypt)", "cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.name != randcert.name assert cert.encoded !=", "PFW-test-keypair pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.name", "cert assert message_to_encrypt not in res # Open pfx and", "gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert): cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial == '1b", "the container, # It only release the key handle #", "09 b1 6b 46 TEST_CRYPTMSG = b\"\"\"<KEY>\"\" def test_cryptmsg_from_data(): rawdata", "pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.name !=", "test_certificate(rawcert): cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial == '1b 8e 94", "with each PFX and check the result is valid/the same", ".pfwtest import * pytestmark = pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT = b\"\"\" <KEY>\"\"\"", "return b64decode(TEST_CERT) @pytest.fixture() def rawpfx(): return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\"", "1: random key pair randcert, randrawpfx = randomkeypair randpfx =", "Tamper the signed mesasge content signed_blob = signed_blob.replace(b\"message\", b\"massage\") with", "test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError) as ar: pfx = windows.crypto.import_pfx(rawpfx, \"BadPassword\") def", "gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do something else (bytes in", "res) decrypt2 = windows.crypto.decrypt(pfx, res2) assert message_to_encrypt == 
decrypt assert", "assert randcert.name == RANDOM_CERTIF_NAME randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check", "Name: PythonForWindowsTest # Serial: '1b 8e 94 cb 0b 3e", "56 8B C1 C9 57 27 A0 89 29 6A'", "assert cert.encoded == rawcert assert cert.version == 2 assert cert", "assert decrypted == decrypted2 == message_to_encrypt def test_crypt_obj(): path =", "keysize << 16 # Generate a key-pair that is exportable", "decrypted = windows.crypto.decrypt(pfx, encrypted) decrypted2 = windows.crypto.decrypt(randpfx, encrypted) assert decrypted", "test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert, rawpfx): message_to_sign = b\"Testing message", "= None KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL KeyProvInfo.dwFlags = 0 KeyProvInfo.cProvParam =", "randcert.encoded # Encrypt the message with 2 differents certificates encrypted", "valid/the same decrypted = windows.crypto.decrypt(pfx, encrypted) decrypted2 = windows.crypto.decrypt(randpfx, encrypted)", "b\"\"\" <KEY>\"\"\" ## Cert info: # Name: PythonForWindowsTest # Serial:", "== 2 assert cert == cert assert cert is cert.duplicate()", "1 # Test cert comparaison assert certs[0] == orig_cert def", "return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD =", "= randomkeypair randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Receiver 1: PFW-test-keypair", "decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert decoded_blob == message_to_sign def test_sign_verify_fail(rawcert,", "b1 6b 46 TEST_CRYPTMSG = b\"\"\"<KEY>\"\" def test_cryptmsg_from_data(): rawdata =", "cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial == '1b 8e 94 cb", "cert.name == b'PythonForWindowsTest' assert cert.issuer == b'PythonForWindowsTest' assert cert.thumbprint ==", 
"generated ctypes ?) # This is fucking dumb, there is", "assert cert is cert.duplicate() cert.chains # TODO: craft a certificate", "x.crypt_msg.signers x.signers_and_certs # TODO: Need some better ideas def test_certificate_from_store():", "assert message_to_sign in signed_blob # Tamper the signed mesasge content", "windows.winproxy.CryptDestroyKey(key) # Descrption of the key-container that will be used", "= windows.crypto.import_pfx(rawpfx, \"BadPassword\") def test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt = b\"Testing message", "cert res = windows.crypto.encrypt(cert, message_to_encrypt) res2 = windows.crypto.encrypt([cert, cert], message_to_encrypt)", "windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in signed_blob #", "does NOT destroy the key-pair from the container, # It", "Serial: '1b 8e 94 cb 0b 3e eb b6 41", "a cert or iterable of cert res = windows.crypto.encrypt(cert, message_to_encrypt)", "message_to_encrypt def test_crypt_obj(): path = r\"C:\\windows\\system32\\kernel32.dll\" x = windows.crypto.CryptObject(path) x.crypt_msg.certs", "with pytest.raises(WindowsError) as ar: pfx = windows.crypto.import_pfx(rawpfx, \"BadPassword\") def test_encrypt_decrypt(rawcert,", "certif (pubkey only) pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert)", "key) # It does NOT destroy the key-pair from the", "on the given key-container and signature algorithme certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name,", "KeyProvInfo.rgProvParam = None #KeyProvInfo.dwKeySpec = AT_SIGNATURE KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE crypt_algo", "windows.crypto.decrypt(pfx, res) decrypt2 = windows.crypto.decrypt(pfx, res2) assert message_to_encrypt == decrypt", "gdef import windows.crypto.generation from .pfwtest import * pytestmark = pytest.mark.usefixtures('check_for_gc_garbage')", "== 
message_to_encrypt def test_crypt_obj(): path = r\"C:\\windows\\system32\\kernel32.dll\" x = windows.crypto.CryptObject(path)", "container prov = gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def", "27 A0 89 29 6A' assert cert.encoded == rawcert assert", "09 b1 6b 46' TEST_PFX_PASSWORD = \"<PASSWORD>\" TEST_PFX = b\"\"\"", "assert message_to_sign in signed_blob decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert decoded_blob", "= gdef.PROV_RSA_FULL KeyProvInfo.dwFlags = 0 KeyProvInfo.cProvParam = 0 KeyProvInfo.rgProvParam =", "something else (bytes in generated ctypes ?) # This is", "assert cert.version == 2 assert cert == cert assert cert", "key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0, retrycreate=True) as ctx: key", "RANDOM_PFX_PASSWORD) yield certif, pfx # Destroy the TMP key container", "prov = gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert):", "89 29 6A' assert cert.encoded == rawcert assert cert.version ==", "cb 0b 3e eb b6 41 39 f3 c9 09", "# Generate a key-pair that is exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE", "6b 46' TEST_PFX_PASSWORD = \"<PASSWORD>\" TEST_PFX = b\"\"\" <KEY> \"\"\"", "assert message_to_encrypt == decrypt assert decrypt == decrypt2 def test_randomkeypair(randomkeypair):", "windows.crypto.CertificateStore.new_in_memory() # Create a TMP context that will hold our", "the signed mesasge content signed_blob = signed_blob.replace(b\"message\", b\"massage\") with pytest.raises(windows.winproxy.WinproxyError)", "None, gdef.PROV_RSA_FULL, 0, retrycreate=True) as ctx: key = gdef.HCRYPTKEY() keysize_flags", "windows.crypto.encrypt(cert, message_to_encrypt) res2 = windows.crypto.encrypt([cert, cert], 
message_to_encrypt) del cert assert", "will hold our newly generated key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL,", "gdef.CRYPT_EXPORTABLE | keysize_flags, key) # It does NOT destroy the", "= windows.crypto.Certificate.from_buffer(rawcert) assert cert.name != randcert.name assert cert.encoded != randcert.encoded", "# encrypt should accept a cert or iterable of cert", "PythonForWindowsTest # Serial: '1b 8e 94 cb 0b 3e eb", "pfx # Destroy the TMP key container prov = gdef.HCRYPTPROV()", "that is exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags, key) #", "RANDOM_CERTIF_NAME randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check password is good", "assert cert == cert assert cert is cert.duplicate() cert.chains #", "= windows.crypto.Certificate.from_buffer(rawcert) certs = pfx.certs assert len(certs) == 1 #", "== b'PythonForWindowsTest' assert cert.thumbprint == 'EF 0C A8 C9 F9", "b\"Testing message \\xff\\x01\" # Load PFX (priv+pub key) & certif", "0b 3e eb b6 41 39 f3 c9 09 b1", "the certificate KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName =", "C9 F9 E0 96 AF 74 18 56 8B C1", "that will be used to generate the certificate KeyProvInfo =", "with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0, retrycreate=True) as ctx: key =", "def rawcert(): return b64decode(TEST_CERT) @pytest.fixture() def rawpfx(): return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER", "RANDOM_CERTIF_NAME)) # Generate a self-signed certificate based on the given", "message \\xff\\x01\" # Load PFX (priv+pub key) & certif (pubkey", "and signature algorithme certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo) # Add", "6b 46 TEST_CRYPTMSG = b\"\"\"<KEY>\"\" def test_cryptmsg_from_data(): 
rawdata = b64decode(TEST_CRYPTMSG)", "b1 6b 46' TEST_PFX_PASSWORD = \"<PASSWORD>\" TEST_PFX = b\"\"\" <KEY>", "windows.crypto.Certificate.from_buffer(rawcert) # encrypt should accept a cert or iterable of", "<< 16 # Generate a key-pair that is exportable windows.winproxy.CryptGenKey(ctx,", "rawpfx): message_to_sign = b\"Testing message \\xff\\x01\" # Load PFX (priv+pub", "crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do something else", "29 6A' assert cert.encoded == rawcert assert cert.version == 2", "def test_crypt_obj(): path = r\"C:\\windows\\system32\\kernel32.dll\" x = windows.crypto.CryptObject(path) x.crypt_msg.certs x.crypt_msg.signers", "pytest.raises(windows.winproxy.WinproxyError) as excinfo: decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert excinfo.value.winerror ==", "= pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT = b\"\"\" <KEY>\"\"\" ## Cert info: #", "container, # It only release the key handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx", "test purpose cert.store.certs cert.properties def test_pfx(rawcert, rawpfx): pfx = windows.crypto.import_pfx(rawpfx,", "to generate the certificate KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER", "a chain for test purpose cert.store.certs cert.properties def test_pfx(rawcert, rawpfx):", "def test_certificate(rawcert): cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.serial == '1b 8e", "pair randcert, randrawpfx = randomkeypair randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) #", "# Receiver 1: PFW-test-keypair pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert =", "x.signers_and_certs # TODO: Need some better ideas def test_certificate_from_store(): return", "b\"\".join((b\"CN=\", RANDOM_CERTIF_NAME)) # Generate a self-signed certificate based on the", "signed mesasge 
content signed_blob = signed_blob.replace(b\"message\", b\"massage\") with pytest.raises(windows.winproxy.WinproxyError) as", "randcert], message_to_encrypt) # Decrypt with each PFX and check the", "test_sign_verify(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\" # Load PFX", "assert decoded_blob == message_to_sign def test_sign_verify_fail(rawcert, rawpfx): message_to_sign = b\"Testing", "/ pfx. Based on samples\\crypto\\encryption_demo.py\"\"\" cert_store = windows.crypto.CertificateStore.new_in_memory() # Create", "# Name: PythonForWindowsTest # Serial: '1b 8e 94 cb 0b", "exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags, key) # It does", "chain for test purpose cert.store.certs cert.properties def test_pfx(rawcert, rawpfx): pfx", "key-container that will be used to generate the certificate KeyProvInfo", "encrypted = windows.crypto.encrypt([cert, randcert], message_to_encrypt) # Decrypt with each PFX", "b\"massage\") with pytest.raises(windows.winproxy.WinproxyError) as excinfo: decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert", "TMP context that will hold our newly generated key-pair with", "AT_SIGNATURE KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode(\"ascii\")", "pfx = windows.crypto.import_pfx(rawpfx, \"BadPassword\") def test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt = b\"Testing", "b\"\"\" <KEY> \"\"\" @pytest.fixture() def rawcert(): return b64decode(TEST_CERT) @pytest.fixture() def", "!= randcert.encoded # Encrypt the message with 2 differents certificates", "and check the result is valid/the same decrypted = windows.crypto.decrypt(pfx,", "pfx from the TMP cert-store pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield", "KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER 
KeyProvInfo.pwszProvName = None KeyProvInfo.dwProvType", "57 27 A0 89 29 6A' assert cert.encoded == rawcert", "KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") #", "# This is fucking dumb, there is no .format on", "'EF 0C A8 C9 F9 E0 96 AF 74 18", "KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL KeyProvInfo.dwFlags = 0 KeyProvInfo.cProvParam = 0 KeyProvInfo.rgProvParam", "# Target serial == TEST_CERT.Serial == 1b 8e 94 cb", "Check password is good too def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair): message_to_encrypt", "len(certs) == 1 # Test cert comparaison assert certs[0] ==", "= AT_SIGNATURE KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER() crypt_algo.pszObjId =", "samples\\crypto\\encryption_demo.py\"\"\" cert_store = windows.crypto.CertificateStore.new_in_memory() # Create a TMP context that", "windows.crypto.encrypt([cert, randcert], message_to_encrypt) # Decrypt with each PFX and check", "the message with 2 differents certificates encrypted = windows.crypto.encrypt([cert, randcert],", "Generate a self-signed certificate based on the given key-container and", "windows.crypto.decrypt(pfx, res2) assert message_to_encrypt == decrypt assert decrypt == decrypt2", "TODO: Need some better ideas def test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\") def", "windows.crypto.verify_signature(cert, signed_blob) assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT, \"Hello crypto\")).encode(\"base64\")", "message_to_encrypt = b\"Testing message \\xff\\x01\" cert = windows.crypto.Certificate.from_buffer(rawcert) # encrypt", "info: # Name: PythonForWindowsTest # Serial: '1b 8e 94 cb", "TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) assert cert.name != randcert.name assert 
cert.encoded", "= gdef.szOID_RSA_SHA256RSA.encode(\"ascii\") # do something else (bytes in generated ctypes", "encrypted) decrypted2 = windows.crypto.decrypt(randpfx, encrypted) assert decrypted == decrypted2 ==", "message_to_sign in signed_blob decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert decoded_blob ==", "excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE # str(windows.crypto.encrypt(TEST_CERT, \"Hello crypto\")).encode(\"base64\") # Target serial", "certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo) # Add the newly created", "test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt = b\"Testing message \\xff\\x01\" cert = windows.crypto.Certificate.from_buffer(rawcert)", "certif, pfx # Destroy the TMP key container prov =", "eb b6 41 39 f3 c9 09 b1 6b 46'", "based on the given key-container and signature algorithme certif =", "newly created certificate to our TMP cert-store cert_store.add_certificate(certif) # Generate", "orig_cert = windows.crypto.Certificate.from_buffer(rawcert) certs = pfx.certs assert len(certs) == 1", "assert cert.thumbprint == 'EF 0C A8 C9 F9 E0 96", "46' TEST_PFX_PASSWORD = \"<PASSWORD>\" TEST_PFX = b\"\"\" <KEY> \"\"\" @pytest.fixture()", "b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD = \"<PASSWORD>\"", "Open pfx and decrypt pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt =", "= gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert): cert", "is exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags, key) # It", "= b\"\"\"<KEY>\"\" def test_cryptmsg_from_data(): rawdata = b64decode(TEST_CRYPTMSG) cryptmsg = 
windows.crypto.CryptMessage.from_buffer(rawdata)", "of cert res = windows.crypto.encrypt(cert, message_to_encrypt) res2 = windows.crypto.encrypt([cert, cert],", "== decrypt2 def test_randomkeypair(randomkeypair): randcert, randrawpfx = randomkeypair assert randcert.name", "\\xff\\x01\" # Load PFX (priv+pub key) & certif (pubkey only)", "message_to_encrypt) # Decrypt with each PFX and check the result", "windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in signed_blob # Tamper the signed", "pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield certif, pfx # Destroy the", "windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Receiver 1: PFW-test-keypair pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)", "the TMP cert-store pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield certif, pfx", "key pair randcert, randrawpfx = randomkeypair randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD)", "gdef.PROV_RSA_FULL KeyProvInfo.dwFlags = 0 KeyProvInfo.cProvParam = 0 KeyProvInfo.rgProvParam = None", "a cert / pfx. 
Based on samples\\crypto\\encryption_demo.py\"\"\" cert_store = windows.crypto.CertificateStore.new_in_memory()", "= windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo) # Add the newly created certificate", "windows.crypto.generation from .pfwtest import * pytestmark = pytest.mark.usefixtures('check_for_gc_garbage') TEST_CERT =", "= windows.crypto.encrypt([cert, randcert], message_to_encrypt) # Decrypt with each PFX and", "NOT destroy the key-pair from the container, # It only", "windows.crypto.Certificate.from_buffer(rawcert) certs = pfx.certs assert len(certs) == 1 # Test", "randcert.name assert cert.encoded != randcert.encoded # Encrypt the message with", "41 39 f3 c9 09 b1 6b 46 TEST_CRYPTMSG =", "= windows.crypto.decrypt(pfx, res) decrypt2 = windows.crypto.decrypt(pfx, res2) assert message_to_encrypt ==", "message \\xff\\x01\" # Receiver 1: random key pair randcert, randrawpfx", "RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD = \"<PASSWORD>\" @pytest.fixture() def randomkeypair(keysize=1024): r\"\"\"Generate", "C9 57 27 A0 89 29 6A' assert cert.encoded ==", "message_to_encrypt == decrypt assert decrypt == decrypt2 def test_randomkeypair(randomkeypair): randcert,", "c9 09 b1 6b 46' TEST_PFX_PASSWORD = \"<PASSWORD>\" TEST_PFX =", "our newly generated key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0, retrycreate=True)", "Need some better ideas def test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert,", "def test_sign_verify_fail(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\" # Load", "test_sign_verify_fail(rawcert, rawpfx): message_to_sign = b\"Testing message \\xff\\x01\" # Load PFX", "only release the key handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx 
windows.winproxy.CryptDestroyKey(key) # Descrption", "windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt = windows.crypto.decrypt(pfx, res) decrypt2 = windows.crypto.decrypt(pfx, res2)", "41 39 f3 c9 09 b1 6b 46' assert cert.name", "cert.issuer == b'PythonForWindowsTest' assert cert.thumbprint == 'EF 0C A8 C9", "cert.properties def test_pfx(rawcert, rawpfx): pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) orig_cert =", "Descrption of the key-container that will be used to generate", "Receiver 1: PFW-test-keypair pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert)", "used to generate the certificate KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO() KeyProvInfo.pwszContainerName =", "randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check password is good too", "is cert.duplicate() cert.chains # TODO: craft a certificate with a", "in generated ctypes ?) # This is fucking dumb, there", "== cert assert cert is cert.duplicate() cert.chains # TODO: craft", "2 differents certificates encrypted = windows.crypto.encrypt([cert, randcert], message_to_encrypt) # Decrypt", "certificate to our TMP cert-store cert_store.add_certificate(certif) # Generate a pfx", "== 'EF 0C A8 C9 F9 E0 96 AF 74", "# Encrypt the message with 2 differents certificates encrypted =", "Target serial == TEST_CERT.Serial == 1b 8e 94 cb 0b", "def test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError) as ar: pfx = windows.crypto.import_pfx(rawpfx, \"BadPassword\")", "pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert = windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0],", "@pytest.fixture() def rawpfx(): return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER = \"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME =", "cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata) rawtarget = 
b\"\\x1b\\x8e\\x94\\xcb\\x0b>\\xeb\\xb6A9\\xf3\\xc9\\t\\xb1kF\" assert cryptmsg.get_recipient_data(0).SerialNumber.data[::-1] == rawtarget", "keysize_flags, key) # It does NOT destroy the key-pair from", "b'PythonForWindowsTest' assert cert.issuer == b'PythonForWindowsTest' assert cert.thumbprint == 'EF 0C", "46 TEST_CRYPTMSG = b\"\"\"<KEY>\"\" def test_cryptmsg_from_data(): rawdata = b64decode(TEST_CRYPTMSG) cryptmsg", "6b 46' assert cert.name == b'PythonForWindowsTest' assert cert.issuer == b'PythonForWindowsTest'", "assert decrypt == decrypt2 def test_randomkeypair(randomkeypair): randcert, randrawpfx = randomkeypair", "destroy the key-pair from the container, # It only release", "signed_blob = signed_blob.replace(b\"message\", b\"massage\") with pytest.raises(windows.winproxy.WinproxyError) as excinfo: decoded_blob =", "accept a cert or iterable of cert res = windows.crypto.encrypt(cert,", "certs = pfx.certs assert len(certs) == 1 # Test cert", "# do something else (bytes in generated ctypes ?) #", "cert], message_to_encrypt) del cert assert message_to_encrypt not in res #", "== orig_cert def test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError) as ar: pfx =", "some better ideas def test_certificate_from_store(): return windows.crypto.CertificateStore.from_system_store(\"Root\") def test_sign_verify(rawcert, rawpfx):", "b\"\\xff\\x00 Testing message \\xff\\x01\" # Receiver 1: random key pair", "windows.crypto.import_pfx(rawpfx, \"BadPassword\") def test_encrypt_decrypt(rawcert, rawpfx): message_to_encrypt = b\"Testing message \\xff\\x01\"", "cert-store cert_store.add_certificate(certif) # Generate a pfx from the TMP cert-store", "randomkeypair(keysize=1024): r\"\"\"Generate a cert / pfx. 
Based on samples\\crypto\\encryption_demo.py\"\"\" cert_store", "hold our newly generated key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0,", "yield certif, pfx # Destroy the TMP key container prov", "cert.serial == '1b 8e 94 cb 0b 3e eb b6", "with a chain for test purpose cert.store.certs cert.properties def test_pfx(rawcert,", "random key pair randcert, randrawpfx = randomkeypair randpfx = windows.crypto.import_pfx(randrawpfx,", "key-pair from the container, # It only release the key", "not in res # Open pfx and decrypt pfx =", "test_cryptmsg_from_data(): rawdata = b64decode(TEST_CRYPTMSG) cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata) rawtarget = b\"\\x1b\\x8e\\x94\\xcb\\x0b>\\xeb\\xb6A9\\xf3\\xc9\\t\\xb1kF\"", "C1 C9 57 27 A0 89 29 6A' assert cert.encoded", "as excinfo: decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE", "windows.crypto.decrypt(pfx, encrypted) decrypted2 = windows.crypto.decrypt(randpfx, encrypted) assert decrypted == decrypted2", "b'PythonForWindowsTest' assert cert.thumbprint == 'EF 0C A8 C9 F9 E0", "Testing message \\xff\\x01\" # Receiver 1: random key pair randcert,", "from the TMP cert-store pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD) yield certif,", "windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) orig_cert = windows.crypto.Certificate.from_buffer(rawcert) certs = pfx.certs assert len(certs)", "b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD = \"<PASSWORD>\" @pytest.fixture() def randomkeypair(keysize=1024): r\"\"\"Generate a cert", "46' assert cert.name == b'PythonForWindowsTest' assert cert.issuer == b'PythonForWindowsTest' assert", "decrypted2 == message_to_encrypt def test_crypt_obj(): path = r\"C:\\windows\\system32\\kernel32.dll\" x =", "= windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0], 
message_to_sign) assert message_to_sign in signed_blob", "E0 96 AF 74 18 56 8B C1 C9 57", "\"PythonForWindowsTMPContainerTest\" RANDOM_CERTIF_NAME = b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD = \"<PASSWORD>\" @pytest.fixture() def randomkeypair(keysize=1024):", "def test_randomkeypair(randomkeypair): randcert, randrawpfx = randomkeypair assert randcert.name == RANDOM_CERTIF_NAME", "should accept a cert or iterable of cert res =", "decrypt == decrypt2 def test_randomkeypair(randomkeypair): randcert, randrawpfx = randomkeypair assert", "comparaison assert certs[0] == orig_cert def test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError) as", "same decrypted = windows.crypto.decrypt(pfx, encrypted) decrypted2 = windows.crypto.decrypt(randpfx, encrypted) assert", "certificate with a chain for test purpose cert.store.certs cert.properties def", "too def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair): message_to_encrypt = b\"\\xff\\x00 Testing message", "key container prov = gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET)", "decrypt pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) decrypt = windows.crypto.decrypt(pfx, res) decrypt2", "@pytest.fixture() def rawcert(): return b64decode(TEST_CERT) @pytest.fixture() def rawpfx(): return b64decode(TEST_PFX)", "key = gdef.HCRYPTKEY() keysize_flags = keysize << 16 # Generate", "It only release the key handle # https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx windows.winproxy.CryptDestroyKey(key) #", "KeyProvInfo.pwszProvName = None KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL KeyProvInfo.dwFlags = 0 KeyProvInfo.cProvParam", "## Cert info: # Name: PythonForWindowsTest # Serial: '1b 8e", "test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair): message_to_encrypt = b\"\\xff\\x00 Testing message \\xff\\x01\" #", "Test cert 
comparaison assert certs[0] == orig_cert def test_open_pfx_bad_password(rawpfx): with", "KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER KeyProvInfo.pwszProvName = None KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL KeyProvInfo.dwFlags", "== decrypt assert decrypt == decrypt2 def test_randomkeypair(randomkeypair): randcert, randrawpfx", "gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags, key) # It does NOT destroy", "cert = windows.crypto.Certificate.from_buffer(rawcert) # encrypt should accept a cert or", "= windows.crypto.Certificate.from_buffer(rawcert) # encrypt should accept a cert or iterable", "key) & certif (pubkey only) pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD) cert", "c9 09 b1 6b 46' assert cert.name == b'PythonForWindowsTest' assert", "decrypted2 = windows.crypto.decrypt(randpfx, encrypted) assert decrypted == decrypted2 == message_to_encrypt", "= b\"PythonForWindowsGeneratedRandomCertifTest\" RANDOM_PFX_PASSWORD = \"<PASSWORD>\" @pytest.fixture() def randomkeypair(keysize=1024): r\"\"\"Generate a", "== RANDOM_CERTIF_NAME randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check password is", "pytest.raises(WindowsError) as ar: pfx = windows.crypto.import_pfx(rawpfx, \"BadPassword\") def test_encrypt_decrypt(rawcert, rawpfx):", "windows.crypto.Certificate.from_buffer(rawcert) signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign) assert message_to_sign in signed_blob decoded_blob", "a self-signed certificate based on the given key-container and signature", "PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET) def test_certificate(rawcert): cert = windows.crypto.Certificate.from_buffer(rawcert) assert", "# Test cert comparaison assert certs[0] == orig_cert def test_open_pfx_bad_password(rawpfx):", "Generate a pfx from the TMP cert-store pfx = windows.crypto.generation.generate_pfx(cert_store,", "is valid/the same decrypted = windows.crypto.decrypt(pfx, encrypted) 
decrypted2 = windows.crypto.decrypt(randpfx,", "# Destroy the TMP key container prov = gdef.HCRYPTPROV() windows.winproxy.CryptAcquireContextW(prov,", "TEST_PFX_PASSWORD) orig_cert = windows.crypto.Certificate.from_buffer(rawcert) certs = pfx.certs assert len(certs) ==", "b6 41 39 f3 c9 09 b1 6b 46' TEST_PFX_PASSWORD", "39 f3 c9 09 b1 6b 46' assert cert.name ==", "given key-container and signature algorithme certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo)", "in signed_blob decoded_blob = windows.crypto.verify_signature(cert, signed_blob) assert decoded_blob == message_to_sign", "newly generated key-pair with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0, retrycreate=True) as", "orig_cert def test_open_pfx_bad_password(rawpfx): with pytest.raises(WindowsError) as ar: pfx = windows.crypto.import_pfx(rawpfx,", "= 0 KeyProvInfo.cProvParam = 0 KeyProvInfo.rgProvParam = None #KeyProvInfo.dwKeySpec =", "This is fucking dumb, there is no .format on bytes", "\\xff\\x01\" cert = windows.crypto.Certificate.from_buffer(rawcert) # encrypt should accept a cert", "TEST_CRYPTMSG = b\"\"\"<KEY>\"\" def test_cryptmsg_from_data(): rawdata = b64decode(TEST_CRYPTMSG) cryptmsg =", "message_to_sign in signed_blob # Tamper the signed mesasge content signed_blob", "cert assert cert is cert.duplicate() cert.chains # TODO: craft a", "rawcert(): return b64decode(TEST_CERT) @pytest.fixture() def rawpfx(): return b64decode(TEST_PFX) PFW_TEST_TMP_KEY_CONTAINER =", "Receiver 1: random key pair randcert, randrawpfx = randomkeypair randpfx", "F9 E0 96 AF 74 18 56 8B C1 C9", "crypto\")).encode(\"base64\") # Target serial == TEST_CERT.Serial == 1b 8e 94", "cert.encoded != randcert.encoded # Encrypt the message with 2 differents", "Based on samples\\crypto\\encryption_demo.py\"\"\" cert_store = windows.crypto.CertificateStore.new_in_memory() # Create a TMP", "Generate a key-pair that 
is exportable windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE |", "pfx.certs assert len(certs) == 1 # Test cert comparaison assert" ]
[ "CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed import components from bokeh.plotting import figure", "dh=[r * 0.5], color_mapper=color_mapper) hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\"),", "min_s = self.data.min() max_s = self.data.max() r = self.data.shape[0] *", "is not None: self.fig.yaxis.axis_label = y_label def add_errorbar(self, x, y,", "#im = ax.imshow(self.data, # origin='lower', # zorder=1, # interpolation='none', #", "= LabelSet(x='lon', y='lat', text='names', level='glyph', x_offset=5, y_offset=5, render_mode='canvas', source=source, text_color='white')", "if y_label is not None: self.fig.yaxis.axis_label = y_label def add_errorbar(self,", "= [] y_err_y = [] for px, py, err in", "JS_code_slider = \"\"\" var vmin = low_slider.value; var vmax =", "= figure(plot_width=w, plot_height=h, x_range=(0, c * 0.5), y_range=(0, r *", "is None: vmax = self.data[msk].max() min_s = self.data.min() max_s =", "= Slider(title=\"Sig. 
Max\", start=min_s, end=max_s, step=1, value=max_s * 0.8, callback=callback)", "0][ID], pixcrd[:, 1][ID])) # ax.annotate('%s' % catalog.name[ID], xy=(x, y), color='white')", "= gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h) #curdoc().add_root(grid) #show(grid) #output_file(\"test.html\") script, div = components(grid)", "ax.imshow(self.data, # origin='lower', # zorder=1, # interpolation='none', # aspect='equal', #", "zip, round, input, int, pow, object, map, zip) __author__ =", "err in zip(x, y, yerr): y_err_x.append((px, px)) y_err_y.append((py - err,", "absolute_import, division, print_function from builtins import (bytes, str, open, super,", "__future__ import absolute_import, division, print_function from builtins import (bytes, str,", "~np.isnan(self.data) if vmin is None: vmin = self.data[msk].min() if vmax", "err, py + err)) self.fig.multi_line(y_err_x, y_err_y, color=color, **error_kwargs) def add_step_line(self,x,y,legend=None):", "[] y_err_y = [] for px, py, err in zip(x,", "= self.data[msk].max() min_s = self.data.min() max_s = self.data.max() r =", "color=color, **error_kwargs) if yerr is not None: y_err_x = []", "get_html_draw(self,w=None,h=None): #l = layout([self.f1.fig],[self.f2.fig]) grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h) #curdoc().add_root(grid) #show(grid)", "x_offset=5, y_offset=5, render_mode='canvas', source=source, text_color='white') fig.add_layout(labels) #print'cat', catalog[msk] color_bar =", "code=JS_code_slider) self.graph_min_slider = Slider(title=\"Sig. 
Min\", start=min_s, end=max_s, step=1, value=min_s, callback=callback)", "(bytes, str, open, super, range, zip, round, input, int, pow,", "start=min_s, end=max_s, step=1, value=max_s * 0.8, callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value',", "example\") #from bokeh.io import show #show(layout) script, div = components(layout)", "# print('plot', plot) # mpld3.show() fig.add_layout(color_bar, 'right') layout = row(", "{} html_dict['script'] = script html_dict['div'] = div return html_dict class", "('xy',(pixcrd[:, 0][ID], pixcrd[:, 1][ID])) # ax.annotate('%s' % catalog.name[ID], xy=(x, y),", "= ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5, lat=pixcrd[:, 1][msk]+0.5, names=catalog.name[msk])) #for ID, (x, y)", "self.graph_max_slider), ) #curdoc().add_root(layout) #output_file(\"slider.html\", title=\"slider.py example\") #from bokeh.io import show", "__author__ = \"<NAME>\" import numpy as np from astropy import", "zorder=1, # interpolation='none', # aspect='equal', # cmap=plt.get_cmap('jet'), # vmin=vmin, #", "row( fig, widgetbox(self.graph_min_slider, self.graph_max_slider), ) #curdoc().add_root(layout) #output_file(\"slider.html\", title=\"slider.py example\") #from", "vmax=None): #import plotly #import plotly.graph_objs as go #from plotly.graph_objs import", "#print('b') def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self): layout = row( self.fig", "= HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\")]) self.fig = figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range,", "for px, py, err in zip(x, y, yerr): y_err_x.append((px, px))", "plot_height=h, x_range=(0, c * 0.5), y_range=(0, r * 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'])", "point_kwargs={}, error_kwargs={}): self.fig.circle(x, y, color=color, **point_kwargs) if xerr is not", "import components from 
bokeh.plotting import figure from bokeh.palettes import Plasma256", "= ~np.isnan(self.data) if vmin is None: vmin = self.data[msk].min() if", "gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h) #curdoc().add_root(grid) #show(grid) #output_file(\"test.html\") script, div = components(grid) html_dict={}", "0][msk], pixcrd[:, 1][msk], 'o', mfc='none') source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5, lat=pixcrd[:,", "bokeh.io import show #show(layout) script, div = components(layout) html_dict =", "object, map, zip) __author__ = \"<NAME>\" import numpy as np", "from bokeh.layouts import row, widgetbox,gridplot from bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource", "None: self.fig.xaxis.axis_label = x_label if y_label is not None: self.fig.yaxis.axis_label", "value=min_s, callback=callback) self.graph_max_slider = Slider(title=\"Sig. Max\", start=min_s, end=max_s, step=1, value=max_s", "color_mapper=color_mapper) hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\"), (\"value\", \"@image\")], renderers=[fig_im])", "add_errorbar(self, x, y, xerr=None, yerr=None, color='red', point_kwargs={}, error_kwargs={}): self.fig.circle(x, y,", "- err, px + err)) x_err_y.append((py, py)) self.fig.multi_line(x_err_x, x_err_y, color=color,", "renderers=[fig_im]) fig.add_tools(hover) #fig, (ax) = plt.subplots(1, 1, figsize=(4, 3), subplot_kw={'projection':", "y='lat', marker='circle', size=15, line_color=\"white\", fill_color=None, alpha=1.0, source=source) labels = LabelSet(x='lon',", "lat = catalog.dec if len(lat) > 0.: pixcrd = w.wcs_world2pix(np.column_stack((lon,", "html_dict class ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\",", "color='red', point_kwargs={}, error_kwargs={}): self.fig.circle(x, y, color=color, **point_kwargs) if xerr 
is", "subplot_kw={'projection': WCS(self.header)}) #im = ax.imshow(self.data, # origin='lower', # zorder=1, #", "height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if x_label is not", "widgetbox,gridplot from bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed import components", "return html_dict class GridPlot(object): def __init__(self,f1,f2,w=None,h=None): self.f1=f1 self.f2=f2 def get_html_draw(self,w=None,h=None):", "pow, object, map, zip) __author__ = \"<NAME>\" import numpy as", "zip(x, y, yerr): y_err_x.append((px, px)) y_err_y.append((py - err, py +", "#curdoc().add_root(grid) #show(grid) #output_file(\"test.html\") script, div = components(grid) html_dict={} html_dict['script']=script html_dict['div']", "**point_kwargs) if xerr is not None: x_err_x = [] x_err_y", "not None: self.fig.xaxis.axis_label = x_label if y_label is not None:", "vmax is None: vmax = self.data[msk].max() min_s = self.data.min() max_s", "plotly.graph_objs import Layout # print('vmin,vmax',vmin,vmax) msk = ~np.isnan(self.data) if vmin", "var vmax = high_slider.value; fig_im.glyph.color_mapper.high = vmax; fig_im.glyph.color_mapper.low = vmin;", "= [] x_err_y = [] for px, py, err in", "__init__(self,f1,f2,w=None,h=None): self.f1=f1 self.f2=f2 def get_html_draw(self,w=None,h=None): #l = layout([self.f1.fig],[self.f2.fig]) grid =", "not None: lon = catalog.ra lat = catalog.dec if len(lat)", "fig.scatter(x='lon', y='lat', marker='circle', size=15, line_color=\"white\", fill_color=None, alpha=1.0, source=source) labels =", "from bokeh.palettes import Plasma256 class Image(object): def __init__(self,data,header): self.data=data self.header=header", "LinearColorMapper(low=min_s, high=max_s, palette=Plasma256) fig_im = fig.image(image=[self.data], x=[0], y=[0], dw=[c *", "* 2 fig = figure(plot_width=w, 
plot_height=h, x_range=(0, c * 0.5),", "plot_height=h) #curdoc().add_root(grid) #show(grid) #output_file(\"test.html\") script, div = components(grid) html_dict={} html_dict['script']=script", "is not None: lon = catalog.ra lat = catalog.dec if", "#from plotly.graph_objs import Layout # print('vmin,vmax',vmin,vmax) msk = ~np.isnan(self.data) if", "attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None, vmax=None): #import", "int, pow, object, map, zip) __author__ = \"<NAME>\" import numpy", "y_err_x.append((px, px)) y_err_y.append((py - err, py + err)) self.fig.multi_line(y_err_x, y_err_y,", "= self.data.shape[0] * 2 c = self.data.shape[1] * 2 fig", "if xerr is not None: x_err_x = [] x_err_y =", "xy=(x, y), color='white') #print(pixcrd[:][msk]) fig.scatter(x='lon', y='lat', marker='circle', size=15, line_color=\"white\", fill_color=None,", "div return html_dict class ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover = HoverTool(tooltips=[(\"x\",", "= self.graph_max_slider #ax.set_xlabel('RA') #ax.set_ylabel('DEC') #ax.grid(True, color='white') #fig.colorbar(im, ax=ax) #plugins.connect(fig, plugins.MousePosition(fontsize=14))", "= CustomJS(args=dict(fig_im=fig_im), code=JS_code_slider) self.graph_min_slider = Slider(title=\"Sig. 
Min\", start=min_s, end=max_s, step=1,", "self.header=header def change_image_contrast(self, attr, old, new): # print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value,", "err in zip(x, y, xerr): x_err_x.append((px - err, px +", "* 0.5), y_range=(0, r * 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w = wcs.WCS(self.header)", "True: # print('plot', plot) # mpld3.show() fig.add_layout(color_bar, 'right') layout =", "from bokeh.plotting import figure from bokeh.palettes import Plasma256 class Image(object):", "print('plot', plot) # mpld3.show() fig.add_layout(color_bar, 'right') layout = row( fig,", "def get_html_draw(self): layout = row( self.fig ) #curdoc().add_root(layout) #show(layout) script,", "hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\")]) self.fig = figure(title=title, width=w,", "Image(object): def __init__(self,data,header): self.data=data self.header=header def change_image_contrast(self, attr, old, new):", "print_function from builtins import (bytes, str, open, super, range, zip,", "import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed import components from bokeh.plotting import", "= components(layout) html_dict = {} html_dict['script'] = script html_dict['div'] =", "if msk[ID]: # # print ('xy',(pixcrd[:, 0][ID], pixcrd[:, 1][ID])) #", "x_err_y.append((py, py)) self.fig.multi_line(x_err_x, x_err_y, color=color, **error_kwargs) if yerr is not", "print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None, vmax=None):", "in zip(x, y, yerr): y_err_x.append((px, px)) y_err_y.append((py - err, py", "* 0.8, callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"] = 
self.graph_min_slider", "not None: y_err_x = [] y_err_y = [] for px,", "\"$x\"), (\"y\", \"$y\")]) self.fig = figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type, x_axis_type=x_axis_type,", "= components(layout) #print ('script',script) #print ('div',div) html_dict = {} html_dict['script']", "px, py, err in zip(x, y, yerr): y_err_x.append((px, px)) y_err_y.append((py", "msk = ~np.isnan(pixcrd[:, 0]) #ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk], 'o', mfc='none')", "py + err)) self.fig.multi_line(y_err_x, y_err_y, color=color, **error_kwargs) def add_step_line(self,x,y,legend=None): #print('a')", "astropy import wcs from bokeh.layouts import row, widgetbox,gridplot from bokeh.models", "= row( fig, widgetbox(self.graph_min_slider, self.graph_max_slider), ) #curdoc().add_root(layout) #output_file(\"slider.html\", title=\"slider.py example\")", "enumerate(pixcrd): # if msk[ID]: # # print ('xy',(pixcrd[:, 0][ID], pixcrd[:,", "yerr): y_err_x.append((px, px)) y_err_y.append((py - err, py + err)) self.fig.multi_line(y_err_x,", "# zorder=1, # interpolation='none', # aspect='equal', # cmap=plt.get_cmap('jet'), # vmin=vmin,", "#print ('div',div) html_dict = {} html_dict['script'] = script html_dict['div'] =", "callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"] = self.graph_min_slider callback.args[\"high_slider\"] =", "Slider(title=\"Sig. 
Max\", start=min_s, end=max_s, step=1, value=max_s * 0.8, callback=callback) self.graph_min_slider.on_change('value',", "in zip(x, y, xerr): x_err_x.append((px - err, px + err))", "\"$x\"), (\"y\", \"$y\"), (\"value\", \"@image\")], renderers=[fig_im]) fig.add_tools(hover) #fig, (ax) =", "is None: vmin = self.data[msk].min() if vmax is None: vmax", "class ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\")])", "components(layout) html_dict = {} html_dict['script'] = script html_dict['div'] = div", "return html_dict class ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover = HoverTool(tooltips=[(\"x\", \"$x\"),", "py, err in zip(x, y, yerr): y_err_x.append((px, px)) y_err_y.append((py -", "GridPlot(object): def __init__(self,f1,f2,w=None,h=None): self.f1=f1 self.f2=f2 def get_html_draw(self,w=None,h=None): #l = layout([self.f1.fig],[self.f2.fig])", "**error_kwargs) def add_step_line(self,x,y,legend=None): #print('a') self.fig.step(x,y,name=legend, mode=\"center\") #print('b') def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color)", "self.data[msk].min() if vmax is None: vmax = self.data[msk].max() min_s =", "vmin=None, vmax=None): #import plotly #import plotly.graph_objs as go #from plotly.graph_objs", "catalog.ra lat = catalog.dec if len(lat) > 0.: pixcrd =", "WCS(self.header)}) #im = ax.imshow(self.data, # origin='lower', # zorder=1, # interpolation='none',", "import row, widgetbox,gridplot from bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed", "vmax = high_slider.value; fig_im.glyph.color_mapper.high = vmax; fig_im.glyph.color_mapper.low = vmin; \"\"\"", "html_dict = {} html_dict['script'] = 
script html_dict['div'] = div return", "figure from bokeh.palettes import Plasma256 class Image(object): def __init__(self,data,header): self.data=data", "None: vmax = self.data[msk].max() min_s = self.data.min() max_s = self.data.max()", "old, new): # print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None, catalog=None,", "\"$y\"), (\"value\", \"@image\")], renderers=[fig_im]) fig.add_tools(hover) #fig, (ax) = plt.subplots(1, 1,", "bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed import components from bokeh.plotting", "callback.args[\"low_slider\"] = self.graph_min_slider callback.args[\"high_slider\"] = self.graph_max_slider #ax.set_xlabel('RA') #ax.set_ylabel('DEC') #ax.grid(True, color='white')", "= fig.image(image=[self.data], x=[0], y=[0], dw=[c * 0.5], dh=[r * 0.5],", "yerr=None, color='red', point_kwargs={}, error_kwargs={}): self.fig.circle(x, y, color=color, **point_kwargs) if xerr", "fig_im = fig.image(image=[self.data], x=[0], y=[0], dw=[c * 0.5], dh=[r *", "wcs from bokeh.layouts import row, widgetbox,gridplot from bokeh.models import CustomJS,", "def change_image_contrast(self, attr, old, new): # print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value)", "* 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w = wcs.WCS(self.header) color_mapper = LinearColorMapper(low=min_s, high=max_s,", "= x_label if y_label is not None: self.fig.yaxis.axis_label = y_label", "= HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\"), (\"value\", \"@image\")], renderers=[fig_im]) fig.add_tools(hover) #fig,", "catalog[msk] color_bar = ColorBar(color_mapper=color_mapper, label_standoff=12, border_line_color=None, location=(0, 0)) JS_code_slider =", "self.fig.xaxis.axis_label = x_label if y_label is not None: 
self.fig.yaxis.axis_label =", "fig_im.glyph.color_mapper.high = vmax; fig_im.glyph.color_mapper.low = vmin; \"\"\" callback = CustomJS(args=dict(fig_im=fig_im),", "\"@image\")], renderers=[fig_im]) fig.add_tools(hover) #fig, (ax) = plt.subplots(1, 1, figsize=(4, 3),", "**error_kwargs) if yerr is not None: y_err_x = [] y_err_y", "#show(layout) script, div = components(layout) html_dict = {} html_dict['script'] =", "vmax; fig_im.glyph.color_mapper.low = vmin; \"\"\" callback = CustomJS(args=dict(fig_im=fig_im), code=JS_code_slider) self.graph_min_slider", "'right') layout = row( fig, widgetbox(self.graph_min_slider, self.graph_max_slider), ) #curdoc().add_root(layout) #output_file(\"slider.html\",", "grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h) #curdoc().add_root(grid) #show(grid) #output_file(\"test.html\") script, div =", "# # print ('xy',(pixcrd[:, 0][ID], pixcrd[:, 1][ID])) # ax.annotate('%s' %", "# if msk[ID]: # # print ('xy',(pixcrd[:, 0][ID], pixcrd[:, 1][ID]))", "lat)), 0) msk = ~np.isnan(pixcrd[:, 0]) #ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk],", "None: lon = catalog.ra lat = catalog.dec if len(lat) >", "vmin=vmin, # vmax=vmax) if catalog is not None: lon =", "mpld3.show() fig.add_layout(color_bar, 'right') layout = row( fig, widgetbox(self.graph_min_slider, self.graph_max_slider), )", "r = self.data.shape[0] * 2 c = self.data.shape[1] * 2", "bokeh.layouts import row, widgetbox,gridplot from bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from", "from __future__ import absolute_import, division, print_function from builtins import (bytes,", "def __init__(self,data,header): self.data=data self.header=header def change_image_contrast(self, attr, old, new): #", "= self.data.min() max_s = self.data.max() r = self.data.shape[0] * 2", "get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None, vmax=None): #import plotly #import plotly.graph_objs as", "y) in 
enumerate(pixcrd): # if msk[ID]: # # print ('xy',(pixcrd[:,", "new): # print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None, catalog=None, plot=False,", "self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"] = self.graph_min_slider callback.args[\"high_slider\"] = self.graph_max_slider #ax.set_xlabel('RA')", "get_html_draw(self): layout = row( self.fig ) #curdoc().add_root(layout) #show(layout) script, div", "self.fig.multi_line(x_err_x, x_err_y, color=color, **error_kwargs) if yerr is not None: y_err_x", "script, div = components(layout) html_dict = {} html_dict['script'] = script", "LabelSet(x='lon', y='lat', text='names', level='glyph', x_offset=5, y_offset=5, render_mode='canvas', source=source, text_color='white') fig.add_layout(labels)", "html_dict['div'] = div return html_dict class ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover", "error_kwargs={}): self.fig.circle(x, y, color=color, **point_kwargs) if xerr is not None:", "def add_step_line(self,x,y,legend=None): #print('a') self.fig.step(x,y,name=legend, mode=\"center\") #print('b') def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def", "tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if x_label is not None: self.fig.xaxis.axis_label =", "hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\"), (\"value\", \"@image\")], renderers=[fig_im]) fig.add_tools(hover)", "def __init__(self,f1,f2,w=None,h=None): self.f1=f1 self.f2=f2 def get_html_draw(self,w=None,h=None): #l = layout([self.f1.fig],[self.f2.fig]) grid", "= high_slider.value; fig_im.glyph.color_mapper.high = vmax; fig_im.glyph.color_mapper.low = vmin; \"\"\" callback", "\"\"\" var vmin = 
low_slider.value; var vmax = high_slider.value; fig_im.glyph.color_mapper.high", "def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self): layout = row( self.fig )", "3), subplot_kw={'projection': WCS(self.header)}) #im = ax.imshow(self.data, # origin='lower', # zorder=1,", "html_dict['script'] = script html_dict['div'] = div return html_dict class GridPlot(object):", "change_image_contrast(self, attr, old, new): # print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def", "callback=callback) self.graph_max_slider = Slider(title=\"Sig. Max\", start=min_s, end=max_s, step=1, value=max_s *", "#ax.set_ylabel('DEC') #ax.grid(True, color='white') #fig.colorbar(im, ax=ax) #plugins.connect(fig, plugins.MousePosition(fontsize=14)) #if plot ==", "vmax = self.data[msk].max() min_s = self.data.min() max_s = self.data.max() r", "x_err_y = [] for px, py, err in zip(x, y,", "palette=Plasma256) fig_im = fig.image(image=[self.data], x=[0], y=[0], dw=[c * 0.5], dh=[r", "fig.add_layout(color_bar, 'right') layout = row( fig, widgetbox(self.graph_min_slider, self.graph_max_slider), ) #curdoc().add_root(layout)", "self.fig.circle(x, y, color=color, **point_kwargs) if xerr is not None: x_err_x", "color=color, **error_kwargs) def add_step_line(self,x,y,legend=None): #print('a') self.fig.step(x,y,name=legend, mode=\"center\") #print('b') def add_line(self,x,y,legend=None,color=None):", "('script',script) #print ('div',div) html_dict = {} html_dict['script'] = script html_dict['div']", "HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\")]) self.fig = figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type,", "self.f2=f2 def get_html_draw(self,w=None,h=None): #l = layout([self.f1.fig],[self.f2.fig]) grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h)", "fig.image(image=[self.data], x=[0], 
y=[0], dw=[c * 0.5], dh=[r * 0.5], color_mapper=color_mapper)", "def get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None, vmax=None): #import plotly #import plotly.graph_objs", "self.fig.multi_line(y_err_x, y_err_y, color=color, **error_kwargs) def add_step_line(self,x,y,legend=None): #print('a') self.fig.step(x,y,name=legend, mode=\"center\") #print('b')", "ID, (x, y) in enumerate(pixcrd): # if msk[ID]: # #", "x_label if y_label is not None: self.fig.yaxis.axis_label = y_label def", "print('vmin,vmax',vmin,vmax) msk = ~np.isnan(self.data) if vmin is None: vmin =", "def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\")]) self.fig =", "figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if x_label", "pixcrd[:, 1][ID])) # ax.annotate('%s' % catalog.name[ID], xy=(x, y), color='white') #print(pixcrd[:][msk])", "y_offset=5, render_mode='canvas', source=source, text_color='white') fig.add_layout(labels) #print'cat', catalog[msk] color_bar = ColorBar(color_mapper=color_mapper,", "division, print_function from builtins import (bytes, str, open, super, range,", "np from astropy import wcs from bokeh.layouts import row, widgetbox,gridplot", "tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w = wcs.WCS(self.header) color_mapper = LinearColorMapper(low=min_s, high=max_s, palette=Plasma256) fig_im", "plotly.graph_objs as go #from plotly.graph_objs import Layout # print('vmin,vmax',vmin,vmax) msk", "def add_errorbar(self, x, y, xerr=None, yerr=None, color='red', point_kwargs={}, error_kwargs={}): self.fig.circle(x,", "fig.add_tools(hover) #fig, (ax) = plt.subplots(1, 1, figsize=(4, 3), subplot_kw={'projection': WCS(self.header)})", "= LinearColorMapper(low=min_s, 
high=max_s, palette=Plasma256) fig_im = fig.image(image=[self.data], x=[0], y=[0], dw=[c", "open, super, range, zip, round, input, int, pow, object, map,", "# interpolation='none', # aspect='equal', # cmap=plt.get_cmap('jet'), # vmin=vmin, # vmax=vmax)", "plot == True: # print('plot', plot) # mpld3.show() fig.add_layout(color_bar, 'right')", "= wcs.WCS(self.header) color_mapper = LinearColorMapper(low=min_s, high=max_s, palette=Plasma256) fig_im = fig.image(image=[self.data],", "__init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\")]) self.fig = figure(title=title,", "#for ID, (x, y) in enumerate(pixcrd): # if msk[ID]: #", "('div',div) html_dict = {} html_dict['script'] = script html_dict['div'] = div", "if catalog is not None: lon = catalog.ra lat =", "y_err_y, color=color, **error_kwargs) def add_step_line(self,x,y,legend=None): #print('a') self.fig.step(x,y,name=legend, mode=\"center\") #print('b') def", "from bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed import components from", "#ax.grid(True, color='white') #fig.colorbar(im, ax=ax) #plugins.connect(fig, plugins.MousePosition(fontsize=14)) #if plot == True:", "0.5), y_range=(0, r * 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w = wcs.WCS(self.header) color_mapper", "catalog is not None: lon = catalog.ra lat = catalog.dec", "#from bokeh.io import show #show(layout) script, div = components(layout) html_dict", "(\"value\", \"@image\")], renderers=[fig_im]) fig.add_tools(hover) #fig, (ax) = plt.subplots(1, 1, figsize=(4,", ") if x_label is not None: self.fig.xaxis.axis_label = x_label if", "figure(plot_width=w, plot_height=h, x_range=(0, c * 0.5), y_range=(0, r * 0.5),", "zip(x, y, xerr): x_err_x.append((px - err, px + err)) x_err_y.append((py,", "wcs.WCS(self.header) color_mapper = 
LinearColorMapper(low=min_s, high=max_s, palette=Plasma256) fig_im = fig.image(image=[self.data], x=[0],", "self.graph_max_slider = Slider(title=\"Sig. Max\", start=min_s, end=max_s, step=1, value=max_s * 0.8,", "print ('xy',(pixcrd[:, 0][ID], pixcrd[:, 1][ID])) # ax.annotate('%s' % catalog.name[ID], xy=(x,", "in enumerate(pixcrd): # if msk[ID]: # # print ('xy',(pixcrd[:, 0][ID],", "color=color, **point_kwargs) if xerr is not None: x_err_x = []", "#ax.set_xlabel('RA') #ax.set_ylabel('DEC') #ax.grid(True, color='white') #fig.colorbar(im, ax=ax) #plugins.connect(fig, plugins.MousePosition(fontsize=14)) #if plot", "+ err)) self.fig.multi_line(y_err_x, y_err_y, color=color, **error_kwargs) def add_step_line(self,x,y,legend=None): #print('a') self.fig.step(x,y,name=legend,", "from builtins import (bytes, str, open, super, range, zip, round,", "html_dict['script'] = script html_dict['div'] = div return html_dict class ScatterPlot(object):", "as np from astropy import wcs from bokeh.layouts import row,", "#l = layout([self.f1.fig],[self.f2.fig]) grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h) #curdoc().add_root(grid) #show(grid) #output_file(\"test.html\")", "'o', mfc='none') source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5, lat=pixcrd[:, 1][msk]+0.5, names=catalog.name[msk])) #for", "Plasma256 class Image(object): def __init__(self,data,header): self.data=data self.header=header def change_image_contrast(self, attr,", "if x_label is not None: self.fig.xaxis.axis_label = x_label if y_label", "y_err_x = [] y_err_y = [] for px, py, err", "#show(grid) #output_file(\"test.html\") script, div = components(grid) html_dict={} html_dict['script']=script html_dict['div'] =", "self.fig.yaxis.axis_label = y_label def add_errorbar(self, x, y, xerr=None, yerr=None, color='red',", "x_err_y, color=color, **error_kwargs) if yerr is not None: y_err_x =", "if len(lat) > 0.: pixcrd = w.wcs_world2pix(np.column_stack((lon, lat)), 0) msk", "r * 0.5), 
tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w = wcs.WCS(self.header) color_mapper = LinearColorMapper(low=min_s,", "None: self.fig.yaxis.axis_label = y_label def add_errorbar(self, x, y, xerr=None, yerr=None,", "# mpld3.show() fig.add_layout(color_bar, 'right') layout = row( fig, widgetbox(self.graph_min_slider, self.graph_max_slider),", "= row( self.fig ) #curdoc().add_root(layout) #show(layout) script, div = components(layout)", "2 fig = figure(plot_width=w, plot_height=h, x_range=(0, c * 0.5), y_range=(0,", "fig = figure(plot_width=w, plot_height=h, x_range=(0, c * 0.5), y_range=(0, r", "layout = row( self.fig ) #curdoc().add_root(layout) #show(layout) script, div =", "~np.isnan(pixcrd[:, 0]) #ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk], 'o', mfc='none') source =", "level='glyph', x_offset=5, y_offset=5, render_mode='canvas', source=source, text_color='white') fig.add_layout(labels) #print'cat', catalog[msk] color_bar", "as go #from plotly.graph_objs import Layout # print('vmin,vmax',vmin,vmax) msk =", "= layout([self.f1.fig],[self.f2.fig]) grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h) #curdoc().add_root(grid) #show(grid) #output_file(\"test.html\") script,", "import figure from bokeh.palettes import Plasma256 class Image(object): def __init__(self,data,header):", "0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w = wcs.WCS(self.header) color_mapper = LinearColorMapper(low=min_s, high=max_s, palette=Plasma256)", "step=1, value=min_s, callback=callback) self.graph_max_slider = Slider(title=\"Sig. 
Max\", start=min_s, end=max_s, step=1,", "- err, py + err)) self.fig.multi_line(y_err_x, y_err_y, color=color, **error_kwargs) def", "y), color='white') #print(pixcrd[:][msk]) fig.scatter(x='lon', y='lat', marker='circle', size=15, line_color=\"white\", fill_color=None, alpha=1.0,", "msk[ID]: # # print ('xy',(pixcrd[:, 0][ID], pixcrd[:, 1][ID])) # ax.annotate('%s'", "catalog.name[ID], xy=(x, y), color='white') #print(pixcrd[:][msk]) fig.scatter(x='lon', y='lat', marker='circle', size=15, line_color=\"white\",", "* 0.5], dh=[r * 0.5], color_mapper=color_mapper) hover = HoverTool(tooltips=[(\"x\", \"$x\"),", "from bokeh.embed import components from bokeh.plotting import figure from bokeh.palettes", "px, py, err in zip(x, y, xerr): x_err_x.append((px - err,", "None: y_err_x = [] y_err_y = [] for px, py,", "origin='lower', # zorder=1, # interpolation='none', # aspect='equal', # cmap=plt.get_cmap('jet'), #", "widgetbox(self.graph_min_slider, self.graph_max_slider), ) #curdoc().add_root(layout) #output_file(\"slider.html\", title=\"slider.py example\") #from bokeh.io import", "= self.data.shape[1] * 2 fig = figure(plot_width=w, plot_height=h, x_range=(0, c", "names=catalog.name[msk])) #for ID, (x, y) in enumerate(pixcrd): # if msk[ID]:", "#output_file(\"slider.html\", title=\"slider.py example\") #from bokeh.io import show #show(layout) script, div", "px)) y_err_y.append((py - err, py + err)) self.fig.multi_line(y_err_x, y_err_y, color=color,", "# ax.annotate('%s' % catalog.name[ID], xy=(x, y), color='white') #print(pixcrd[:][msk]) fig.scatter(x='lon', y='lat',", "script html_dict['div'] = div return html_dict class ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'):", "width=w, height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if x_label is", "alpha=1.0, 
source=source) labels = LabelSet(x='lon', y='lat', text='names', level='glyph', x_offset=5, y_offset=5,", "(\"y\", \"$y\"), (\"value\", \"@image\")], renderers=[fig_im]) fig.add_tools(hover) #fig, (ax) = plt.subplots(1,", "numpy as np from astropy import wcs from bokeh.layouts import", "self.change_image_contrast) callback.args[\"low_slider\"] = self.graph_min_slider callback.args[\"high_slider\"] = self.graph_max_slider #ax.set_xlabel('RA') #ax.set_ylabel('DEC') #ax.grid(True,", "ax.annotate('%s' % catalog.name[ID], xy=(x, y), color='white') #print(pixcrd[:][msk]) fig.scatter(x='lon', y='lat', marker='circle',", "ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\")]) self.fig", "= vmax; fig_im.glyph.color_mapper.low = vmin; \"\"\" callback = CustomJS(args=dict(fig_im=fig_im), code=JS_code_slider)", "err, px + err)) x_err_y.append((py, py)) self.fig.multi_line(x_err_x, x_err_y, color=color, **error_kwargs)", "[] for px, py, err in zip(x, y, xerr): x_err_x.append((px", "#plugins.connect(fig, plugins.MousePosition(fontsize=14)) #if plot == True: # print('plot', plot) #", "bokeh.embed import components from bokeh.plotting import figure from bokeh.palettes import", "from astropy import wcs from bokeh.layouts import row, widgetbox,gridplot from", "= {} html_dict['script'] = script html_dict['div'] = div return html_dict", "div = components(layout) html_dict = {} html_dict['script'] = script html_dict['div']", "= ColorBar(color_mapper=color_mapper, label_standoff=12, border_line_color=None, location=(0, 0)) JS_code_slider = \"\"\" var", "1][msk], 'o', mfc='none') source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5, lat=pixcrd[:, 1][msk]+0.5, names=catalog.name[msk]))", "color='white') #print(pixcrd[:][msk]) fig.scatter(x='lon', y='lat', marker='circle', size=15, line_color=\"white\", fill_color=None, 
alpha=1.0, source=source)", "vmin = low_slider.value; var vmax = high_slider.value; fig_im.glyph.color_mapper.high = vmax;", "#curdoc().add_root(layout) #output_file(\"slider.html\", title=\"slider.py example\") #from bokeh.io import show #show(layout) script,", "0) msk = ~np.isnan(pixcrd[:, 0]) #ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk], 'o',", "super, range, zip, round, input, int, pow, object, map, zip)", "xerr=None, yerr=None, color='red', point_kwargs={}, error_kwargs={}): self.fig.circle(x, y, color=color, **point_kwargs) if", "plot) # mpld3.show() fig.add_layout(color_bar, 'right') layout = row( fig, widgetbox(self.graph_min_slider,", "y, xerr): x_err_x.append((px - err, px + err)) x_err_y.append((py, py))", "script, div = components(layout) #print ('script',script) #print ('div',div) html_dict =", "w.wcs_world2pix(np.column_stack((lon, lat)), 0) msk = ~np.isnan(pixcrd[:, 0]) #ax.plot(pixcrd[:, 0][msk], pixcrd[:,", "cmap=plt.get_cmap('jet'), # vmin=vmin, # vmax=vmax) if catalog is not None:", "text_color='white') fig.add_layout(labels) #print'cat', catalog[msk] color_bar = ColorBar(color_mapper=color_mapper, label_standoff=12, border_line_color=None, location=(0,", "self.data.shape[1] * 2 fig = figure(plot_width=w, plot_height=h, x_range=(0, c *", "0][msk]+0.5, lat=pixcrd[:, 1][msk]+0.5, names=catalog.name[msk])) #for ID, (x, y) in enumerate(pixcrd):", "= self.data.max() r = self.data.shape[0] * 2 c = self.data.shape[1]", "0.: pixcrd = w.wcs_world2pix(np.column_stack((lon, lat)), 0) msk = ~np.isnan(pixcrd[:, 0])", "self.graph_min_slider callback.args[\"high_slider\"] = self.graph_max_slider #ax.set_xlabel('RA') #ax.set_ylabel('DEC') #ax.grid(True, color='white') #fig.colorbar(im, ax=ax)", "for px, py, err in zip(x, y, xerr): x_err_x.append((px -", "x_range=(0, c * 0.5), y_range=(0, r * 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w", "px + err)) x_err_y.append((py, py)) self.fig.multi_line(x_err_x, x_err_y, color=color, 
**error_kwargs) if", "c * 0.5), y_range=(0, r * 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w =", "bokeh.palettes import Plasma256 class Image(object): def __init__(self,data,header): self.data=data self.header=header def", "zip) __author__ = \"<NAME>\" import numpy as np from astropy", "not None: x_err_x = [] x_err_y = [] for px,", "= figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if", "self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self): layout = row( self.fig ) #curdoc().add_root(layout) #show(layout)", ") #curdoc().add_root(layout) #show(layout) script, div = components(layout) #print ('script',script) #print", "= div return html_dict class GridPlot(object): def __init__(self,f1,f2,w=None,h=None): self.f1=f1 self.f2=f2", "xerr): x_err_x.append((px - err, px + err)) x_err_y.append((py, py)) self.fig.multi_line(x_err_x,", "len(lat) > 0.: pixcrd = w.wcs_world2pix(np.column_stack((lon, lat)), 0) msk =", "#show(layout) script, div = components(layout) #print ('script',script) #print ('div',div) html_dict", "components(layout) #print ('script',script) #print ('div',div) html_dict = {} html_dict['script'] =", "yerr is not None: y_err_x = [] y_err_y = []", "#output_file(\"test.html\") script, div = components(grid) html_dict={} html_dict['script']=script html_dict['div'] = div", "# vmax=vmax) if catalog is not None: lon = catalog.ra", ") #curdoc().add_root(layout) #output_file(\"slider.html\", title=\"slider.py example\") #from bokeh.io import show #show(layout)", "0]) #ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk], 'o', mfc='none') source = ColumnDataSource(data=dict(lon=pixcrd[:,", "#fig, (ax) = plt.subplots(1, 1, figsize=(4, 3), subplot_kw={'projection': WCS(self.header)}) #im", "script html_dict['div'] = div return html_dict class GridPlot(object): def 
class GridPlot(object):
    """Stack two plot wrappers vertically in a single bokeh grid layout."""

    def __init__(self, f1, f2, w=None, h=None):
        # f1, f2: objects exposing a ``.fig`` bokeh figure (e.g. ScatterPlot).
        # w, h are accepted for interface compatibility but sizing is
        # applied in get_html_draw.
        self.f1 = f1
        self.f2 = f2

    def get_html_draw(self, w=None, h=None):
        """Return the embeddable components for the two stacked figures.

        Returns
        -------
        dict with 'script' and 'div' from ``bokeh.embed.components``.
        """
        stacked = gridplot([self.f1.fig, self.f2.fig], ncols=1,
                           plot_width=w, plot_height=h)
        script, div = components(stacked)
        return {'script': script, 'div': div}
Max\", start=min_s,", "mode=\"center\") #print('b') def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self): layout = row(", "pixcrd[:, 1][msk], 'o', mfc='none') source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5, lat=pixcrd[:, 1][msk]+0.5,", "#print(pixcrd[:][msk]) fig.scatter(x='lon', y='lat', marker='circle', size=15, line_color=\"white\", fill_color=None, alpha=1.0, source=source) labels", "#print('a') self.fig.step(x,y,name=legend, mode=\"center\") #print('b') def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self): layout", "self.data.min() max_s = self.data.max() r = self.data.shape[0] * 2 c", "#print ('script',script) #print ('div',div) html_dict = {} html_dict['script'] = script", "bokeh.plotting import figure from bokeh.palettes import Plasma256 class Image(object): def", "(x, y) in enumerate(pixcrd): # if msk[ID]: # # print", "= y_label def add_errorbar(self, x, y, xerr=None, yerr=None, color='red', point_kwargs={},", "c = self.data.shape[1] * 2 fig = figure(plot_width=w, plot_height=h, x_range=(0,", "# print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None,", "class GridPlot(object): def __init__(self,f1,f2,w=None,h=None): self.f1=f1 self.f2=f2 def get_html_draw(self,w=None,h=None): #l =", "aspect='equal', # cmap=plt.get_cmap('jet'), # vmin=vmin, # vmax=vmax) if catalog is", "self.f1=f1 self.f2=f2 def get_html_draw(self,w=None,h=None): #l = layout([self.f1.fig],[self.f2.fig]) grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w,", "== True: # print('plot', plot) # mpld3.show() fig.add_layout(color_bar, 'right') layout", "x, y, xerr=None, yerr=None, color='red', point_kwargs={}, error_kwargs={}): self.fig.circle(x, y, color=color,", "is not None: x_err_x = [] 
x_err_y = [] for", "self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None, vmax=None): #import plotly", "2 c = self.data.shape[1] * 2 fig = figure(plot_width=w, plot_height=h,", "size=15, line_color=\"white\", fill_color=None, alpha=1.0, source=source) labels = LabelSet(x='lon', y='lat', text='names',", "x_err_x.append((px - err, px + err)) x_err_y.append((py, py)) self.fig.multi_line(x_err_x, x_err_y,", "is not None: y_err_x = [] y_err_y = [] for", "= ~np.isnan(pixcrd[:, 0]) #ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk], 'o', mfc='none') source", "go #from plotly.graph_objs import Layout # print('vmin,vmax',vmin,vmax) msk = ~np.isnan(self.data)", "err)) x_err_y.append((py, py)) self.fig.multi_line(x_err_x, x_err_y, color=color, **error_kwargs) if yerr is", "msk = ~np.isnan(self.data) if vmin is None: vmin = self.data[msk].min()", "y='lat', text='names', level='glyph', x_offset=5, y_offset=5, render_mode='canvas', source=source, text_color='white') fig.add_layout(labels) #print'cat',", "y_range=(0, r * 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']) w = wcs.WCS(self.header) color_mapper =", "row( self.fig ) #curdoc().add_root(layout) #show(layout) script, div = components(layout) #print", "if vmax is None: vmax = self.data[msk].max() min_s = self.data.min()", "#ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk], 'o', mfc='none') source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5,", "None: x_err_x = [] x_err_y = [] for px, py,", "max_s = self.data.max() r = self.data.shape[0] * 2 c =", "color='white') #fig.colorbar(im, ax=ax) #plugins.connect(fig, plugins.MousePosition(fontsize=14)) #if plot == True: #", "builtins import (bytes, str, open, super, range, zip, round, input,", "0.5], color_mapper=color_mapper) hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\"), (\"value\", \"@image\")],", 
"#curdoc().add_root(layout) #show(layout) script, div = components(layout) #print ('script',script) #print ('div',div)", "attr, old, new): # print attr,old,new self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None,", "fig_im.glyph.color_mapper.low = vmin; \"\"\" callback = CustomJS(args=dict(fig_im=fig_im), code=JS_code_slider) self.graph_min_slider =", "= self.graph_min_slider callback.args[\"high_slider\"] = self.graph_max_slider #ax.set_xlabel('RA') #ax.set_ylabel('DEC') #ax.grid(True, color='white') #fig.colorbar(im,", "script, div = components(grid) html_dict={} html_dict['script']=script html_dict['div'] = div return", "vmin = self.data[msk].min() if vmax is None: vmax = self.data[msk].max()", "= catalog.dec if len(lat) > 0.: pixcrd = w.wcs_world2pix(np.column_stack((lon, lat)),", "y_label is not None: self.fig.yaxis.axis_label = y_label def add_errorbar(self, x,", "round, input, int, pow, object, map, zip) __author__ = \"<NAME>\"", "mfc='none') source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5, lat=pixcrd[:, 1][msk]+0.5, names=catalog.name[msk])) #for ID,", "y_err_y = [] for px, py, err in zip(x, y,", "range, zip, round, input, int, pow, object, map, zip) __author__", "0.8, callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"] = self.graph_min_slider callback.args[\"high_slider\"]", "= script html_dict['div'] = div return html_dict class ScatterPlot(object): def", "lat=pixcrd[:, 1][msk]+0.5, names=catalog.name[msk])) #for ID, (x, y) in enumerate(pixcrd): #", "\"<NAME>\" import numpy as np from astropy import wcs from", "= catalog.ra lat = catalog.dec if len(lat) > 0.: pixcrd", "y, color=color, **point_kwargs) if xerr is not None: x_err_x =", "import (bytes, str, open, super, range, zip, round, input, int,", "import 
absolute_import, division, print_function from builtins import (bytes, str, open,", "if vmin is None: vmin = self.data[msk].min() if vmax is", "__init__(self,data,header): self.data=data self.header=header def change_image_contrast(self, attr, old, new): # print", "x=[0], y=[0], dw=[c * 0.5], dh=[r * 0.5], color_mapper=color_mapper) hover", "add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self): layout = row( self.fig ) #curdoc().add_root(layout)", "HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\"), (\"value\", \"@image\")], renderers=[fig_im]) fig.add_tools(hover) #fig, (ax)", "= Slider(title=\"Sig. Min\", start=min_s, end=max_s, step=1, value=min_s, callback=callback) self.graph_max_slider =", "#if plot == True: # print('plot', plot) # mpld3.show() fig.add_layout(color_bar,", "x_label is not None: self.fig.xaxis.axis_label = x_label if y_label is", "map, zip) __author__ = \"<NAME>\" import numpy as np from", "import show #show(layout) script, div = components(layout) html_dict = {}", "CustomJS(args=dict(fig_im=fig_im), code=JS_code_slider) self.graph_min_slider = Slider(title=\"Sig. Min\", start=min_s, end=max_s, step=1, value=min_s,", "1][ID])) # ax.annotate('%s' % catalog.name[ID], xy=(x, y), color='white') #print(pixcrd[:][msk]) fig.scatter(x='lon',", "is not None: self.fig.xaxis.axis_label = x_label if y_label is not", "end=max_s, step=1, value=max_s * 0.8, callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast)", "source=source) labels = LabelSet(x='lon', y='lat', text='names', level='glyph', x_offset=5, y_offset=5, render_mode='canvas',", "Min\", start=min_s, end=max_s, step=1, value=min_s, callback=callback) self.graph_max_slider = Slider(title=\"Sig. 
Max\",", "div = components(layout) #print ('script',script) #print ('div',div) html_dict = {}", "self.fig.step(x,y,name=legend, mode=\"center\") #print('b') def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self): layout =", "vmin; \"\"\" callback = CustomJS(args=dict(fig_im=fig_im), code=JS_code_slider) self.graph_min_slider = Slider(title=\"Sig. Min\",", "x_axis_type=x_axis_type, tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if x_label is not None: self.fig.xaxis.axis_label", "y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if x_label is not None:", "vmin is None: vmin = self.data[msk].min() if vmax is None:", "self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"] = self.graph_min_slider callback.args[\"high_slider\"] = self.graph_max_slider", "= ax.imshow(self.data, # origin='lower', # zorder=1, # interpolation='none', # aspect='equal',", "= [] for px, py, err in zip(x, y, yerr):", "1, figsize=(4, 3), subplot_kw={'projection': WCS(self.header)}) #im = ax.imshow(self.data, # origin='lower',", "low_slider.value; var vmax = high_slider.value; fig_im.glyph.color_mapper.high = vmax; fig_im.glyph.color_mapper.low =", "= self.data[msk].min() if vmax is None: vmax = self.data[msk].max() min_s", "= \"<NAME>\" import numpy as np from astropy import wcs", "self.data.shape[0] * 2 c = self.data.shape[1] * 2 fig =", "catalog=None, plot=False, vmin=None, vmax=None): #import plotly #import plotly.graph_objs as go", "if yerr is not None: y_err_x = [] y_err_y =", "(\"y\", \"$y\")]) self.fig = figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover,", "= script html_dict['div'] = div return html_dict class GridPlot(object): 
def", "fill_color=None, alpha=1.0, source=source) labels = LabelSet(x='lon', y='lat', text='names', level='glyph', x_offset=5,", "xerr is not None: x_err_x = [] x_err_y = []", "* 2 c = self.data.shape[1] * 2 fig = figure(plot_width=w,", "= div return html_dict class ScatterPlot(object): def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'): hover =", "step=1, value=max_s * 0.8, callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"]", "import numpy as np from astropy import wcs from bokeh.layouts", "* 0.5], color_mapper=color_mapper) hover = HoverTool(tooltips=[(\"x\", \"$x\"), (\"y\", \"$y\"), (\"value\",", "0)) JS_code_slider = \"\"\" var vmin = low_slider.value; var vmax", "color_mapper = LinearColorMapper(low=min_s, high=max_s, palette=Plasma256) fig_im = fig.image(image=[self.data], x=[0], y=[0],", "plot=False, vmin=None, vmax=None): #import plotly #import plotly.graph_objs as go #from", "source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5, lat=pixcrd[:, 1][msk]+0.5, names=catalog.name[msk])) #for ID, (x,", "[] x_err_y = [] for px, py, err in zip(x,", "pixcrd = w.wcs_world2pix(np.column_stack((lon, lat)), 0) msk = ~np.isnan(pixcrd[:, 0]) #ax.plot(pixcrd[:,", "'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] ) if x_label is not None: self.fig.xaxis.axis_label = x_label", "vmax=vmax) if catalog is not None: lon = catalog.ra lat", "high=self.graph_max_slider.value) def get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None, vmax=None): #import plotly #import", "(ax) = plt.subplots(1, 1, figsize=(4, 3), subplot_kw={'projection': WCS(self.header)}) #im =", "end=max_s, step=1, value=min_s, callback=callback) self.graph_max_slider = Slider(title=\"Sig. 
Max\", start=min_s, end=max_s,", "y_err_y.append((py - err, py + err)) self.fig.multi_line(y_err_x, y_err_y, color=color, **error_kwargs)", "color_bar = ColorBar(color_mapper=color_mapper, label_standoff=12, border_line_color=None, location=(0, 0)) JS_code_slider = \"\"\"", "dw=[c * 0.5], dh=[r * 0.5], color_mapper=color_mapper) hover = HoverTool(tooltips=[(\"x\",", "layout([self.f1.fig],[self.f2.fig]) grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h) #curdoc().add_root(grid) #show(grid) #output_file(\"test.html\") script, div", "label_standoff=12, border_line_color=None, location=(0, 0)) JS_code_slider = \"\"\" var vmin =", "fig, widgetbox(self.graph_min_slider, self.graph_max_slider), ) #curdoc().add_root(layout) #output_file(\"slider.html\", title=\"slider.py example\") #from bokeh.io", "self.fig = figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range, y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'] )", "import Layout # print('vmin,vmax',vmin,vmax) msk = ~np.isnan(self.data) if vmin is", "self.graph_max_slider #ax.set_xlabel('RA') #ax.set_ylabel('DEC') #ax.grid(True, color='white') #fig.colorbar(im, ax=ax) #plugins.connect(fig, plugins.MousePosition(fontsize=14)) #if", "components from bokeh.plotting import figure from bokeh.palettes import Plasma256 class", "render_mode='canvas', source=source, text_color='white') fig.add_layout(labels) #print'cat', catalog[msk] color_bar = ColorBar(color_mapper=color_mapper, label_standoff=12,", "= low_slider.value; var vmax = high_slider.value; fig_im.glyph.color_mapper.high = vmax; fig_im.glyph.color_mapper.low", "Slider(title=\"Sig. 
Min\", start=min_s, end=max_s, step=1, value=min_s, callback=callback) self.graph_max_slider = Slider(title=\"Sig.", "border_line_color=None, location=(0, 0)) JS_code_slider = \"\"\" var vmin = low_slider.value;", "py, err in zip(x, y, xerr): x_err_x.append((px - err, px", "add_step_line(self,x,y,legend=None): #print('a') self.fig.step(x,y,name=legend, mode=\"center\") #print('b') def add_line(self,x,y,legend=None,color=None): self.fig.line(x,y,legend=legend,line_color=color) def get_html_draw(self):", "input, int, pow, object, map, zip) __author__ = \"<NAME>\" import", "> 0.: pixcrd = w.wcs_world2pix(np.column_stack((lon, lat)), 0) msk = ~np.isnan(pixcrd[:,", "Max\", start=min_s, end=max_s, step=1, value=max_s * 0.8, callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast)", "text='names', level='glyph', x_offset=5, y_offset=5, render_mode='canvas', source=source, text_color='white') fig.add_layout(labels) #print'cat', catalog[msk]", "Layout # print('vmin,vmax',vmin,vmax) msk = ~np.isnan(self.data) if vmin is None:", "row, widgetbox,gridplot from bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed import", "div = components(grid) html_dict={} html_dict['script']=script html_dict['div'] = div return html_dict", "plt.subplots(1, 1, figsize=(4, 3), subplot_kw={'projection': WCS(self.header)}) #im = ax.imshow(self.data, #", "y=[0], dw=[c * 0.5], dh=[r * 0.5], color_mapper=color_mapper) hover =", "source=source, text_color='white') fig.add_layout(labels) #print'cat', catalog[msk] color_bar = ColorBar(color_mapper=color_mapper, label_standoff=12, border_line_color=None,", "Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource from bokeh.embed import components from bokeh.plotting import figure from", "py)) self.fig.multi_line(x_err_x, x_err_y, color=color, **error_kwargs) if yerr is not None:", "self.data[msk].max() min_s = self.data.min() max_s = 
self.data.max() r = self.data.shape[0]", "#import plotly.graph_objs as go #from plotly.graph_objs import Layout # print('vmin,vmax',vmin,vmax)", "plotly #import plotly.graph_objs as go #from plotly.graph_objs import Layout #", "location=(0, 0)) JS_code_slider = \"\"\" var vmin = low_slider.value; var", "value=max_s * 0.8, callback=callback) self.graph_min_slider.on_change('value', self.change_image_contrast) self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"] =", "self.data=data self.header=header def change_image_contrast(self, attr, old, new): # print attr,old,new", "self.graph_max_slider.on_change('value', self.change_image_contrast) callback.args[\"low_slider\"] = self.graph_min_slider callback.args[\"high_slider\"] = self.graph_max_slider #ax.set_xlabel('RA') #ax.set_ylabel('DEC')" ]
[ "not None def test_testaid_unit_pathlist_get(tmp_path): msd = tmp_path / 'molecule_scenario_directory' dir1", "assert testvars_roles_whitelist is not None def test_testaid_unit_pathlist_get(tmp_path): msd = tmp_path", "from testaid.pathlist import PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist is not", "PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist is not None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist):", "test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist is not None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist", "= 'dir1:../dir2/file3.yml' pathlist = PathList(my_pathstring, msd) assert pathlist.get() == my_pathlist", "msd / 'dir1' dir1.mkdir(parents=True) dir2 = tmp_path / 'dir2' dir2.mkdir()", "= [Path(file3), Path(file1), Path(file2)] my_pathstring = 'dir1:../dir2/file3.yml' pathlist = PathList(my_pathstring,", "test_testaid_unit_pathlist_get(tmp_path): msd = tmp_path / 'molecule_scenario_directory' dir1 = msd /", "testaid.pathlist import PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist is not None", "'dir2' dir2.mkdir() file1 = dir1 / 'file1.yml' file1.touch() file2 =", "file1 = dir1 / 'file1.yml' file1.touch() file2 = dir1 /", "= dir1 / 'file2.yml' file2.touch() file3 = dir2 / 'file3.yml'", "/ 'file3.yml' file3.touch() my_pathlist = [Path(file3), Path(file1), Path(file2)] my_pathstring =", "/ 'file2.yml' file2.touch() file3 = dir2 / 'file3.yml' file3.touch() my_pathlist", "'file2.yml' file2.touch() file3 = dir2 / 'file3.yml' file3.touch() my_pathlist =", "[Path(file3), Path(file1), Path(file2)] my_pathstring = 'dir1:../dir2/file3.yml' pathlist = PathList(my_pathstring, msd)", "'dir1' dir1.mkdir(parents=True) dir2 = 
tmp_path / 'dir2' dir2.mkdir() file1 =", "pathlib import Path from testaid.pathlist import PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert", "import PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist is not None def", "dir1 / 'file1.yml' file1.touch() file2 = dir1 / 'file2.yml' file2.touch()", "def test_testaid_unit_pathlist_get(tmp_path): msd = tmp_path / 'molecule_scenario_directory' dir1 = msd", "tmp_path / 'dir2' dir2.mkdir() file1 = dir1 / 'file1.yml' file1.touch()", "test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist is not None def test_testaid_unit_pathlist_get(tmp_path): msd =", "assert testvars_roles_blacklist is not None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist is", "testvars_roles_whitelist is not None def test_testaid_unit_pathlist_get(tmp_path): msd = tmp_path /", "file3.touch() my_pathlist = [Path(file3), Path(file1), Path(file2)] my_pathstring = 'dir1:../dir2/file3.yml' pathlist", "my_pathlist = [Path(file3), Path(file1), Path(file2)] my_pathstring = 'dir1:../dir2/file3.yml' pathlist =", "file2.touch() file3 = dir2 / 'file3.yml' file3.touch() my_pathlist = [Path(file3),", "not None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist is not None def", "my_pathstring = 'dir1:../dir2/file3.yml' pathlist = PathList(my_pathstring, msd) assert pathlist.get() ==", "/ 'dir2' dir2.mkdir() file1 = dir1 / 'file1.yml' file1.touch() file2", "def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist is not None def test_testaid_unit_pathlist_get(tmp_path): msd", "dir1 = msd / 'dir1' dir1.mkdir(parents=True) dir2 = tmp_path /", "dir1.mkdir(parents=True) dir2 = tmp_path / 'dir2' dir2.mkdir() file1 = dir1", "Path(file1), Path(file2)] my_pathstring 
= 'dir1:../dir2/file3.yml' pathlist = PathList(my_pathstring, msd) assert", "dir1 / 'file2.yml' file2.touch() file3 = dir2 / 'file3.yml' file3.touch()", "file3 = dir2 / 'file3.yml' file3.touch() my_pathlist = [Path(file3), Path(file1),", "Path from testaid.pathlist import PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist is", "is not None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist is not None", "= tmp_path / 'molecule_scenario_directory' dir1 = msd / 'dir1' dir1.mkdir(parents=True)", "file1.touch() file2 = dir1 / 'file2.yml' file2.touch() file3 = dir2", "Path(file2)] my_pathstring = 'dir1:../dir2/file3.yml' pathlist = PathList(my_pathstring, msd) assert pathlist.get()", "= tmp_path / 'dir2' dir2.mkdir() file1 = dir1 / 'file1.yml'", "'file1.yml' file1.touch() file2 = dir1 / 'file2.yml' file2.touch() file3 =", "None def test_testaid_unit_pathlist_get(tmp_path): msd = tmp_path / 'molecule_scenario_directory' dir1 =", "'molecule_scenario_directory' dir1 = msd / 'dir1' dir1.mkdir(parents=True) dir2 = tmp_path", "testvars_roles_blacklist is not None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist is not", "import Path from testaid.pathlist import PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist", "/ 'molecule_scenario_directory' dir1 = msd / 'dir1' dir1.mkdir(parents=True) dir2 =", "/ 'file1.yml' file1.touch() file2 = dir1 / 'file2.yml' file2.touch() file3", "def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist): assert testvars_roles_blacklist is not None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert", "= msd / 'dir1' dir1.mkdir(parents=True) dir2 = tmp_path / 'dir2'", "/ 'dir1' dir1.mkdir(parents=True) dir2 = tmp_path / 'dir2' dir2.mkdir() file1", "dir2.mkdir() file1 = 
dir1 / 'file1.yml' file1.touch() file2 = dir1", "tmp_path / 'molecule_scenario_directory' dir1 = msd / 'dir1' dir1.mkdir(parents=True) dir2", "= dir2 / 'file3.yml' file3.touch() my_pathlist = [Path(file3), Path(file1), Path(file2)]", "'file3.yml' file3.touch() my_pathlist = [Path(file3), Path(file1), Path(file2)] my_pathstring = 'dir1:../dir2/file3.yml'", "None def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist): assert testvars_roles_whitelist is not None def test_testaid_unit_pathlist_get(tmp_path):", "dir2 / 'file3.yml' file3.touch() my_pathlist = [Path(file3), Path(file1), Path(file2)] my_pathstring", "dir2 = tmp_path / 'dir2' dir2.mkdir() file1 = dir1 /", "msd = tmp_path / 'molecule_scenario_directory' dir1 = msd / 'dir1'", "from pathlib import Path from testaid.pathlist import PathList def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist):", "is not None def test_testaid_unit_pathlist_get(tmp_path): msd = tmp_path / 'molecule_scenario_directory'", "= dir1 / 'file1.yml' file1.touch() file2 = dir1 / 'file2.yml'", "file2 = dir1 / 'file2.yml' file2.touch() file3 = dir2 /" ]
[ "'cpc', 'name': 'fake-cpc1-name', 'description': 'CPC #1 (DPM mode)', 'status': 'active',", "attributes of HbaManager.\"\"\" hba_mgr = self.partition.hbas # Verify all public", "None, HTTPError({'http-status': 400, 'reason': 5})), ({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status': 400,", "\"new-\" + hba_name # Execute the code to be tested", "tested. # Its port points to a faked URI. faked_hba", "code to be tested hba.delete() exc = exc_info.value if isinstance(exp_exc,", "test_hba_reassign_port(self, initial_partition_status, exp_exc): \"\"\"Test Hba.reassign_port().\"\"\" # Add a faked HBA", "2.0 (the \"License\"); # you may not use this file", "new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated',", "an FCP adapter and port to the CPC self.faked_fcp1 =", "'foo', 'element-id': HBA1_OID + 'foo'}, []), ] ) def test_hbamanager_list_filter_args(self,", "['element-uri']), (dict(full_properties=True), None), ] ) def test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names):", "Check that the port of the HBA is unchanged ...", "hba_mgr.find(name=hba_name) new_hba_name = \"new-\" + hba_name # Execute the code", "# still reflects the property updates. 
hba.pull_full_properties() for prop_name in", "False, 'iml-mode': 'dpm', }) self.cpc = self.client.cpcs.find(name='fake-cpc1-name') # Add a", "'fake-fcp1-oid', # object-uri is auto-set based upon object-id 'parent': self.faked_cpc.uri,", "'fake-port11-name', 'description': 'FCP #1 Port #1', }) assert PORT11_URI ==", "('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active',", "'name', 'adapter-port-uri'], None), ] ) def test_hbamanager_create( self, input_props, exp_prop_names,", "\"\"\"Test Hba.delete().\"\"\" # Add a faked HBA to be tested", "Add a CPC in DPM mode self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id':", "len(new_hbas_list) == 1 new_hba_list = new_hbas_list[0] assert new_hba_list.properties['name'] == new_hba_name", "prop_name in hba.properties prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value", "test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\" # Add a faked hba faked_hba =", "in hba.properties prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value #", "updates. for prop_name in saved_properties: if prop_name in input_props: exp_prop_value", "= input_props[prop_name] else: exp_prop_value = saved_properties[prop_name] assert prop_name in hba.properties", "self.partition.hbas hba = hba_mgr.find(name=hba_name) new_hba_name = \"new-\" + hba_name #", "('paused', None), ] ) @pytest.mark.parametrize( \"input_props, exp_prop_names, exp_prop_exc\", [ ({},", "in the resource object: prop_name = 'adapter-port-uri' assert hba.properties[prop_name] ==", "hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] == new_hba_name # Verify that the resource", "the code to be tested hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas,", "uses the name-to-URI cache). 
with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify that", "'hba 1-oid' HBA1_NAME = 'hba 1' HBA2_OID = 'hba 2-oid'", "prop_name in input_props: exp_prop_value = input_props[prop_name] else: exp_prop_value = saved_properties[prop_name]", "[HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID, HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]),", "one faked_hba = self.add_hba1() hba_name = faked_hba.name self.add_hba2() # Construct", "hba_uri = hba.uri exp_hba_uri = hba.properties['element-uri'] assert hba_uri == exp_hba_uri", "prop_name in exp_prop_names: assert prop_name in hba.properties if prop_name in", "400, 'reason': 5})), ({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status': 400, 'reason': 5})),", "HTTPError({'http-status': 409, 'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status': 409, 'reason':", "License for the specific language governing permissions and # limitations", "Reserved. # # Licensed under the Apache License, Version 2.0", "mode self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', # element-uri is set", "new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) == 1 new_hba_list =", "self.add_hba1() # Set the status of the faked partition self.faked_partition.properties['status']", "= hba_mgr.find(name=faked_hba.name) # Execute the code to be tested repr_str", "= None if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute", "'fake-cpc1-name', 'description': 'CPC #1 (DPM mode)', 'status': 'active', 'dpm-enabled': True,", "# Check the resource for consistency within itself assert isinstance(hba,", "HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason #", "assert hba_mgr.resource_class == Hba assert hba_mgr.session == self.session assert hba_mgr.parent", "a third HBA with same name part3_props = copy.deepcopy(faked_hba.properties) part3_props['description']", "hba_mgr.find(name=hba_name) # Execute the 
creation code to be tested. hba_mgr.create(part3_props)", "Refresh the resource object and verify that it still reflects", "PORT11_URI}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x', 'adapter-port-uri': PORT11_URI},", "up a faked session, and add a faked CPC in", "print_function import pytest import re import copy from zhmcclient import", "HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME,", "#1', }) assert PORT11_URI == self.faked_port11.uri def add_hba1(self): \"\"\"Add a", "# Add a partition to the CPC self.faked_partition = self.faked_cpc.partitions.add({", "new name, using list() new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list)", "= 'hba 1' HBA2_OID = 'hba 2-oid' HBA2_NAME = 'hba", "using find() new_hba_find = hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] == new_hba_name #", "'FCP #1', 'status': 'active', 'type': 'fcp', 'adapter-id': '123', 'detected-card-type': '10gbe-roce-express',", "yet. assert hba.properties['name'] == new_hba_name # Refresh the resource object", "be tested hba.reassign_port(port) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert", "status of the faked partition self.faked_partition.properties['status'] = 'stopped' # updatable", "public properties of the manager object assert hba_mgr.resource_class == Hba", "'name' property.\"\"\" # Add a faked HBA faked_hba = self.add_hba1()", "list() (this does not use the name-to-URI cache). hbas_list =", "] ) def test_hba_delete(self, initial_partition_status, exp_exc): \"\"\"Test Hba.delete().\"\"\" # Add", "for consistency within itself assert isinstance(hba, Hba) hba_name = hba.name", "= self.faked_partition.hbas.add({ 'element-id': HBA1_OID, # element-uri will be automatically set", "HBA1_OID, # element-uri will be automatically set 'parent': self.faked_partition.uri, 'class':", "OF ANY KIND, either express or implied. 
# See the", "= copy.deepcopy(hba.properties) # Execute the code to be tested hba.update_properties(properties=input_props)", "See the License for the specific language governing permissions and", "pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute the creation code to be tested.", "still reflects the property updates. hba.pull_full_properties() for prop_name in saved_properties:", "to in writing, software # distributed under the License is", "HTTPError, NotFound from zhmcclient_mock import FakedSession from tests.common.utils import assert_resources", "def test_hbamanager_initial_attrs(self): \"\"\"Test initial attributes of HbaManager.\"\"\" hba_mgr = self.partition.hbas", "that the HBA will be reassigned to faked_adapter = self.faked_cpc.adapters.add({", "and port to the CPC self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id': FCP1_OID,", "input_props: value = hba.properties[prop_name] exp_value = input_props[prop_name] assert value ==", "or agreed to in writing, software # distributed under the", "def test_hba_update_name(self): \"\"\"Test Hba.update_properties() with 'name' property.\"\"\" # Add a", "'class': 'adapter', 'name': 'fake-fcp1', 'description': 'FCP #1', 'status': 'active', 'type':", "= self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', # element-uri is set up automatically", "referenced in HBA properties: FCP1_OID = 'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid'", "'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'physical-channel-status': 'operating', }) self.faked_port11", "be reassigned to faked_adapter = self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', # object-uri", "plus 'element-uri' plus 'element-id'. 
hba = hba_mgr.create(properties=input_props) # Check the", "hba_mgr = self.partition.hbas # Verify all public properties of the", "self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', # object-uri is auto-set based upon object-id", "= initial_partition_status # The HBA object we will perform the", "self.cpc.partitions.find(name='fake-part1-name') # Add an FCP adapter and port to the", "#1', 'status': 'active', 'type': 'fcp', # adapter-family is auto-set based", "= hba_mgr.find(name=hba_name) description = hba3.get_property('description') assert description == 'Third HBA'", "values for prop_name in exp_prop_names: assert prop_name in hba.properties if", "compliance with the License. # You may obtain a copy", "All Rights Reserved. # # Licensed under the Apache License,", "and verify that it still reflects the # update. hba.pull_full_properties()", "is unchanged ... prop_name = 'adapter-port-uri' # ... in the", "= 'stopped' # deletable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name)", "Add a partition to the CPC self.faked_partition = self.faked_cpc.partitions.add({ 'element-id':", "FCP1_OID = 'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID)", "[HBA1_OID]), ({'element-id': HBA1_OID + 'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID +", "hba.properties[prop_name] exp_value = input_props[prop_name] assert value == exp_value def test_hba_repr(self):", "isinstance(hba, Hba) hba_name = hba.name exp_hba_name = hba.properties['name'] assert hba_name", "+ 'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME + 'foo', 'element-id':", "= self.faked_partition.hbas.add({ 'element-id': HBA2_OID, # element-uri will be automatically set", "... in the resource object: prop_name = 'adapter-port-uri' assert hba.properties[prop_name]", "not use this file except in compliance with the License.", "object # still reflects the property updates. 
hba.pull_full_properties() for prop_name", "exp_exc = exp_status_exc elif exp_prop_exc: exp_exc = exp_prop_exc else: exp_exc", "'parent': self.faked_cpc.uri, 'class': 'partition', 'name': 'fake-part1-name', 'description': 'Partition #1', 'status':", "name part3_props = copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third HBA' # Set", "# Object IDs and names of our faked HBAs: HBA1_OID", "you may not use this file except in compliance with", "'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fcp1', 'description': 'FCP #1', 'status':", "within itself assert isinstance(hba, Hba) hba_name = hba.name exp_hba_name =", "exc.reason == exp_exc.reason # Check that the HBA still exists", "update. hba.pull_full_properties() assert hba.properties['name'] == new_hba_name # Verify that the", "permissions and # limitations under the License. \"\"\" Unit tests", "Verify that the resource object already reflects the property #", "80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'physical-channel-status': 'operating', })", "faked_hba = self.add_hba1() # Add a faked FCP with one", "partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA1_OID, # element-uri will be", "and add a faked CPC in DPM mode with one", "set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA1_NAME, 'description': 'HBA '", "{'device-number': 'FEDC', 'description': 'New HBA description'}, ] ) def test_hba_update_properties(self,", "__future__ import absolute_import, print_function import pytest import re import copy", "0, 'fabric-id': None, }) port = adapter.ports.find(name='fake-port1') # Set the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "adapter and port to the CPC self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id':", "to be tested and another one faked_hba = self.add_hba1() self.add_hba2()", "self.faked_partition.properties['status'] = 'stopped' # updatable hba_mgr = self.partition.hbas 
hba =", "PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112', }) return faked_hba def test_hbamanager_initial_attrs(self):", "'adapter', 'name': 'fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp',", "no HBAs. Add one FCP adapter and port. \"\"\" self.session", "HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]),", "not been refreshed yet. assert hba.properties['name'] == new_hba_name # Refresh", "'name': 'fake-fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp', #", "'fake-cpc1-oid', # element-uri is set up automatically 'parent': None, 'class':", "hba_mgr.find(name=hba_name) description = hba3.get_property('description') assert description == 'Third HBA' @pytest.mark.parametrize(", "module. \"\"\" from __future__ import absolute_import, print_function import pytest import", "({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME", "({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID, HBA2_OID]), ({'name': 'hba", "] ) def test_hbamanager_list_filter_args(self, filter_args, exp_oids): \"\"\"Test HbaManager.list() with filter_args.\"\"\"", "self.add_hba2() exp_faked_hbas = [faked_hba1, faked_hba2] hba_mgr = self.partition.hbas # Execute", "def test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete() followed by Hba.create() with same name.\"\"\"", "Set the status of the faked partition self.faked_partition.properties['status'] = 'stopped'", "hba = hba_mgr.find(name=hba_name) new_hba_name = \"new-\" + hba_name # Execute", "from tests.common.utils import assert_resources # Object IDs and names of", "new_hba_name = \"new-\" + hba_name # Execute the code to", "exp_status_exc): \"\"\"Test HbaManager.create().\"\"\" # Set the status of the faked", "Check that the port of the HBA has been set", "Check that the HBA no longer exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name)", "the 
code to be tested hbas = hba_mgr.list(filter_args=filter_args) assert len(hbas)", "hba.properties[prop_name] assert prop_value == exp_prop_value # Refresh the resource object", "code to be tested repr_str = repr(hba) repr_str = repr_str.replace('\\n',", "begin of the string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize(", "'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA1_NAME, 'description': 'HBA ' +", "HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name': '.+hba 1'}, []), ({'name':", "self.partition.hbas # Execute the code to be tested hbas =", "'class': 'storage-port', 'name': 'fake-port1', 'description': 'FCP #1 Port 1', 'index':", "'element-uri' plus 'element-id'. hba = hba_mgr.create(properties=input_props) # Check the resource", "[HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name': '.+hba 1'}, []),", "the update, even # though it has not been refreshed", "be tested. 
hba_mgr.create(part3_props) # Check that the HBA exists again", "self.faked_port11.uri def add_hba1(self): \"\"\"Add a faked HBA 1 to the", "input_props, exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc): \"\"\"Test HbaManager.create().\"\"\" # Set the", "status of the faked partition self.faked_partition.properties['status'] = initial_partition_status # The", "automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA2_NAME, 'description': 'HBA", "Execute the code to be tested hba.reassign_port(port) exc = exc_info.value", "faked_hba = self.add_hba1() # Set the status of the faked", "exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute the creation code to", "'index': 1, 'name': 'fake-port11-name', 'description': 'FCP #1 Port #1', })", "'class': 'hba', 'name': HBA1_NAME, 'description': 'HBA ' + HBA1_NAME, 'adapter-port-uri':", "longer exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute the creation code", "already reflects the update, even # though it has not", "== set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\", [ ('stopped', None), ('terminated', None),", "}) self.partition = self.cpc.partitions.find(name='fake-part1-name') # Add an FCP adapter and", "hba.properties['name'] == new_hba_name # Verify that the resource can be", "'device-number': '1111', }) return faked_hba def add_hba2(self): \"\"\"Add a faked", "'1111', }) return faked_hba def add_hba2(self): \"\"\"Add a faked HBA", "hba = hba_mgr.find(name=faked_hba.name) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: #", "hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties)", "assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason # Check", "one partition that has no HBAs. 
Add one FCP adapter", "on hba = self.partition.hbas.find(name=faked_hba.name) # Save the HBA properties for", "the faked partition self.faked_partition.properties['status'] = 'stopped' # updatable hba_mgr =", "description'}, {'device-number': 'FEDC', 'description': 'New HBA description'}, ] ) def", "by Hba.create() has # the input properties plus 'element-uri' plus", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "== 0 # Verify that the resource is no longer", "'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID", "in hba.properties prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value def", "hba.properties prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value def test_hba_update_name(self):", "= copy.deepcopy(hba.properties) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute", "resource can be found by its new name, using list()", "self.cpc = self.client.cpcs.find(name='fake-cpc1-name') # Add a partition to the CPC", "= hba_mgr.create(properties=input_props) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status", "has # the input properties plus 'element-uri' plus 'element-id'. 
hba", "= self.add_hba1() hba_name = faked_hba.name # Set the status of", "\"\"\"Test Hba.update_properties().\"\"\" # Add a faked HBA faked_hba = self.add_hba1()", "('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None), ('reservation-error', None), ('paused',", "= faked_hba.name # Set the status of the faked partition", "self.add_hba2() # Set the status of the faked partition self.faked_partition.properties['status']", "tested hbas = hba_mgr.list(filter_args=filter_args) assert len(hbas) == len(exp_oids) if exp_oids:", "self.session assert hba_mgr.parent == self.partition assert hba_mgr.partition == self.partition #", "'operating', }) adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', # element-uri", "1', 'index': 0, 'fabric-id': None, }) port = adapter.ports.find(name='fake-port1') #", "[ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason': 1})),", "file except in compliance with the License. # You may", "test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names): \"\"\"Test HbaManager.list() with full_properties.\"\"\" # Add", "# Execute the code to be tested. hba.delete() # Check", "'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fake-fcp1', 'description': 'FCP #1', 'status':", "not use the name-to-URI cache). hbas_list = hba_mgr.list( filter_args=dict(name=hba_name)) assert", "tests for _hba module. 
\"\"\" from __future__ import absolute_import, print_function", "] ) def test_hba_reassign_port(self, initial_partition_status, exp_exc): \"\"\"Test Hba.reassign_port().\"\"\" # Add", "({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME +", "exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc): \"\"\"Test HbaManager.create().\"\"\" # Set the status", "'parent': self.faked_fcp1.uri, 'class': 'storage-port', 'index': 1, 'name': 'fake-port11-name', 'description': 'FCP", "HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]),", "IDs of elements referenced in HBA properties: FCP1_OID = 'fake-fcp1-oid'", "= hba_mgr.find(name=hba_name) new_hba_name = \"new-\" + hba_name # Execute the", "add a faked CPC in DPM mode with one partition", "'description': 'FCP #1 Port #1', }) assert PORT11_URI == self.faked_port11.uri", "initial_partition_status, exp_exc): \"\"\"Test Hba.reassign_port().\"\"\" # Add a faked HBA to", "'storage-port', 'name': 'fake-port1', 'description': 'FCP #1 Port 1', 'index': 0,", "hba_name # Execute the code to be tested hba.update_properties(properties={'name': new_hba_name})", "# Add two faked HBAs faked_hba1 = self.add_hba1() faked_hba2 =", "\"full_properties_kwargs, prop_names\", [ (dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None), ]", "len(exp_oids) if exp_oids: oids = [hba.properties['element-id'] for hba in hbas]", "# Execute the creation code to be tested. hba_mgr.create(part3_props) #", "HbaManager.create().\"\"\" # Set the status of the faked partition self.faked_partition.properties['status']", "# Execute the deletion code to be tested. hba.delete() #", "unchanged ... prop_name = 'adapter-port-uri' # ... 
in the resource", "hba.properties[prop_name] == saved_properties[prop_name] else: # Execute the code to be", "hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) # Execute the", "Hba and HbaManager classes.\"\"\" def setup_method(self): \"\"\" Set up a", "as exc_info: # Execute the code to be tested hba.delete()", "of the string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize( \"initial_partition_status,", "KIND, either express or implied. # See the License for", "}) adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', # element-uri is", "1})), ('active', None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None),", "HBA no longer exists with pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name) def", "[HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID, HBA2_OID]),", "assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason else: #", "and values for prop_name in exp_prop_names: assert prop_name in hba.properties", "a faked FCP with one port that the HBA will", "'element-id': HBA1_OID}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID +", "by its old name, using # find() (this uses the", "update, even # though it has not been refreshed yet.", "hba = self.partition.hbas.find(name=faked_hba.name) # Save the HBA properties for later", "= 'hba 1-oid' HBA1_NAME = 'hba 1' HBA2_OID = 'hba", "'\\\\n') # We check just the begin of the string:", "self.add_hba1() # Add a faked FCP with one port that", "by Hba.create() with same name.\"\"\" # Add a faked HBA", "(the \"License\"); # you may not use this file except", "HBA 1 to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id':", "return faked_hba def 
test_hbamanager_initial_attrs(self): \"\"\"Test initial attributes of HbaManager.\"\"\" hba_mgr", "Unit tests for _hba module. \"\"\" from __future__ import absolute_import,", "'hba .'}, [HBA1_OID, HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'},", "resource object and verify that the resource object # still", "= hba_mgr.find(name=hba_name) # Execute the deletion code to be tested.", "faked URI. faked_hba = self.add_hba1() # Add a faked FCP", "input_props: exp_prop_value = input_props[prop_name] else: exp_prop_value = saved_properties[prop_name] assert prop_name", "'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112', }) return faked_hba def test_hbamanager_initial_attrs(self): \"\"\"Test", "= self.add_hba1() hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) # Execute", "self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', # element-uri is auto-set based upon", "a faked HBA to be tested and another one faked_hba", "# # Unless required by applicable law or agreed to", "by its new name, using list() new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name))", "# Add a faked HBA to be tested. # Its", "({'name': 'fake-hba-x'}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'adapter-port-uri': PORT11_URI}, None,", "test_hba_update_properties(self, input_props): \"\"\"Test Hba.update_properties().\"\"\" # Add a faked HBA faked_hba", "resource object: assert hba.properties[prop_name] == saved_properties[prop_name] # ... and again", "verify that the resource object # still reflects the property", "set ... # ... 
in the resource object: prop_name =", "}) port = adapter.ports.find(name='fake-port1') # Set the status of the", "filter_args=dict(name=hba_name)) assert len(hbas_list) == 0 # Verify that the resource", "'element-id': 'fake-port1-oid', # element-uri is auto-set based upon object-id 'parent':", "hba_mgr.partition == self.partition # TODO: Test for HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs,", "80, 'physical-channel-status': 'operating', }) self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent':", "\"\"\"Test HbaManager.list() with filter_args.\"\"\" # Add two faked HBAs self.add_hba1()", "test_hba_delete(self, initial_partition_status, exp_exc): \"\"\"Test Hba.delete().\"\"\" # Add a faked HBA", "1.+'}, []), ({'name': '.+hba 1'}, []), ({'name': HBA1_NAME, 'element-id': HBA1_OID},", "implied. # See the License for the specific language governing", "test_hbamanager_list_filter_args(self, filter_args, exp_oids): \"\"\"Test HbaManager.list() with filter_args.\"\"\" # Add two", "is no longer found by its old name, using #", "code to be tested hba = hba_mgr.create(properties=input_props) exc = exc_info.value", "exp_exc.http_status assert exc.reason == exp_exc.reason else: # Execute the code", "in exp_prop_names: assert prop_name in hba.properties if prop_name in input_props:", "True, 'is-ensemble-member': False, 'iml-mode': 'dpm', }) self.cpc = self.client.cpcs.find(name='fake-cpc1-name') #", "initial_partition_status hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) if exp_exc: with", "Note: the Hba object returned by Hba.create() has # the", "self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) # Execute the code to be", "that the resource object already reflects the property # updates.", "exists with pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete()", "\"\"\" from __future__ import absolute_import, 
print_function import pytest import re", "with filter_args.\"\"\" # Add two faked HBAs self.add_hba1() self.add_hba2() hba_mgr", "self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas if exp_status_exc: exp_exc =", "another one faked_hba = self.add_hba1() hba_name = faked_hba.name self.add_hba2() #", "HbaManager.list() with full_properties.\"\"\" # Add two faked HBAs faked_hba1 =", "[HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name':", "CPC self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent': self.faked_cpc.uri, 'class': 'adapter',", "'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME,", "1, 'storage-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity':", "PORT11_OID) class TestHba(object): \"\"\"All tests for Hba and HbaManager classes.\"\"\"", "'.*hba 1'}, [HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'},", "the faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas if", "of the manager object assert hba_mgr.resource_class == Hba assert hba_mgr.session", "({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x', 'adapter-port-uri':", "to be tested repr_str = repr(hba) repr_str = repr_str.replace('\\n', '\\\\n')", "updates. 
hba.pull_full_properties() for prop_name in saved_properties: if prop_name in input_props:", "found by its old name, using # find() (this uses", "hba_mgr.resource_class == Hba assert hba_mgr.session == self.session assert hba_mgr.parent ==", "the code to be tested hba.delete() exc = exc_info.value if", "for prop_name in exp_prop_names: assert prop_name in hba.properties if prop_name", "if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the code", "code to be tested hba.update_properties(properties=input_props) # Verify that the resource", "self.faked_partition.uri, 'class': 'hba', 'name': HBA2_NAME, 'description': 'HBA ' + HBA2_NAME,", "the name-to-URI cache). with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify that the", "Unless required by applicable law or agreed to in writing,", "exp_exc.http_status assert exc.reason == exp_exc.reason # Check that the HBA", "the License. \"\"\" Unit tests for _hba module. \"\"\" from", "'name': 'fake-port11-name', 'description': 'FCP #1 Port #1', }) assert PORT11_URI", "# We check just the begin of the string: assert", "'fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp', 'adapter-id': '123',", "consistency within itself assert isinstance(hba, Hba) hba_name = hba.name exp_hba_name", "the specific language governing permissions and # limitations under the", "set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\", [ ('stopped', None), ('terminated', None), ('starting',", "'type': 'fcp', # adapter-family is auto-set based upon type 'adapter-id':", "just the begin of the string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. 
format(classname=hba.__class__.__name__, id=id(hba)),", "@pytest.mark.parametrize( \"full_properties_kwargs, prop_names\", [ (dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None),", "pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify that the resource object already reflects", "== exp_exc.reason # Check that the HBA still exists hba_mgr.find(name=faked_hba.name)", "self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8') self.client = Client(self.session) #", "properties for later comparison hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) if exp_exc:", "def test_hbamanager_create( self, input_props, exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc): \"\"\"Test HbaManager.create().\"\"\"", "== self.partition assert hba_mgr.partition == self.partition # TODO: Test for", "== exp_hba_uri # Check the properties against the expected names", "\"\"\"Test Hba.reassign_port().\"\"\" # Add a faked HBA to be tested.", "'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112', }) return faked_hba def", "(this does not use the name-to-URI cache). hbas_list = hba_mgr.list(", "TODO: Test for HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs, prop_names\", [ (dict(), ['element-uri']),", "= 'Third HBA' # Set the status of the faked", "1})), ('degraded', None), ('reservation-error', None), ('paused', None), ] ) def", "== self.partition # TODO: Test for HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs, prop_names\",", "Execute the code to be tested. # Note: the Hba", "self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fcp1', 'description':", "import absolute_import, print_function import pytest import re import copy from", "the resource object # still reflects the property updates. 
hba.pull_full_properties()", "return faked_hba def add_hba2(self): \"\"\"Add a faked HBA 2 to", "code to be tested. hba.delete() # Check that the HBA", "= self.add_hba2() exp_faked_hbas = [faked_hba1, faked_hba2] hba_mgr = self.partition.hbas #", "exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason", "partition self.faked_partition.properties['status'] = initial_partition_status # The HBA object we will", "be found by its new name, using list() new_hbas_list =", "be tested. hba.delete() # Check that the HBA no longer", "NotFound from zhmcclient_mock import FakedSession from tests.common.utils import assert_resources #", "2016-2017 IBM Corp. All Rights Reserved. # # Licensed under", "be tested and another one faked_hba = self.add_hba1() self.add_hba2() #", "resource object already reflects the property # updates. for prop_name", "assert_resources # Object IDs and names of our faked HBAs:", "[HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]), ({'name':", "None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x'}, None, HTTPError({'http-status': 400,", "self.partition = self.cpc.partitions.find(name='fake-part1-name') # Add an FCP adapter and port", "faked_hba = self.add_hba1() hba_name = faked_hba.name self.add_hba2() # Construct the", "'class': 'adapter', 'name': 'fcp1', 'description': 'FCP #1', 'status': 'active', 'type':", "None), ('reservation-error', None), ('paused', None), ] ) @pytest.mark.parametrize( \"input_props, exp_prop_names,", "two faked HBAs self.add_hba1() self.add_hba2() hba_mgr = self.partition.hbas # Execute", "from zhmcclient_mock import FakedSession from tests.common.utils import assert_resources # Object", "to a faked URI. 
faked_hba = self.add_hba1() # Add a", "'FEDC', 'description': 'New HBA description'}, ] ) def test_hba_update_properties(self, input_props):", "partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas if exp_status_exc: exp_exc", "def add_hba2(self): \"\"\"Add a faked HBA 2 to the faked", "partition that has no HBAs. Add one FCP adapter and", "HBA1_NAME + 'foo', 'element-id': HBA1_OID + 'foo'}, []), ] )", "1' HBA2_OID = 'hba 2-oid' HBA2_NAME = 'hba 2' #", "self, input_props, exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc): \"\"\"Test HbaManager.create().\"\"\" # Set", "[ (dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None), ] ) def", "exc_info: # Execute the code to be tested hba.reassign_port(port) exc", ") @pytest.mark.parametrize( \"input_props, exp_prop_names, exp_prop_exc\", [ ({}, None, HTTPError({'http-status': 400,", "None, 'class': 'cpc', 'name': 'fake-cpc1-name', 'description': 'CPC #1 (DPM mode)',", "tested. hba_mgr.create(part3_props) # Check that the HBA exists again under", "properties of the manager object assert hba_mgr.resource_class == Hba assert", "exp_prop_value def test_hba_update_name(self): \"\"\"Test Hba.update_properties() with 'name' property.\"\"\" # Add", "[HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME + 'foo'}, []), ({'name':", "... 
in the resource object: assert hba.properties[prop_name] == saved_properties[prop_name] #", "= self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', # object-uri is auto-set based upon", "= hba.uri exp_hba_uri = hba.properties['element-uri'] assert hba_uri == exp_hba_uri #", "test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete() followed by Hba.create() with same name.\"\"\" #", "({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID, HBA2_OID]), ({'name':", "assert exc.reason == exp_exc.reason # Check that the HBA still", "faked CPC in DPM mode with one partition that has", "self.faked_partition.properties['status'] = 'stopped' # deletable hba_mgr = self.partition.hbas hba =", "resource can be found by its new name, using find()", "hba.pull_full_properties() assert hba.properties[prop_name] == saved_properties[prop_name] else: # Execute the code", "# Execute the code to be tested. hba.reassign_port(port) # Check", "faked_hba.name # Set the status of the faked partition self.faked_partition.properties['status']", "= [faked_hba1, faked_hba2] hba_mgr = self.partition.hbas # Execute the code", "with 'name' property.\"\"\" # Add a faked HBA faked_hba =", "= self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fcp1',", "\"input_props, exp_prop_names, exp_prop_exc\", [ ({}, None, HTTPError({'http-status': 400, 'reason': 5})),", "'element-id': HBA1_OID + 'foo'}, []), ] ) def test_hbamanager_list_filter_args(self, filter_args,", "#1 (DPM mode)', 'status': 'active', 'dpm-enabled': True, 'is-ensemble-member': False, 'iml-mode':", "# Check that the port of the HBA has been", "None, }) port = adapter.ports.find(name='fake-port1') # Set the status of", "You may obtain a copy of the License at #", "== exp_exc.reason # Check that the port of the HBA", "HBAs self.add_hba1() self.add_hba2() hba_mgr = self.partition.hbas # Execute the code", "limitations under the License. 
\"\"\" Unit tests for _hba module.", "re import copy from zhmcclient import Client, Hba, HTTPError, NotFound", "# though it has not been refreshed yet. assert hba.properties['name']", "using # find() (this uses the name-to-URI cache). with pytest.raises(NotFound):", "= hba_mgr.find(name=faked_hba.name) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute", "] ) @pytest.mark.parametrize( \"input_props, exp_prop_names, exp_prop_exc\", [ ({}, None, HTTPError({'http-status':", "DPM mode self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', # element-uri is", "# Add two faked HBAs self.add_hba1() self.add_hba2() hba_mgr = self.partition.hbas", "hbas = hba_mgr.list(filter_args=filter_args) assert len(hbas) == len(exp_oids) if exp_oids: oids", "exp_hba_name = hba.properties['name'] assert hba_name == exp_hba_name hba_uri = hba.uri", "hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) # Execute the code", "in hbas] assert set(oids) == set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\", [", "the faked partition self.faked_partition.properties['status'] = 'stopped' # deletable hba_mgr =", "'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object): \"\"\"All tests for", "'status': 'active', 'dpm-enabled': True, 'is-ensemble-member': False, 'iml-mode': 'dpm', }) self.cpc", ") def test_hba_delete(self, initial_partition_status, exp_exc): \"\"\"Test Hba.delete().\"\"\" # Add a", "'element-id': HBA1_OID, # element-uri will be automatically set 'parent': self.faked_partition.uri,", "# Set the status of the faked partition self.faked_partition.properties['status'] =", "re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. 
format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None),", "for later comparison hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) if exp_exc: with", "a faked HBA faked_hba = self.add_hba1() # Set the status", "the resource can be found by its new name, using", "'reason': 5})), ({'name': 'fake-hba-x'}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'adapter-port-uri':", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'channel-path-id':", "(dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None), ] ) def test_hbamanager_list_full_properties(", "= self.add_hba1() # Add a faked FCP with one port", "of the HBA is unchanged ... prop_name = 'adapter-port-uri' #", "Hba.delete().\"\"\" # Add a faked HBA to be tested and", "('reservation-error', None), ('paused', None), ] ) def test_hba_reassign_port(self, initial_partition_status, exp_exc):", "[HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME + 'foo', HBA1_NAME]},", "# Add a faked HBA to be tested and another", "set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA2_NAME, 'description': 'HBA '", "be tested. # Note: the Hba object returned by Hba.create()", "even # though it has not been refreshed yet. assert", "HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []), ({'name':", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "still exists hba_mgr.find(name=faked_hba.name) else: # Execute the code to be", "License. 
# You may obtain a copy of the License", "input_props[prop_name] else: exp_prop_value = saved_properties[prop_name] assert prop_name in hba.properties prop_value", ") def test_hba_update_properties(self, input_props): \"\"\"Test Hba.update_properties().\"\"\" # Add a faked", "HBA 2 to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id':", "Test for HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs, prop_names\", [ (dict(), ['element-uri']), (dict(full_properties=False),", "HbaManager.list() with filter_args.\"\"\" # Add two faked HBAs self.add_hba1() self.add_hba2()", "self.add_hba1() faked_hba2 = self.add_hba2() exp_faked_hbas = [faked_hba1, faked_hba2] hba_mgr =", "1, 'network-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity':", "def add_hba1(self): \"\"\"Add a faked HBA 1 to the faked", "# Execute the code to be tested. # Note: the", "no longer found by its old name, using # list()", "1})), ('degraded', None), ('reservation-error', None), ('paused', None), ] ) @pytest.mark.parametrize(", "None), ] ) def test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names): \"\"\"Test HbaManager.list()", "a faked session, and add a faked CPC in DPM", "({'name': 'hba .'}, [HBA1_OID, HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name':", "Hba, HTTPError, NotFound from zhmcclient_mock import FakedSession from tests.common.utils import", "[HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID", "'description': 'FCP #1', 'status': 'active', 'type': 'fcp', 'adapter-id': '123', 'detected-card-type':", "hba_name = hba.name exp_hba_name = hba.properties['name'] assert hba_name == exp_hba_name", "faked_hba.name self.add_hba2() # Construct the input properties for a third", "perform the test on hba = self.partition.hbas.find(name=faked_hba.name) # Save the", "setup_method(self): \"\"\" Set 
up a faked session, and add a", "adapter and port. \"\"\" self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8')", "of the faked partition self.faked_partition.properties['status'] = initial_partition_status # The HBA", "exp_exc): \"\"\"Test Hba.reassign_port().\"\"\" # Add a faked HBA to be", "'adapter-port-uri'], None), ] ) def test_hbamanager_create( self, input_props, exp_prop_names, exp_prop_exc,", "its old name, using # list() (this does not use", "@pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status':", "exp_faked_hbas = [faked_hba1, faked_hba2] hba_mgr = self.partition.hbas # Execute the", "is auto-set based upon object-id 'parent': faked_adapter.uri, 'class': 'storage-port', 'name':", "Its port points to a faked URI. faked_hba = self.add_hba1()", "the string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize( \"initial_partition_status, exp_exc\",", "assert hba_name == exp_hba_name hba_uri = hba.uri exp_hba_uri = hba.properties['element-uri']", "repr_str) @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated', None), ('starting',", "in input_props: exp_prop_value = input_props[prop_name] else: exp_prop_value = saved_properties[prop_name] assert", "copy.deepcopy(hba.properties) # Execute the code to be tested hba.update_properties(properties=input_props) #", "'1234-5678-J.01', 'port-count': 1, 'storage-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity':", "plus 'element-id'. 
hba = hba_mgr.create(properties=input_props) # Check the resource for", "to be tested hbas = hba_mgr.list(filter_args=filter_args) assert len(hbas) == len(exp_oids)", "saved_properties[prop_name] assert prop_name in hba.properties prop_value = hba.properties[prop_name] assert prop_value", "We check just the begin of the string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'.", "80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'channel-path-id': '1B', 'physical-channel-status':", "= 'adapter-port-uri' # ... in the resource object: assert hba.properties[prop_name]", "up automatically 'parent': None, 'class': 'cpc', 'name': 'fake-cpc1-name', 'description': 'CPC", "refreshed yet. assert hba.properties['name'] == new_hba_name # Refresh the resource", "the status of the faked partition self.faked_partition.properties['status'] = initial_partition_status #", "Verify that the resource can be found by its new", "PORT11_URI == self.faked_port11.uri def add_hba1(self): \"\"\"Add a faked HBA 1", "'active', 'dpm-enabled': True, 'is-ensemble-member': False, 'iml-mode': 'dpm', }) self.cpc =", "'1234-5678-J.01', 'port-count': 1, 'network-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity':", "# Verify that the resource object already reflects the property", "later comparison hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) if exp_exc: with pytest.raises(exp_exc.__class__)", "two faked HBAs faked_hba1 = self.add_hba1() faked_hba2 = self.add_hba2() exp_faked_hbas", "a faked hba faked_hba = self.add_hba1() hba_mgr = self.partition.hbas hba", "'reason': 1})), ('degraded', None), ('reservation-error', None), ('paused', None), ] )", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "({}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x'}, None, HTTPError({'http-status':", "'hba 1' HBA2_OID = 'hba 2-oid' HBA2_NAME = 'hba 2'", "= 
hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) # Execute the code", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "'2.13.1', '1.8') self.client = Client(self.session) # Add a CPC in", "@pytest.mark.parametrize( \"filter_args, exp_oids\", [ ({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]),", "faked HBAs: HBA1_OID = 'hba 1-oid' HBA1_NAME = 'hba 1'", "5})), ({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x',", "Check that the HBA no longer exists with pytest.raises(NotFound) as", "that the resource object already reflects the update, even #", "HBA with same name part3_props = copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third", "HBA2_NAME + 'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]),", "and HbaManager classes.\"\"\" def setup_method(self): \"\"\" Set up a faked", "# object-uri is auto-set based upon object-id 'parent': self.faked_cpc.uri, 'class':", "set up automatically 'parent': None, 'class': 'cpc', 'name': 'fake-cpc1-name', 'description':", "and another one faked_hba = self.add_hba1() hba_name = faked_hba.name self.add_hba2()", "'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'channel-path-id': '1B',", "required by applicable law or agreed to in writing, software", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID, HBA2_OID]), ({'name': '.ba", "Refresh the resource object and verify that the resource object", "'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count': 1, 'storage-port-uris': [], 'state': 'online', 'configured-capacity':", "hba_mgr.find(name=faked_hba.name) # Execute the code to be tested repr_str 
=", "# Add a faked HBA faked_hba = self.add_hba1() # Set", "[HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID + 'foo'},", "hba faked_hba = self.add_hba1() hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name)", "assert hba_mgr.session == self.session assert hba_mgr.parent == self.partition assert hba_mgr.partition", "({'element-id': [HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID + 'foo',", "{}, {'description': 'New HBA description'}, {'device-number': 'FEDC', 'description': 'New HBA", "agreed to in writing, software # distributed under the License", "HBA2_OID + 'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]),", "the resource for consistency within itself assert isinstance(hba, Hba) hba_name", "faked HBA 2 to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({", "will be automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA1_NAME,", "HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]},", "'FCP #1 Port 1', 'index': 0, 'fabric-id': None, }) port", "be tested hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize( \"filter_args,", "distributed under the License is distributed on an \"AS IS\"", "'foo', HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name':", "a faked URI. faked_hba = self.add_hba1() # Add a faked", "({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME + 'foo',", "... # ... 
in the resource object: prop_name = 'adapter-port-uri'", "that the HBA still exists hba_mgr.find(name=faked_hba.name) else: # Execute the", "[HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name':", "= self.partition.hbas hba = hba_mgr.find(name=hba_name) # Execute the deletion code", "({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME + 'foo'}, []),", "hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) # Execute the code to be", "use the name-to-URI cache). hbas_list = hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list)", "exp_faked_hbas, prop_names) @pytest.mark.parametrize( \"filter_args, exp_oids\", [ ({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id':", "exp_prop_value = input_props[prop_name] else: exp_prop_value = saved_properties[prop_name] assert prop_name in", "'type': 'fcp', 'adapter-id': '123', 'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01', 'port-count': 1,", "CPC in DPM mode self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', #", "exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the code to", "refreshed from the mock state: hba.pull_full_properties() assert hba.properties[prop_name] == saved_properties[prop_name]", "is auto-set based upon object-id 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name':", "self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', # element-uri is set up", "1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID, HBA2_OID]), ({'name': '.ba 1'},", "'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri, 'class': 'storage-port', 'index': 1, 'name': 'fake-port11-name',", "= self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) # Execute the code to", "[ {}, {'description': 'New HBA description'}, {'device-number': 'FEDC', 'description': 'New", "['element-uri', 'name', 'adapter-port-uri'], None), ] ) def 
test_hbamanager_create( self, input_props,", "the resource object and verify that it still reflects the", "for hba in hbas] assert set(oids) == set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status,", "# Its port points to a faked URI. faked_hba =", "faked_hba def test_hbamanager_initial_attrs(self): \"\"\"Test initial attributes of HbaManager.\"\"\" hba_mgr =", "faked HBA 1 to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({", "and Object IDs of elements referenced in HBA properties: FCP1_OID", "# find() (this uses the name-to-URI cache). with pytest.raises(NotFound): hba_mgr.find(name=hba_name)", "else: exp_exc = None if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info:", "'name': 'fake-cpc1-name', 'description': 'CPC #1 (DPM mode)', 'status': 'active', 'dpm-enabled':", "input_props): \"\"\"Test Hba.update_properties().\"\"\" # Add a faked HBA faked_hba =", "new_hbas_list[0] assert new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped',", "= hba.properties[prop_name] assert prop_value == exp_prop_value # Refresh the resource", "# Refresh the resource object and verify that the resource", "its new name, using list() new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name)) assert", "[HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME + 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME", "80, 'channel-path-id': '1B', 'physical-channel-status': 'operating', }) adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({", "'index': 0, 'fabric-id': None, }) port = adapter.ports.find(name='fake-port1') # Set", "Execute the creation code to be tested. hba_mgr.create(part3_props) # Check", "License. \"\"\" Unit tests for _hba module. 
\"\"\" from __future__", "Client, Hba, HTTPError, NotFound from zhmcclient_mock import FakedSession from tests.common.utils", "('reservation-error', None), ('paused', None), ] ) def test_hba_delete(self, initial_partition_status, exp_exc):", "}) return faked_hba def test_hbamanager_initial_attrs(self): \"\"\"Test initial attributes of HbaManager.\"\"\"", "Object IDs and names of our faked HBAs: HBA1_OID =", "to be tested. # Note: the Hba object returned by", "property updates. hba.pull_full_properties() for prop_name in saved_properties: if prop_name in", "' + HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111', })", "400, 'reason': 5})), ({'name': 'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'],", "Execute the code to be tested hba.delete() exc = exc_info.value", "'fake-port1', 'description': 'FCP #1 Port 1', 'index': 0, 'fabric-id': None,", "Client(self.session) # Add a CPC in DPM mode self.faked_cpc =", "a partition to the CPC self.faked_partition = self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid',", "OR CONDITIONS OF ANY KIND, either express or implied. #", "HBA object we will perform the test on hba =", "] ) def test_hba_update_properties(self, input_props): \"\"\"Test Hba.update_properties().\"\"\" # Add a", "the License is distributed on an \"AS IS\" BASIS, #", "faked_hba1 = self.add_hba1() faked_hba2 = self.add_hba2() exp_faked_hbas = [faked_hba1, faked_hba2]", "the manager object assert hba_mgr.resource_class == Hba assert hba_mgr.session ==", "prop_name = 'adapter-port-uri' assert hba.properties[prop_name] == port.uri # ... and", "HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID + 'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID", "the property # updates. 
for prop_name in saved_properties: if prop_name", "be tested hbas = hba_mgr.list(filter_args=filter_args) assert len(hbas) == len(exp_oids) if", "classes.\"\"\" def setup_method(self): \"\"\" Set up a faked session, and", "prop_names\", [ (dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None), ] )", "the mock state: hba.pull_full_properties() assert hba.properties[prop_name] == saved_properties[prop_name] else: #", "faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA2_OID, # element-uri will", "[HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]), ({'name': 'hba 1.*'},", "element-uri is auto-set based upon object-id 'parent': faked_adapter.uri, 'class': 'storage-port',", "under that name hba3 = hba_mgr.find(name=hba_name) description = hba3.get_property('description') assert", "law or agreed to in writing, software # distributed under", "hba_mgr = self.partition.hbas # Execute the code to be tested", "'Third HBA' # Set the status of the faked partition", "returned by Hba.create() has # the input properties plus 'element-uri'", "to the CPC self.faked_partition = self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', # element-uri", "HBA properties: FCP1_OID = 'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid' PORT11_URI =", "automatically 'parent': None, 'class': 'cpc', 'name': 'fake-cpc1-name', 'description': 'CPC #1", "will be automatically set 'parent': self.faked_cpc.uri, 'class': 'partition', 'name': 'fake-part1-name',", "for _hba module. \"\"\" from __future__ import absolute_import, print_function import", "already reflects the property # updates. 
for prop_name in saved_properties:", "may obtain a copy of the License at # #", "({'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]), ({'name':", "exp_value def test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\" # Add a faked hba", "HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []), ({'name': HBA1_NAME + 'foo',", "# URIs and Object IDs of elements referenced in HBA", "Check the properties against the expected names and values for", "to be tested hba.reassign_port(port) exc = exc_info.value if isinstance(exp_exc, HTTPError):", "Hba assert hba_mgr.session == self.session assert hba_mgr.parent == self.partition assert", "hba3 = hba_mgr.find(name=hba_name) description = hba3.get_property('description') assert description == 'Third", "initial_partition_status # The HBA object we will perform the test", "our faked HBAs: HBA1_OID = 'hba 1-oid' HBA1_NAME = 'hba", "may not use this file except in compliance with the", "code to be tested. hba.reassign_port(port) # Check that the port", "will be reassigned to faked_adapter = self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', #", "name, using find() new_hba_find = hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] == new_hba_name", "tested. hba.reassign_port(port) # Check that the port of the HBA", "this file except in compliance with the License. # You", "'element-id'. 
hba = hba_mgr.create(properties=input_props) # Check the resource for consistency", "'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'channel-path-id': '1B', 'physical-channel-status': 'operating',", "status of the faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr =", "Check that the HBA still exists hba_mgr.find(name=faked_hba.name) else: # Execute", "Set the status of the faked partition self.faked_partition.properties['status'] = initial_partition_status", "HBA1_NAME = 'hba 1' HBA2_OID = 'hba 2-oid' HBA2_NAME =", ") def test_hba_reassign_port(self, initial_partition_status, exp_exc): \"\"\"Test Hba.reassign_port().\"\"\" # Add a", "# # Licensed under the Apache License, Version 2.0 (the", "cache). hbas_list = hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list) == 0 #", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "the HBA is unchanged ... prop_name = 'adapter-port-uri' # ...", "exp_prop_exc, initial_partition_status, exp_status_exc): \"\"\"Test HbaManager.create().\"\"\" # Set the status of", "object already reflects the update, even # though it has", "= FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8') self.client = Client(self.session) # Add", "elements referenced in HBA properties: FCP1_OID = 'fake-fcp1-oid' PORT11_OID =", "self.partition.hbas hba = hba_mgr.find(name=hba_name) # Execute the deletion code to", "no longer exists with pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self):", "prop_name in saved_properties: if prop_name in input_props: exp_prop_value = input_props[prop_name]", "None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active', None),", "({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID,", "Execute the code to be tested 
hba.update_properties(properties=input_props) # Verify that", "tested hba.update_properties(properties={'name': new_hba_name}) # Verify that the resource is no", "faked HBAs faked_hba1 = self.add_hba1() faked_hba2 = self.add_hba2() exp_faked_hbas =", "self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', # element-uri will be automatically set 'parent':", "\"\"\"Test Hba.update_properties() with 'name' property.\"\"\" # Add a faked HBA", "state: hba.pull_full_properties() assert hba.properties[prop_name] == saved_properties[prop_name] else: # Execute the", "# element-uri is set up automatically 'parent': None, 'class': 'cpc',", "upon type 'adapter-id': '123', 'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count': 1,", "the HBA no longer exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute", "adapter.ports.find(name='fake-port1') # Set the status of the faked partition self.faked_partition.properties['status']", "upon object-id 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fake-fcp1', 'description': 'FCP", "assert hba.properties[prop_name] == port.uri # ... and again when refreshed", "{'description': 'New HBA description'}, {'device-number': 'FEDC', 'description': 'New HBA description'},", "== exp_exc.http_status assert exc.reason == exp_exc.reason # Check that the", "faked FCP with one port that the HBA will be", "points to a faked URI. faked_hba = self.add_hba1() # Add", "or implied. # See the License for the specific language", "\"\"\"Add a faked HBA 1 to the faked partition.\"\"\" faked_hba", "full_properties.\"\"\" # Add two faked HBAs faked_hba1 = self.add_hba1() faked_hba2", "Hba.update_properties() with 'name' property.\"\"\" # Add a faked HBA faked_hba", "has no HBAs. Add one FCP adapter and port. 
\"\"\"", "be automatically set 'parent': self.faked_cpc.uri, 'class': 'partition', 'name': 'fake-part1-name', 'description':", "self.faked_fcp1.uri, 'class': 'storage-port', 'index': 1, 'name': 'fake-port11-name', 'description': 'FCP #1", "found by its new name, using list() new_hbas_list = hba_mgr.list(", "= 'hba 2-oid' HBA2_NAME = 'hba 2' # URIs and", "= faked_hba.name self.add_hba2() # Construct the input properties for a", "based upon object-id 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fake-fcp1', 'description':", "faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', # element-uri is auto-set based upon object-id", "self.add_hba1() self.add_hba2() # Set the status of the faked partition", "'object-id': FCP1_OID, 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fcp1', 'description': 'FCP", "from the mock state: hba.pull_full_properties() assert hba.properties[prop_name] == saved_properties[prop_name] else:", "'123', 'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count': 1, 'storage-port-uris': [], 'state':", "names and values for prop_name in exp_prop_names: assert prop_name in", "('active', None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None), ('reservation-error',", "[]), ({'name': '.+hba 1'}, []), ({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]),", "value == exp_value def test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\" # Add a", "refreshed from the mock state: hba.pull_full_properties() assert hba.properties[prop_name] == port.uri", "'device-number': '1112', }) return faked_hba def test_hbamanager_initial_attrs(self): \"\"\"Test initial attributes", "Check the resource for consistency within itself assert isinstance(hba, Hba)", "self.add_hba1() hba_name = faked_hba.name # Set the status of the", "hba = hba_mgr.find(name=faked_hba.name) # Execute the code to be tested", "'class': 'cpc', 'name': 'fake-cpc1-name', 'description': 'CPC #1 (DPM 
mode)', 'status':", "1, 'name': 'fake-port11-name', 'description': 'FCP #1 Port #1', }) assert", "value = hba.properties[prop_name] exp_value = input_props[prop_name] assert value == exp_value", "[HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id':", "auto-set based upon object-id 'parent': faked_adapter.uri, 'class': 'storage-port', 'name': 'fake-port1',", "faked HBA faked_hba = self.add_hba1() # Set the status of", "['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None), ] ) def test_hbamanager_list_full_properties( self,", "the port of the HBA is unchanged ... prop_name =", "Port #1', }) assert PORT11_URI == self.faked_port11.uri def add_hba1(self): \"\"\"Add", "the Hba object returned by Hba.create() has # the input", "'dpm', }) self.cpc = self.client.cpcs.find(name='fake-cpc1-name') # Add a partition to", "the resource is no longer found by its old name,", "'fake-hmc', '2.13.1', '1.8') self.client = Client(self.session) # Add a CPC", "import copy from zhmcclient import Client, Hba, HTTPError, NotFound from", "saved_properties = copy.deepcopy(hba.properties) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: #", "elif exp_prop_exc: exp_exc = exp_prop_exc else: exp_exc = None if", "of the HBA has been set ... # ... 
in", "and again when refreshed from the mock state: hba.pull_full_properties() assert", "self.faked_partition = self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', # element-uri will be automatically", "assert hba_mgr.parent == self.partition assert hba_mgr.partition == self.partition # TODO:", "5})), ({'name': 'fake-hba-x'}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'adapter-port-uri': PORT11_URI},", "saved_properties = copy.deepcopy(hba.properties) # Execute the code to be tested", "# Verify that the resource is no longer found by", "code to be tested hba.reassign_port(port) exc = exc_info.value if isinstance(exp_exc,", "element-uri will be automatically set 'parent': self.faked_cpc.uri, 'class': 'partition', 'name':", "'card-location': '1234-5678-J.01', 'port-count': 1, 'network-port-uris': [], 'state': 'online', 'configured-capacity': 80,", "new_hba_name}) # Verify that the resource is no longer found", "'fcp', 'adapter-id': '123', 'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01', 'port-count': 1, 'network-port-uris':", "# Check that the HBA no longer exists with pytest.raises(NotFound):", "HBA is unchanged ... prop_name = 'adapter-port-uri' # ... 
in", "exp_status_exc: exp_exc = exp_status_exc elif exp_prop_exc: exp_exc = exp_prop_exc else:", "object and verify that it still reflects the # update.", ") def test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names): \"\"\"Test HbaManager.list() with full_properties.\"\"\"", "None), ('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status':", "'adapter', 'name': 'fake-fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp',", "with pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete() followed", "copy.deepcopy(hba.properties) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the", "HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x'}, None, HTTPError({'http-status': 400, 'reason':", "hba = hba_mgr.create(properties=input_props) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert", "None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None), ('reservation-error', None),", "hba_mgr.create(properties=input_props) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status ==", "'element-id': 'fake-part1-oid', # element-uri will be automatically set 'parent': self.faked_cpc.uri,", "def test_hba_delete(self, initial_partition_status, exp_exc): \"\"\"Test Hba.delete().\"\"\" # Add a faked", "exp_prop_names, exp_prop_exc\", [ ({}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name':", "same name part3_props = copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third HBA' #", "\"\"\"Test initial attributes of HbaManager.\"\"\" hba_mgr = self.partition.hbas # Verify", "is auto-set based upon type 'adapter-id': '123', 'detected-card-type': 'ficon-express-16s', 'card-location':", "'adapter-id': '123', 'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01', 
'port-count': 1, 'network-port-uris': [],", "= 'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object): \"\"\"All tests", "('paused', None), ] ) def test_hba_reassign_port(self, initial_partition_status, exp_exc): \"\"\"Test Hba.reassign_port().\"\"\"", "exc.reason == exp_exc.reason # Check that the port of the", "exp_oids\", [ ({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID,", "exp_exc = exp_prop_exc else: exp_exc = None if exp_exc: with", "1 new_hba_list = new_hbas_list[0] assert new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize( \"initial_partition_status,", "}) return faked_hba def add_hba2(self): \"\"\"Add a faked HBA 2", "80, 'maximum-total-capacity': 80, 'physical-channel-status': 'operating', }) self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id':", "= hba_mgr.list(filter_args=filter_args) assert len(hbas) == len(exp_oids) if exp_oids: oids =", "be automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA2_NAME, 'description':", "reflects the update, even # though it has not been", "'dpm-enabled': True, 'is-ensemble-member': False, 'iml-mode': 'dpm', }) self.cpc = self.client.cpcs.find(name='fake-cpc1-name')", "find() (this uses the name-to-URI cache). with pytest.raises(NotFound): hba_mgr.find(name=hba_name) #", "the name-to-URI cache). hbas_list = hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list) ==", "in writing, software # distributed under the License is distributed", "+ 'foo'}, []), ] ) def test_hbamanager_list_filter_args(self, filter_args, exp_oids): \"\"\"Test", "Add a faked HBA to be tested. # Its port", "exp_value = input_props[prop_name] assert value == exp_value def test_hba_repr(self): \"\"\"Test", "the code to be tested. 
# Note: the Hba object", "PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111', }) return faked_hba def add_hba2(self):", "initial_partition_status, exp_exc): \"\"\"Test Hba.delete().\"\"\" # Add a faked HBA to", "adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', # element-uri is auto-set", "\"initial_partition_status, exp_status_exc\", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409,", "# list() (this does not use the name-to-URI cache). hbas_list", "using list() new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) == 1", "({'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]),", "third HBA with same name part3_props = copy.deepcopy(faked_hba.properties) part3_props['description'] =", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "a CPC in DPM mode self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid',", "# Execute the code to be tested repr_str = repr(hba)", "the status of the faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr", "# Refresh the resource object and verify that it still", "'foo'}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID}, []), ({'name':", "== new_hba_name @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated', None),", "= self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', # element-uri is auto-set based", "'description': 'New HBA description'}, ] ) def test_hba_update_properties(self, input_props): \"\"\"Test", "hba = hba_mgr.find(name=hba_name) # Execute the deletion code to be", "test on hba = self.partition.hbas.find(name=faked_hba.name) # Save the HBA properties", "a faked HBA faked_hba = self.add_hba1() 
hba_name = faked_hba.name #", "the License for the specific language governing permissions and #", "# Add a faked hba faked_hba = self.add_hba1() hba_mgr =", "self.client.cpcs.find(name='fake-cpc1-name') # Add a partition to the CPC self.faked_partition =", "@pytest.mark.parametrize( \"input_props, exp_prop_names, exp_prop_exc\", [ ({}, None, HTTPError({'http-status': 400, 'reason':", "self.faked_partition.uri, 'class': 'hba', 'name': HBA1_NAME, 'description': 'HBA ' + HBA1_NAME,", "== exp_prop_value def test_hba_update_name(self): \"\"\"Test Hba.update_properties() with 'name' property.\"\"\" #", "to be tested hba.update_properties(properties={'name': new_hba_name}) # Verify that the resource", "again when refreshed from the mock state: hba.pull_full_properties() assert hba.properties[prop_name]", "prop_name = 'adapter-port-uri' # ... in the resource object: assert", "saved_properties[prop_name] else: # Execute the code to be tested. hba.reassign_port(port)", "HBAs: HBA1_OID = 'hba 1-oid' HBA1_NAME = 'hba 1' HBA2_OID", "'description': 'Partition #1', 'status': 'active', 'initial-memory': 1024, 'maximum-memory': 2048, })", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "HBAs. Add one FCP adapter and port. \"\"\" self.session =", "[faked_hba1, faked_hba2] hba_mgr = self.partition.hbas # Execute the code to", "assert new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None),", "governing permissions and # limitations under the License. \"\"\" Unit", "}) assert PORT11_URI == self.faked_port11.uri def add_hba1(self): \"\"\"Add a faked", "== exp_hba_name hba_uri = hba.uri exp_hba_uri = hba.properties['element-uri'] assert hba_uri", "tested and another one faked_hba = self.add_hba1() self.add_hba2() # Set", "prop_value == exp_prop_value # Refresh the resource object and verify", "with one partition that has no HBAs. 
Add one FCP", "'123', 'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01', 'port-count': 1, 'network-port-uris': [], 'state':", "self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', # element-uri is set up automatically 'parent':", "HBA1_NAME, 'description': 'HBA ' + HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011',", "type 'adapter-id': '123', 'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count': 1, 'storage-port-uris':", "[HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID + 'foo', HBA1_OID]},", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "(dict(full_properties=True), None), ] ) def test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names): \"\"\"Test", "new name, using find() new_hba_find = hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] ==", "= 'hba 2' # URIs and Object IDs of elements", "exp_prop_exc: exp_exc = exp_prop_exc else: exp_exc = None if exp_exc:", "= self.add_hba1() faked_hba2 = self.add_hba2() exp_faked_hbas = [faked_hba1, faked_hba2] hba_mgr", "0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'channel-path-id': '1B', 'physical-channel-status': 'operating', })", "PORT11_OID, 'parent': self.faked_fcp1.uri, 'class': 'storage-port', 'index': 1, 'name': 'fake-port11-name', 'description':", "+ 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba", "1024, 'maximum-memory': 2048, }) self.partition = self.cpc.partitions.find(name='fake-part1-name') # Add an", "HTTPError({'http-status': 409, 'reason': 1})), ('degraded', None), ('reservation-error', None), ('paused', None),", "can be found by its new name, using find() new_hba_find", "hba_mgr.parent == self.partition assert hba_mgr.partition == self.partition # TODO: Test", "HBA1_OID = 'hba 1-oid' HBA1_NAME = 'hba 1' HBA2_OID =", "filter_args.\"\"\" # Add two faked HBAs 
self.add_hba1() self.add_hba2() hba_mgr =", "= self.add_hba1() self.add_hba2() # Set the status of the faked", "[HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME + 'foo'},", "Hba.delete() followed by Hba.create() with same name.\"\"\" # Add a", "object-id 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fake-fcp1', 'description': 'FCP #1',", "# Check that the HBA still exists hba_mgr.find(name=faked_hba.name) else: #", "exp_exc.reason # Check that the port of the HBA is", "[hba.properties['element-id'] for hba in hbas] assert set(oids) == set(exp_oids) @pytest.mark.parametrize(", "'maximum-memory': 2048, }) self.partition = self.cpc.partitions.find(name='fake-part1-name') # Add an FCP", "# distributed under the License is distributed on an \"AS", "self.faked_cpc.uri, 'class': 'adapter', 'name': 'fcp1', 'description': 'FCP #1', 'status': 'active',", "[HBA1_OID]), ({'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]},", "that the resource is no longer found by its old", "assert prop_value == exp_prop_value def test_hba_update_name(self): \"\"\"Test Hba.update_properties() with 'name'", "# Unless required by applicable law or agreed to in", "Execute the code to be tested hba = hba_mgr.create(properties=input_props) exc", "= self.cpc.partitions.find(name='fake-part1-name') # Add an FCP adapter and port to", "been refreshed yet. 
assert hba.properties['name'] == new_hba_name # Refresh the", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "prop_names) @pytest.mark.parametrize( \"filter_args, exp_oids\", [ ({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID},", "'initial-memory': 1024, 'maximum-memory': 2048, }) self.partition = self.cpc.partitions.find(name='fake-part1-name') # Add", "deletable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) # Execute the", "'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'physical-channel-status': 'operating',", "# Verify that the resource can be found by its", "exp_hba_name hba_uri = hba.uri exp_hba_uri = hba.properties['element-uri'] assert hba_uri ==", "code to be tested. # Note: the Hba object returned", "initial_partition_status, exp_status_exc): \"\"\"Test HbaManager.create().\"\"\" # Set the status of the", "('degraded', None), ('reservation-error', None), ('paused', None), ] ) def test_hba_reassign_port(self,", "the Apache License, Version 2.0 (the \"License\"); # you may", "tested repr_str = repr(hba) repr_str = repr_str.replace('\\n', '\\\\n') # We", "' + HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112', })", "= hba.name exp_hba_name = hba.properties['name'] assert hba_name == exp_hba_name hba_uri", "faked_hba2] hba_mgr = self.partition.hbas # Execute the code to be", "input properties for a third HBA with same name part3_props", "'hba', 'name': HBA1_NAME, 'description': 'HBA ' + HBA1_NAME, 'adapter-port-uri': PORT11_URI,", "0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'physical-channel-status': 'operating', }) self.faked_port11 =", "self.add_hba2() # Construct the input properties for a third HBA", "upon object-id 'parent': faked_adapter.uri, 'class': 'storage-port', 'name': 'fake-port1', 'description': 'FCP", "= exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status == 
exp_exc.http_status assert", "tested. hba.delete() # Check that the HBA no longer exists", "hba.properties prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value # Refresh", "+ 'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name':", "# updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties", "object-id 'parent': faked_adapter.uri, 'class': 'storage-port', 'name': 'fake-port1', 'description': 'FCP #1", "pytest import re import copy from zhmcclient import Client, Hba,", "assert prop_name in hba.properties if prop_name in input_props: value =", "PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object): \"\"\"All tests for Hba", "object assert hba_mgr.resource_class == Hba assert hba_mgr.session == self.session assert", "HBA no longer exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute the", "its old name, using # find() (this uses the name-to-URI", "[HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]),", "session, and add a faked CPC in DPM mode with", "faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA2_OID, # element-uri will be automatically", "names of our faked HBAs: HBA1_OID = 'hba 1-oid' HBA1_NAME", "[]), ({'element-id': [HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID +", "partition to the CPC self.faked_partition = self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', #", "self.add_hba2() hba_mgr = self.partition.hbas # Execute the code to be", "+ HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111', }) return", "the HBA properties for later comparison hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties)", "assert isinstance(hba, Hba) hba_name = hba.name exp_hba_name = hba.properties['name'] 
assert", "# TODO: Test for HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs, prop_names\", [ (dict(),", "self, full_properties_kwargs, prop_names): \"\"\"Test HbaManager.list() with full_properties.\"\"\" # Add two", "2-oid' HBA2_NAME = 'hba 2' # URIs and Object IDs", "hba.update_properties(properties=input_props) # Verify that the resource object already reflects the", "under the License is distributed on an \"AS IS\" BASIS,", "with same name part3_props = copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third HBA'", "= self.client.cpcs.find(name='fake-cpc1-name') # Add a partition to the CPC self.faked_partition", "Save the HBA properties for later comparison hba.pull_full_properties() saved_properties =", "'foo'}, []), ] ) def test_hbamanager_list_filter_args(self, filter_args, exp_oids): \"\"\"Test HbaManager.list()", "HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]),", "reassigned to faked_adapter = self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', # object-uri is", "HBA2_OID]), ({'name': HBA1_NAME + 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME +", "the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA1_OID, # element-uri", "None), ('paused', None), ] ) def test_hba_delete(self, initial_partition_status, exp_exc): \"\"\"Test", "({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID,", "object-uri is auto-set based upon object-id 'parent': self.faked_cpc.uri, 'class': 'adapter',", "[HBA1_OID]), ({'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]),", "in hba.properties if prop_name in input_props: value = hba.properties[prop_name] exp_value", "# Execute the code to be tested hba.reassign_port(port) exc =", "else: # Execute the code to be tested. 
hba.reassign_port(port) #", "'.+'}, [HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name': '.+hba 1'},", "HBA' # Set the status of the faked partition self.faked_partition.properties['status']", "'description': 'FCP #1 Port 1', 'index': 0, 'fabric-id': None, })", "400, 'reason': 5})), ({'name': 'fake-hba-x'}, None, HTTPError({'http-status': 400, 'reason': 5})),", "None), ('paused', None), ] ) @pytest.mark.parametrize( \"input_props, exp_prop_names, exp_prop_exc\", [", "= adapter.ports.find(name='fake-port1') # Set the status of the faked partition", "\"\"\"Test HbaManager.list() with full_properties.\"\"\" # Add two faked HBAs faked_hba1", "Check that the HBA exists again under that name hba3", "#1 Port #1', }) assert PORT11_URI == self.faked_port11.uri def add_hba1(self):", "Hba.create() has # the input properties plus 'element-uri' plus 'element-id'.", "'AABBCCDDEEFF0011', 'device-number': '1111', }) return faked_hba def add_hba2(self): \"\"\"Add a", "hba_mgr.create(properties=input_props) # Check the resource for consistency within itself assert", "HBA1_NAME + 'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME + 'foo',", "self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri, 'class': 'storage-port', 'index':", "of the faked partition self.faked_partition.properties['status'] = 'stopped' # updatable hba_mgr", "HbaManager.\"\"\" hba_mgr = self.partition.hbas # Verify all public properties of", "port of the HBA is unchanged ... prop_name = 'adapter-port-uri'", "def setup_method(self): \"\"\" Set up a faked session, and add", "self.add_hba1() hba_name = faked_hba.name self.add_hba2() # Construct the input properties", "description == 'Third HBA' @pytest.mark.parametrize( \"input_props\", [ {}, {'description': 'New", "exp_exc.reason else: # Execute the code to be tested. #", "ANY KIND, either express or implied. 
# See the License", "= hba.properties['name'] assert hba_name == exp_hba_name hba_uri = hba.uri exp_hba_uri", "the License. # You may obtain a copy of the", "HBA description'}, ] ) def test_hba_update_properties(self, input_props): \"\"\"Test Hba.update_properties().\"\"\" #", "'fcp', # adapter-family is auto-set based upon type 'adapter-id': '123',", "0 # Verify that the resource is no longer found", "in input_props: value = hba.properties[prop_name] exp_value = input_props[prop_name] assert value", "# See the License for the specific language governing permissions", "self.add_hba1() hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) # Execute the", "1-oid' HBA1_NAME = 'hba 1' HBA2_OID = 'hba 2-oid' HBA2_NAME", "no longer found by its old name, using # find()", "hba.uri exp_hba_uri = hba.properties['element-uri'] assert hba_uri == exp_hba_uri # Check", "Execute the code to be tested hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas,", "tests.common.utils import assert_resources # Object IDs and names of our", "tested hba.update_properties(properties=input_props) # Verify that the resource object already reflects", "# Note: the Hba object returned by Hba.create() has #", "partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name)", "None), ] ) def test_hba_delete(self, initial_partition_status, exp_exc): \"\"\"Test Hba.delete().\"\"\" #", "hba_mgr.find(name=hba_name) # Execute the deletion code to be tested. 
hba.delete()", "('reservation-error', None), ('paused', None), ] ) @pytest.mark.parametrize( \"input_props, exp_prop_names, exp_prop_exc\",", "('degraded', None), ('reservation-error', None), ('paused', None), ] ) def test_hba_delete(self,", "# Execute the code to be tested hbas = hba_mgr.list(filter_args=filter_args)", "element-uri is set up automatically 'parent': None, 'class': 'cpc', 'name':", "FakedSession from tests.common.utils import assert_resources # Object IDs and names", "# Add an FCP adapter and port to the CPC", "409, 'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})),", "port = adapter.ports.find(name='fake-port1') # Set the status of the faked", "= self.add_hba1() hba_name = faked_hba.name self.add_hba2() # Construct the input", "# update. hba.pull_full_properties() assert hba.properties['name'] == new_hba_name # Verify that", "a faked HBA to be tested. # Its port points", "automatically set 'parent': self.faked_cpc.uri, 'class': 'partition', 'name': 'fake-part1-name', 'description': 'Partition", "'AABBCCDDEEFF0012', 'device-number': '1112', }) return faked_hba def test_hbamanager_initial_attrs(self): \"\"\"Test initial", "# updates. 
for prop_name in saved_properties: if prop_name in input_props:", "= self.partition.hbas # Verify all public properties of the manager", "'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80,", "None), ('reservation-error', None), ('paused', None), ] ) def test_hba_reassign_port(self, initial_partition_status,", "HBA1_NAME + 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]),", "'stopped' # updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties()", "# Add a faked HBA faked_hba = self.add_hba1() hba_name =", "'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01', 'port-count': 1, 'network-port-uris': [], 'state': 'online',", "tested hba = hba_mgr.create(properties=input_props) exc = exc_info.value if isinstance(exp_exc, HTTPError):", "a faked HBA 2 to the faked partition.\"\"\" faked_hba =", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "[HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name':", "be found by its new name, using find() new_hba_find =", "Port 1', 'index': 0, 'fabric-id': None, }) port = adapter.ports.find(name='fake-port1')", "the code to be tested. 
hba.reassign_port(port) # Check that the", "writing, software # distributed under the License is distributed on", "self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name':", "None), ('reservation-error', None), ('paused', None), ] ) def test_hba_delete(self, initial_partition_status,", "new_hba_name # Verify that the resource can be found by", "[HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME},", "exp_status_exc elif exp_prop_exc: exp_exc = exp_prop_exc else: exp_exc = None", "description'}, ] ) def test_hba_update_properties(self, input_props): \"\"\"Test Hba.update_properties().\"\"\" # Add", "# Check that the port of the HBA is unchanged", "assert PORT11_URI == self.faked_port11.uri def add_hba1(self): \"\"\"Add a faked HBA", "def test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names): \"\"\"Test HbaManager.list() with full_properties.\"\"\" #", "CPC self.faked_partition = self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', # element-uri will be", "(this uses the name-to-URI cache). with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify", "# ... in the resource object: prop_name = 'adapter-port-uri' assert", "exp_prop_exc\", [ ({}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x'},", "exists again under that name hba3 = hba_mgr.find(name=hba_name) description =", "properties for a third HBA with same name part3_props =", "\"\"\"Test Hba.delete() followed by Hba.create() with same name.\"\"\" # Add", "[HBA1_OID]), ({'name': HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []), ({'name': HBA1_NAME", "'FCP #1', 'status': 'active', 'type': 'fcp', # adapter-family is auto-set", "Hba.update_properties().\"\"\" # Add a faked HBA faked_hba = self.add_hba1() #", "object: assert hba.properties[prop_name] == saved_properties[prop_name] # ... 
and again when", "assert len(new_hbas_list) == 1 new_hba_list = new_hbas_list[0] assert new_hba_list.properties['name'] ==", "== port.uri # ... and again when refreshed from the", "'physical-channel-status': 'operating', }) adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid', #", "# Check the properties against the expected names and values", "list() new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) == 1 new_hba_list", "PORT11_OID = 'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object): \"\"\"All", "'name': HBA2_NAME, 'description': 'HBA ' + HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn':", "verify that it still reflects the # update. hba.pull_full_properties() assert", "automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA1_NAME, 'description': 'HBA", "new_hba_name @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated', None), ('starting',", "part3_props = copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third HBA' # Set the", "'adapter-port-uri' # ... in the resource object: assert hba.properties[prop_name] ==", "to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA2_OID, #", "'class': 'partition', 'name': 'fake-part1-name', 'description': 'Partition #1', 'status': 'active', 'initial-memory':", "exists hba_mgr.find(name=faked_hba.name) else: # Execute the code to be tested.", "# the input properties plus 'element-uri' plus 'element-id'. 
hba =", "description = hba3.get_property('description') assert description == 'Third HBA' @pytest.mark.parametrize( \"input_props\",", "hba.delete() # Check that the HBA no longer exists with", "Add a faked HBA faked_hba = self.add_hba1() # Set the", "no longer exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute the creation", "properties: FCP1_OID = 'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID,", "saved_properties[prop_name] # ... and again when refreshed from the mock", "language governing permissions and # limitations under the License. \"\"\"", "with one port that the HBA will be reassigned to", "= exp_status_exc elif exp_prop_exc: exp_exc = exp_prop_exc else: exp_exc =", "import FakedSession from tests.common.utils import assert_resources # Object IDs and", "2048, }) self.partition = self.cpc.partitions.find(name='fake-part1-name') # Add an FCP adapter", "tested hba.delete() exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status", "a faked HBA 1 to the faked partition.\"\"\" faked_hba =", "hba.pull_full_properties() assert hba.properties['name'] == new_hba_name # Verify that the resource", "'fabric-id': None, }) port = adapter.ports.find(name='fake-port1') # Set the status", "= self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) #", "(DPM mode)', 'status': 'active', 'dpm-enabled': True, 'is-ensemble-member': False, 'iml-mode': 'dpm',", "assert prop_name in hba.properties prop_value = hba.properties[prop_name] assert prop_value ==", "'network-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80,", "be tested hba = hba_mgr.create(properties=input_props) exc = exc_info.value if isinstance(exp_exc,", "# limitations under the License. 
\"\"\" Unit tests for _hba", "Verify that the resource is no longer found by its", "deletion code to be tested. hba.delete() # Check that the", "# Execute the code to be tested hba = hba_mgr.create(properties=input_props)", "hba.properties['name'] assert hba_name == exp_hba_name hba_uri = hba.uri exp_hba_uri =", "('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active', None), ('stopping',", "hba.properties[prop_name] == saved_properties[prop_name] # ... and again when refreshed from", "to faked_adapter = self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', # object-uri is auto-set", "\"\"\"Test HbaManager.create().\"\"\" # Set the status of the faked partition", "we will perform the test on hba = self.partition.hbas.find(name=faked_hba.name) #", "by its new name, using find() new_hba_find = hba_mgr.find(name=new_hba_name) assert", "hba.reassign_port(port) # Check that the port of the HBA has", "it has not been refreshed yet. assert hba.properties['name'] == new_hba_name", "self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri, 'class': 'storage-port', 'index': 1, 'name':", "('degraded', None), ('reservation-error', None), ('paused', None), ] ) @pytest.mark.parametrize( \"input_props,", "copy from zhmcclient import Client, Hba, HTTPError, NotFound from zhmcclient_mock", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "Object IDs of elements referenced in HBA properties: FCP1_OID =", "HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME + 'foo'}, []), ({'name': [HBA1_NAME,", "the resource object already reflects the property # updates. 
for", "HbaManager classes.\"\"\" def setup_method(self): \"\"\" Set up a faked session,", "full_properties_kwargs, prop_names): \"\"\"Test HbaManager.list() with full_properties.\"\"\" # Add two faked", "exp_status_exc\", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason':", "[HBA1_OID, HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID, HBA2_OID]),", "zhmcclient import Client, Hba, HTTPError, NotFound from zhmcclient_mock import FakedSession", "cache). with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify that the resource object", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "self.client = Client(self.session) # Add a CPC in DPM mode", "= hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] == new_hba_name # Verify that the", "HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID + 'foo'}, []),", "that the port of the HBA has been set ...", "initial attributes of HbaManager.\"\"\" hba_mgr = self.partition.hbas # Verify all", "one faked_hba = self.add_hba1() self.add_hba2() # Set the status of", "object we will perform the test on hba = self.partition.hbas.find(name=faked_hba.name)", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "'element-id': HBA2_OID, # element-uri will be automatically set 'parent': self.faked_partition.uri,", "HBA faked_hba = self.add_hba1() # Set the status of the", "'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'], None), ] ) def", "'.ba 1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'},", "set 'parent': self.faked_cpc.uri, 'class': 'partition', 'name': 'fake-part1-name', 'description': 'Partition #1',", "'maximum-total-capacity': 80, 'physical-channel-status': 'operating', }) self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id': PORT11_OID,", "test_hbamanager_create( 
self, input_props, exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc): \"\"\"Test HbaManager.create().\"\"\" #", "\"input_props\", [ {}, {'description': 'New HBA description'}, {'device-number': 'FEDC', 'description':", "+ 'foo'}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID}, []),", "the HBA will be reassigned to faked_adapter = self.faked_cpc.adapters.add({ 'object-id':", "exp_prop_names: assert prop_name in hba.properties if prop_name in input_props: value", "the code to be tested hba.update_properties(properties=input_props) # Verify that the", "HTTPError({'http-status': 400, 'reason': 5})), ({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status': 400, 'reason':", "partition self.faked_partition.properties['status'] = 'stopped' # updatable hba_mgr = self.partition.hbas hba", "exp_exc.http_status assert exc.reason == exp_exc.reason # Check that the port", "'operating', }) self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri, 'class':", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "to be tested. hba.delete() # Check that the HBA no", "'1.8') self.client = Client(self.session) # Add a CPC in DPM", "def test_hba_reassign_port(self, initial_partition_status, exp_exc): \"\"\"Test Hba.reassign_port().\"\"\" # Add a faked", "be tested hba.update_properties(properties=input_props) # Verify that the resource object already", "# Execute the code to be tested hba.update_properties(properties=input_props) # Verify", "Rights Reserved. # # Licensed under the Apache License, Version", "# element-uri will be automatically set 'parent': self.faked_cpc.uri, 'class': 'partition',", "hba.properties['name'] == new_hba_name # Refresh the resource object and verify", "IBM Corp. All Rights Reserved. 
# # Licensed under the", "\"\"\"All tests for Hba and HbaManager classes.\"\"\" def setup_method(self): \"\"\"", "@pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status':", "specific language governing permissions and # limitations under the License.", "({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]), ({'name': 'hba", "self.partition # TODO: Test for HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs, prop_names\", [", "# Execute the code to be tested hbas = hba_mgr.list(**full_properties_kwargs)", "'description': 'FCP #1', 'status': 'active', 'type': 'fcp', # adapter-family is", "'10gbe-roce-express', 'card-location': '1234-5678-J.01', 'port-count': 1, 'network-port-uris': [], 'state': 'online', 'configured-capacity':", "[]), ({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME, 'element-id': HBA1_OID", "else: # Execute the code to be tested. # Note:", "HBA' @pytest.mark.parametrize( \"input_props\", [ {}, {'description': 'New HBA description'}, {'device-number':", "longer found by its old name, using # find() (this", "name-to-URI cache). with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify that the resource", "[ ({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]},", "# you may not use this file except in compliance", "faked partition self.faked_partition.properties['status'] = 'stopped' # updatable hba_mgr = self.partition.hbas", "that has no HBAs. 
Add one FCP adapter and port.", "import Client, Hba, HTTPError, NotFound from zhmcclient_mock import FakedSession from", "({'name': '.*hba 1'}, [HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba", "'fake-part1-oid', # element-uri will be automatically set 'parent': self.faked_cpc.uri, 'class':", "HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID +", "if exp_status_exc: exp_exc = exp_status_exc elif exp_prop_exc: exp_exc = exp_prop_exc", "({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID + 'foo'}, []), ({'element-id':", "PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'], None), ] ) def test_hbamanager_create( self,", "#1', 'status': 'active', 'type': 'fcp', 'adapter-id': '123', 'detected-card-type': '10gbe-roce-express', 'card-location':", "self.add_hba1() self.add_hba2() hba_mgr = self.partition.hbas # Execute the code to", "to the CPC self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent': self.faked_cpc.uri,", "HBA2_OID, # element-uri will be automatically set 'parent': self.faked_partition.uri, 'class':", "DPM mode with one partition that has no HBAs. Add", "hba_mgr.find(name=hba_name) # Verify that the resource object already reflects the", "('starting', HTTPError({'http-status': 409, 'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status': 409,", "the properties against the expected names and values for prop_name", "the code to be tested hba.update_properties(properties={'name': new_hba_name}) # Verify that", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "hba_mgr = self.partition.hbas if exp_status_exc: exp_exc = exp_status_exc elif exp_prop_exc:", "partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA2_OID, # element-uri will be", "the input properties plus 'element-uri' plus 'element-id'. 
hba = hba_mgr.create(properties=input_props)", "HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason else:", "adapter-family is auto-set based upon type 'adapter-id': '123', 'detected-card-type': 'ficon-express-16s',", "resource object and verify that it still reflects the #", "Copyright 2016-2017 IBM Corp. All Rights Reserved. # # Licensed", "= saved_properties[prop_name] assert prop_name in hba.properties prop_value = hba.properties[prop_name] assert", "IDs and names of our faked HBAs: HBA1_OID = 'hba", "# Check that the HBA exists again under that name", "'active', 'type': 'fcp', 'adapter-id': '123', 'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01', 'port-count':", "property # updates. for prop_name in saved_properties: if prop_name in", "name, using # find() (this uses the name-to-URI cache). with", "CPC in DPM mode with one partition that has no", "name, using # list() (this does not use the name-to-URI", "under the Apache License, Version 2.0 (the \"License\"); # you", "Hba object returned by Hba.create() has # the input properties", "Add two faked HBAs self.add_hba1() self.add_hba2() hba_mgr = self.partition.hbas #", "name, using list() new_hbas_list = hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) ==", "'reason': 1})), ('active', None), ('stopping', HTTPError({'http-status': 409, 'reason': 1})), ('degraded',", "HBA properties for later comparison hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) if", "be tested and another one faked_hba = self.add_hba1() hba_name =", "based upon object-id 'parent': faked_adapter.uri, 'class': 'storage-port', 'name': 'fake-port1', 'description':", "FCP with one port that the HBA will be reassigned", "that the HBA no longer exists with pytest.raises(NotFound) as exc_info:", "({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID + 'foo'}, []), ]", "the code to be tested repr_str = repr(hba) repr_str =", 
"[], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity':", "assert hba_uri == exp_hba_uri # Check the properties against the", "# Add a CPC in DPM mode self.faked_cpc = self.session.hmc.cpcs.add({", "self.faked_partition.properties['status'] = initial_partition_status # The HBA object we will perform", "({'name': '.+'}, [HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name': '.+hba", "Add a faked hba faked_hba = self.add_hba1() hba_mgr = self.partition.hbas", "= 'stopped' # updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name)", "Execute the deletion code to be tested. hba.delete() # Check", "# The HBA object we will perform the test on", "hba3.get_property('description') assert description == 'Third HBA' @pytest.mark.parametrize( \"input_props\", [ {},", "exp_oids): \"\"\"Test HbaManager.list() with filter_args.\"\"\" # Add two faked HBAs", "exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason # Check that", "manager object assert hba_mgr.resource_class == Hba assert hba_mgr.session == self.session", "input_props[prop_name] assert value == exp_value def test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\" #", "repr_str = repr(hba) repr_str = repr_str.replace('\\n', '\\\\n') # We check", "of HbaManager.\"\"\" hba_mgr = self.partition.hbas # Verify all public properties", "80, 'maximum-total-capacity': 80, 'channel-path-id': '1B', 'physical-channel-status': 'operating', }) adapter =", "be tested hba.delete() exc = exc_info.value if isinstance(exp_exc, HTTPError): assert", "({'name': '.+hba 1'}, []), ({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name':", "hba_name = faked_hba.name # Set the status of the faked", "HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs, prop_names\", [ (dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']), (dict(full_properties=True),", "hba.properties['element-uri'] assert 
hba_uri == exp_hba_uri # Check the properties against", "'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object):", "HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri', 'name',", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "'/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object): \"\"\"All tests for Hba and HbaManager", "under the License. \"\"\" Unit tests for _hba module. \"\"\"", "FCP adapter and port. \"\"\" self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1',", "self.faked_partition.hbas.add({ 'element-id': HBA1_OID, # element-uri will be automatically set 'parent':", "the begin of the string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. format(classname=hba.__class__.__name__, id=id(hba)), repr_str)", "to be tested. hba_mgr.create(part3_props) # Check that the HBA exists", "'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME},", "Execute the code to be tested hba.update_properties(properties={'name': new_hba_name}) # Verify", "'hba 2' # URIs and Object IDs of elements referenced", "'stopped' # deletable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) #", "HBA faked_hba = self.add_hba1() hba_name = faked_hba.name # Set the", "the code to be tested hba.reassign_port(port) exc = exc_info.value if", "HBA will be reassigned to faked_adapter = self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid',", "len(hbas_list) == 0 # Verify that the resource is no", "[HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID + 'foo'}, []), ({'element-id': [HBA1_OID,", "tested and another one faked_hba = self.add_hba1() hba_name = faked_hba.name", "# updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) new_hba_name =", "port points to a faked URI. 
faked_hba = self.add_hba1() #", "None), ] ) @pytest.mark.parametrize( \"input_props, exp_prop_names, exp_prop_exc\", [ ({}, None,", "exp_exc = None if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: #", "longer found by its old name, using # list() (this", "that it still reflects the # update. hba.pull_full_properties() assert hba.properties['name']", "assert hba_mgr.partition == self.partition # TODO: Test for HbaManager.__repr__() @pytest.mark.parametrize(", "None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri',", "= new_hbas_list[0] assert new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [", "assert value == exp_value def test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\" # Add", "as exc_info: # Execute the code to be tested hba.reassign_port(port)", "= hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize( \"filter_args, exp_oids\", [ ({'element-id':", "self.partition.hbas if exp_status_exc: exp_exc = exp_status_exc elif exp_prop_exc: exp_exc =", "+ 'foo', 'element-id': HBA1_OID + 'foo'}, []), ] ) def", "'description': 'HBA ' + HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number':", "'fake-port1-oid', # element-uri is auto-set based upon object-id 'parent': faked_adapter.uri,", "assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize( \"filter_args, exp_oids\", [ ({'element-id': HBA1_OID}, [HBA1_OID]),", "'channel-path-id': '1B', 'physical-channel-status': 'operating', }) adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id':", "'card-location': '1234-5678-J.01', 'port-count': 1, 'storage-port-uris': [], 'state': 'online', 'configured-capacity': 80,", "for a third HBA with same name part3_props = copy.deepcopy(faked_hba.properties)", "assert description == 'Third 
HBA' @pytest.mark.parametrize( \"input_props\", [ {}, {'description':", "Add a faked HBA faked_hba = self.add_hba1() hba_name = faked_hba.name", "HBA2_NAME = 'hba 2' # URIs and Object IDs of", "assert prop_value == exp_prop_value # Refresh the resource object and", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "and port. \"\"\" self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8') self.client", "followed by Hba.create() with same name.\"\"\" # Add a faked", "= self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) if exp_exc: with pytest.raises(exp_exc.__class__) as", "as exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete() followed by Hba.create()", "}) self.cpc = self.client.cpcs.find(name='fake-cpc1-name') # Add a partition to the", "property.\"\"\" # Add a faked HBA faked_hba = self.add_hba1() hba_name", "that the HBA no longer exists with pytest.raises(NotFound): hba_mgr.find(name=hba_name) #", "= 'stopped' # updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name)", "'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME", "itself assert isinstance(hba, Hba) hba_name = hba.name exp_hba_name = hba.properties['name']", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id':", "Add two faked HBAs faked_hba1 = self.add_hba1() faked_hba2 = self.add_hba2()", "<filename>tests/unit/zhmcclient/test_hba.py<gh_stars>0 # Copyright 2016-2017 IBM Corp. All Rights Reserved. 
#", "Execute the code to be tested repr_str = repr(hba) repr_str", "and verify that the resource object # still reflects the", "= hba.properties['element-uri'] assert hba_uri == exp_hba_uri # Check the properties", "partition self.faked_partition.properties['status'] = 'stopped' # deletable hba_mgr = self.partition.hbas hba", "'New HBA description'}, {'device-number': 'FEDC', 'description': 'New HBA description'}, ]", "hba.name exp_hba_name = hba.properties['name'] assert hba_name == exp_hba_name hba_uri =", "mode)', 'status': 'active', 'dpm-enabled': True, 'is-ensemble-member': False, 'iml-mode': 'dpm', })", "\"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409,", "'parent': faked_adapter.uri, 'class': 'storage-port', 'name': 'fake-port1', 'description': 'FCP #1 Port", "hba.properties[prop_name] == port.uri # ... and again when refreshed from", "# ... in the resource object: assert hba.properties[prop_name] == saved_properties[prop_name]", "_hba module. \"\"\" from __future__ import absolute_import, print_function import pytest", "'status': 'active', 'type': 'fcp', 'adapter-id': '123', 'detected-card-type': '10gbe-roce-express', 'card-location': '1234-5678-J.01',", "self.partition assert hba_mgr.partition == self.partition # TODO: Test for HbaManager.__repr__()", "element-uri will be automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name':", "exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status == exp_exc.http_status", "based upon type 'adapter-id': '123', 'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count':", "will be automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA2_NAME,", "if isinstance(exp_exc, HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason ==", "reflects the property # updates. 
for prop_name in saved_properties: if", "object already reflects the property # updates. for prop_name in", "'New HBA description'}, ] ) def test_hba_update_properties(self, input_props): \"\"\"Test Hba.update_properties().\"\"\"", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third HBA' # Set the status", "name-to-URI cache). hbas_list = hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list) == 0", "the faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas hba", "'1112', }) return faked_hba def test_hbamanager_initial_attrs(self): \"\"\"Test initial attributes of", "Set up a faked session, and add a faked CPC", "exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete() followed by Hba.create() with", "the HBA no longer exists with pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name)", "resource object already reflects the update, even # though it", "'is-ensemble-member': False, 'iml-mode': 'dpm', }) self.cpc = self.client.cpcs.find(name='fake-cpc1-name') # Add", "= 'adapter-port-uri' assert hba.properties[prop_name] == port.uri # ... 
and again", "faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas if exp_status_exc:", "'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80, 'maximum-total-capacity': 80, 'physical-channel-status':", "the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA2_OID, # element-uri", "# Check that the HBA no longer exists with pytest.raises(NotFound)", "the resource object already reflects the update, even # though", "Add a faked FCP with one port that the HBA", "== Hba assert hba_mgr.session == self.session assert hba_mgr.parent == self.partition", "'element-id': 'fake-cpc1-oid', # element-uri is set up automatically 'parent': None,", "with same name.\"\"\" # Add a faked HBA to be", "that the resource object # still reflects the property updates.", "'partition', 'name': 'fake-part1-name', 'description': 'Partition #1', 'status': 'active', 'initial-memory': 1024,", "hba.pull_full_properties() for prop_name in saved_properties: if prop_name in input_props: exp_prop_value", "= self.add_hba1() # Set the status of the faked partition", "tested hba.reassign_port(port) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status", "= self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', # element-uri will be automatically set", "with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Execute the creation code to be", "import assert_resources # Object IDs and names of our faked", "({'element-id': HBA1_OID + 'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID + 'foo']},", "# Execute the code to be tested hba.update_properties(properties={'name': new_hba_name}) #", "2' # URIs and Object IDs of elements referenced in", "one FCP adapter and port. 
\"\"\" self.session = FakedSession('fake-host', 'fake-hmc',", "[]), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID + 'foo'}, []),", "its new name, using find() new_hba_find = hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name']", "all public properties of the manager object assert hba_mgr.resource_class ==", "\"\"\"Add a faked HBA 2 to the faked partition.\"\"\" faked_hba", "to be tested hba.update_properties(properties=input_props) # Verify that the resource object", "be automatically set 'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA1_NAME, 'description':", "prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value def test_hba_update_name(self): \"\"\"Test", "class TestHba(object): \"\"\"All tests for Hba and HbaManager classes.\"\"\" def", "Add a faked HBA to be tested and another one", "= input_props[prop_name] assert value == exp_value def test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\"", "== new_hba_name # Verify that the resource can be found", "== new_hba_name # Refresh the resource object and verify that", "HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]),", "exp_exc.reason # Check that the HBA still exists hba_mgr.find(name=faked_hba.name) else:", "Verify that the resource object already reflects the update, even", "faked_hba = self.add_hba1() self.add_hba2() # Set the status of the", "HBA has been set ... # ... in the resource", "Hba.create() with same name.\"\"\" # Add a faked HBA to", "= self.partition.hbas hba = hba_mgr.find(name=hba_name) new_hba_name = \"new-\" + hba_name", "code to be tested hba.update_properties(properties={'name': new_hba_name}) # Verify that the", "new_hba_find.properties['name'] == new_hba_name # Verify that the resource can be", "use this file except in compliance with the License. 
#", "'storage-port', 'index': 1, 'name': 'fake-port11-name', 'description': 'FCP #1 Port #1',", "'element-id': HBA1_OID + 'foo'}, []), ({'name': HBA1_NAME + 'foo', 'element-id':", "auto-set based upon type 'adapter-id': '123', 'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01',", "'.+hba 1'}, []), ({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME,", "# Save the HBA properties for later comparison hba.pull_full_properties() saved_properties", "string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [", "import re import copy from zhmcclient import Client, Hba, HTTPError,", "faked HBA faked_hba = self.add_hba1() hba_name = faked_hba.name # Set", "prop_value == exp_prop_value def test_hba_update_name(self): \"\"\"Test Hba.update_properties() with 'name' property.\"\"\"", "+ 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]), ({'name':", "'Third HBA' @pytest.mark.parametrize( \"input_props\", [ {}, {'description': 'New HBA description'},", "faked session, and add a faked CPC in DPM mode", "code to be tested. hba_mgr.create(part3_props) # Check that the HBA", "hba_mgr.find(name=faked_hba.name) else: # Execute the code to be tested. 
hba.delete()", "tests for Hba and HbaManager classes.\"\"\" def setup_method(self): \"\"\" Set", "'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count': 1, 'storage-port-uris': [], 'state': 'online',", "the code to be tested hba = hba_mgr.create(properties=input_props) exc =", "assert len(hbas) == len(exp_oids) if exp_oids: oids = [hba.properties['element-id'] for", "'1B', 'physical-channel-status': 'operating', }) adapter = self.cpc.adapters.find(name='fake-fcp1') faked_adapter.ports.add({ 'element-id': 'fake-port1-oid',", "TestHba(object): \"\"\"All tests for Hba and HbaManager classes.\"\"\" def setup_method(self):", "for HbaManager.__repr__() @pytest.mark.parametrize( \"full_properties_kwargs, prop_names\", [ (dict(), ['element-uri']), (dict(full_properties=False), ['element-uri']),", "HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME +", "== len(exp_oids) if exp_oids: oids = [hba.properties['element-id'] for hba in", "faked_hba = self.add_hba1() hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) #", "'adapter-port-uri': PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'], None), ] ) def test_hbamanager_create(", "that name hba3 = hba_mgr.find(name=hba_name) description = hba3.get_property('description') assert description", "old name, using # find() (this uses the name-to-URI cache).", "Verify all public properties of the manager object assert hba_mgr.resource_class", "repr_str.replace('\\n', '\\\\n') # We check just the begin of the", "hba_mgr.create(part3_props) # Check that the HBA exists again under that", "code to be tested hbas = hba_mgr.list(filter_args=filter_args) assert len(hbas) ==", "in compliance with the License. 
# You may obtain a", "+ hba_name # Execute the code to be tested hba.update_properties(properties={'name':", "for prop_name in saved_properties: if prop_name in input_props: exp_prop_value =", "the port of the HBA has been set ... #", "software # distributed under the License is distributed on an", "the expected names and values for prop_name in exp_prop_names: assert", "reflects the property updates. hba.pull_full_properties() for prop_name in saved_properties: if", "that the resource can be found by its new name,", "def test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\" # Add a faked hba faked_hba", "port. \"\"\" self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8') self.client =", "object: prop_name = 'adapter-port-uri' assert hba.properties[prop_name] == port.uri # ...", "... prop_name = 'adapter-port-uri' # ... in the resource object:", "1'}, []), ({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME, 'element-id':", "[HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]},", "= hba.properties[prop_name] assert prop_value == exp_prop_value def test_hba_update_name(self): \"\"\"Test Hba.update_properties()", "to be tested. 
# Its port points to a faked", "exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason else: # Execute", "= self.partition.hbas # Execute the code to be tested hbas", "({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID,", "2 to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA2_OID,", "FCP adapter and port to the CPC self.faked_fcp1 = self.faked_cpc.adapters.add({", "will perform the test on hba = self.partition.hbas.find(name=faked_hba.name) # Save", "= initial_partition_status hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) if exp_exc:", "resource object: prop_name = 'adapter-port-uri' assert hba.properties[prop_name] == port.uri #", "'hba', 'name': HBA2_NAME, 'description': 'HBA ' + HBA2_NAME, 'adapter-port-uri': PORT11_URI,", "saved_properties: if prop_name in input_props: exp_prop_value = input_props[prop_name] else: exp_prop_value", "'allowed-capacity': 80, 'maximum-total-capacity': 80, 'channel-path-id': '1B', 'physical-channel-status': 'operating', }) adapter", "assert hba.properties['name'] == new_hba_name # Refresh the resource object and", "({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME, 'element-id': HBA1_OID +", "pytest.raises(exp_exc.__class__) as exc_info: # Execute the code to be tested", "self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) # Execute", "properties against the expected names and values for prop_name in", "HBA2_NAME, 'description': 'HBA ' + HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012',", "new_hba_find = hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] == new_hba_name # Verify that", "Corp. All Rights Reserved. 
# # Licensed under the Apache", "= [hba.properties['element-id'] for hba in hbas] assert set(oids) == set(exp_oids)", "409, 'reason': 1})), ('degraded', None), ('reservation-error', None), ('paused', None), ]", "id=id(hba)), repr_str) @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated', None),", "'status': 'active', 'type': 'fcp', # adapter-family is auto-set based upon", "and another one faked_hba = self.add_hba1() self.add_hba2() # Set the", "when refreshed from the mock state: hba.pull_full_properties() assert hba.properties[prop_name] ==", "HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID, HBA2_OID]), ({'name':", "\"\"\"Test Hba.__repr__().\"\"\" # Add a faked hba faked_hba = self.add_hba1()", ") def test_hbamanager_create( self, input_props, exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc): \"\"\"Test", "and # limitations under the License. \"\"\" Unit tests for", "new_hba_name # Refresh the resource object and verify that it", "with the License. # You may obtain a copy of", "Add one FCP adapter and port. 
\"\"\" self.session = FakedSession('fake-host',", "exp_hba_uri # Check the properties against the expected names and", "HBA description'}, {'device-number': 'FEDC', 'description': 'New HBA description'}, ] )", "port to the CPC self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent':", "'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'},", "using # list() (this does not use the name-to-URI cache).", "to be tested hba = hba_mgr.create(properties=input_props) exc = exc_info.value if", "hba_uri == exp_hba_uri # Check the properties against the expected", "def test_hbamanager_list_filter_args(self, filter_args, exp_oids): \"\"\"Test HbaManager.list() with filter_args.\"\"\" # Add", "assert exc.reason == exp_exc.reason else: # Execute the code to", "of the faked partition self.faked_partition.properties['status'] = 'stopped' # deletable hba_mgr", "found by its new name, using find() new_hba_find = hba_mgr.find(name=new_hba_name)", "None), ('paused', None), ] ) def test_hba_reassign_port(self, initial_partition_status, exp_exc): \"\"\"Test", "'HBA ' + HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112',", "reflects the # update. hba.pull_full_properties() assert hba.properties['name'] == new_hba_name #", "# ... and again when refreshed from the mock state:", "\"filter_args, exp_oids\", [ ({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id':", "faked_hba def add_hba2(self): \"\"\"Add a faked HBA 2 to the", "be tested hba.update_properties(properties={'name': new_hba_name}) # Verify that the resource is", "= self.partition.hbas if exp_status_exc: exp_exc = exp_status_exc elif exp_prop_exc: exp_exc", "express or implied. # See the License for the specific", "prop_name in input_props: value = hba.properties[prop_name] exp_value = input_props[prop_name] assert", "except in compliance with the License. 
# You may obtain", "self.partition.hbas.find(name=faked_hba.name) # Save the HBA properties for later comparison hba.pull_full_properties()", "... and again when refreshed from the mock state: hba.pull_full_properties()", "format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped', None), ('terminated',", "add_hba1(self): \"\"\"Add a faked HBA 1 to the faked partition.\"\"\"", "Hba.reassign_port().\"\"\" # Add a faked HBA to be tested. #", "hba_mgr.session == self.session assert hba_mgr.parent == self.partition assert hba_mgr.partition ==", "faked_hba = self.add_hba1() hba_name = faked_hba.name # Set the status", "HBA2_OID = 'hba 2-oid' HBA2_NAME = 'hba 2' # URIs", "tested. # Note: the Hba object returned by Hba.create() has", "'allowed-capacity': 80, 'maximum-total-capacity': 80, 'physical-channel-status': 'operating', }) self.faked_port11 = self.faked_fcp1.ports.add({", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) new_hba_name = \"new-\"", "'hba 2-oid' HBA2_NAME = 'hba 2' # URIs and Object", "'FCP #1 Port #1', }) assert PORT11_URI == self.faked_port11.uri def", "status of the faked partition self.faked_partition.properties['status'] = 'stopped' # deletable", "HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112', }) return faked_hba", "'reason': 5})), ({'name': 'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'], None),", "# Copyright 2016-2017 IBM Corp. All Rights Reserved. # #", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "= \"new-\" + hba_name # Execute the code to be", "== 1 new_hba_list = new_hbas_list[0] assert new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize(", "# Construct the input properties for a third HBA with", "by its old name, using # list() (this does not", "URIs and Object IDs of elements referenced in HBA properties:", "hba.delete() exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status ==", "absolute_import, print_function import pytest import re import copy from zhmcclient", "faked_hba2 = self.add_hba2() exp_faked_hbas = [faked_hba1, faked_hba2] hba_mgr = self.partition.hbas", "self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info:", "tested hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize( \"filter_args, exp_oids\",", "the HBA exists again under that name hba3 = hba_mgr.find(name=hba_name)", "the deletion code to be tested. hba.delete() # Check that", "again under that name hba3 = hba_mgr.find(name=hba_name) description = hba3.get_property('description')", "= Client(self.session) # Add a CPC in DPM mode self.faked_cpc", "len(hbas) == len(exp_oids) if exp_oids: oids = [hba.properties['element-id'] for hba", "== exp_exc.http_status assert exc.reason == exp_exc.reason else: # Execute the", "does not use the name-to-URI cache). 
hbas_list = hba_mgr.list( filter_args=dict(name=hba_name))", "] ) def test_hbamanager_create( self, input_props, exp_prop_names, exp_prop_exc, initial_partition_status, exp_status_exc):", "exp_exc\", [ ('stopped', None), ('terminated', None), ('starting', HTTPError({'http-status': 409, 'reason':", "exp_prop_value # Refresh the resource object and verify that the", "'fake-part1-name', 'description': 'Partition #1', 'status': 'active', 'initial-memory': 1024, 'maximum-memory': 2048,", "== exp_value def test_hba_repr(self): \"\"\"Test Hba.__repr__().\"\"\" # Add a faked", "assert len(hbas_list) == 0 # Verify that the resource is", "old name, using # list() (this does not use the", "the CPC self.faked_fcp1 = self.faked_cpc.adapters.add({ 'object-id': FCP1_OID, 'parent': self.faked_cpc.uri, 'class':", "hba = hba_mgr.create(properties=input_props) # Check the resource for consistency within", "repr_str = repr_str.replace('\\n', '\\\\n') # We check just the begin", "# Verify all public properties of the manager object assert", "hbas] assert set(oids) == set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\", [ ('stopped',", "the HBA has been set ... # ... in the", "repr(hba) repr_str = repr_str.replace('\\n', '\\\\n') # We check just the", "faked hba faked_hba = self.add_hba1() hba_mgr = self.partition.hbas hba =", "= repr(hba) repr_str = repr_str.replace('\\n', '\\\\n') # We check just", "name hba3 = hba_mgr.find(name=hba_name) description = hba3.get_property('description') assert description ==", "FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8') self.client = Client(self.session) # Add a", "HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111', }) return faked_hba", "else: # Execute the code to be tested. 
hba.delete() #", "from zhmcclient import Client, Hba, HTTPError, NotFound from zhmcclient_mock import", "HBA to be tested and another one faked_hba = self.add_hba1()", "creation code to be tested. hba_mgr.create(part3_props) # Check that the", "None if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the", "has not been refreshed yet. assert hba.properties['name'] == new_hba_name #", "if exp_oids: oids = [hba.properties['element-id'] for hba in hbas] assert", "'iml-mode': 'dpm', }) self.cpc = self.client.cpcs.find(name='fake-cpc1-name') # Add a partition", "'description': 'HBA ' + HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number':", "test_hbamanager_initial_attrs(self): \"\"\"Test initial attributes of HbaManager.\"\"\" hba_mgr = self.partition.hbas #", "assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. format(classname=hba.__class__.__name__, id=id(hba)), repr_str) @pytest.mark.parametrize( \"initial_partition_status, exp_exc\", [ ('stopped',", "exp_oids: oids = [hba.properties['element-id'] for hba in hbas] assert set(oids)", "== saved_properties[prop_name] else: # Execute the code to be tested.", "hba in hbas] assert set(oids) == set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\",", "self.faked_cpc.uri, 'class': 'partition', 'name': 'fake-part1-name', 'description': 'Partition #1', 'status': 'active',", "'class': 'hba', 'name': HBA2_NAME, 'description': 'HBA ' + HBA2_NAME, 'adapter-port-uri':", "hba_mgr.list(filter_args=filter_args) assert len(hbas) == len(exp_oids) if exp_oids: oids = [hba.properties['element-id']", "properties plus 'element-uri' plus 'element-id'. 
hba = hba_mgr.create(properties=input_props) # Check", "HBA still exists hba_mgr.find(name=faked_hba.name) else: # Execute the code to", "the CPC self.faked_partition = self.faked_cpc.partitions.add({ 'element-id': 'fake-part1-oid', # element-uri will", "in saved_properties: if prop_name in input_props: exp_prop_value = input_props[prop_name] else:", "exc_info: # Execute the code to be tested hba.delete() exc", "exc_info: # Execute the code to be tested hba =", "None), ] ) def test_hbamanager_create( self, input_props, exp_prop_names, exp_prop_exc, initial_partition_status,", "the HBA still exists hba_mgr.find(name=faked_hba.name) else: # Execute the code", "hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) # Execute the deletion", "the resource object: prop_name = 'adapter-port-uri' assert hba.properties[prop_name] == port.uri", "test_hba_update_name(self): \"\"\"Test Hba.update_properties() with 'name' property.\"\"\" # Add a faked", "({'name': HBA1_NAME + 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']},", "though it has not been refreshed yet. 
assert hba.properties['name'] ==", "faked_adapter = self.faked_cpc.adapters.add({ 'object-id': 'fake-fcp1-oid', # object-uri is auto-set based", "\"\"\" Set up a faked session, and add a faked", "'name': 'fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp', 'adapter-id':", "({'name': 'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'], None), ] )", "Construct the input properties for a third HBA with same", "self.faked_cpc.uri, 'class': 'adapter', 'name': 'fake-fcp1', 'description': 'FCP #1', 'status': 'active',", "hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info:", "of the faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas", "== exp_exc.reason else: # Execute the code to be tested.", "faked HBAs self.add_hba1() self.add_hba2() hba_mgr = self.partition.hbas # Execute the", "add_hba2(self): \"\"\"Add a faked HBA 2 to the faked partition.\"\"\"", "code to be tested hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names)", "hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list) == 0 # Verify that the", "self.partition.hbas # Verify all public properties of the manager object", "assert hba.properties[prop_name] == saved_properties[prop_name] # ... 
and again when refreshed", "'name': 'fake-part1-name', 'description': 'Partition #1', 'status': 'active', 'initial-memory': 1024, 'maximum-memory':", "1'}, [HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID,", "'fake-fcp1', 'description': 'FCP #1', 'status': 'active', 'type': 'fcp', # adapter-family", "'active', 'type': 'fcp', # adapter-family is auto-set based upon type", "the test on hba = self.partition.hbas.find(name=faked_hba.name) # Save the HBA", "'object-id': 'fake-fcp1-oid', # object-uri is auto-set based upon object-id 'parent':", "({'name': 'hba 1.+'}, []), ({'name': '.+hba 1'}, []), ({'name': HBA1_NAME,", "== self.session assert hba_mgr.parent == self.partition assert hba_mgr.partition == self.partition", "(dict(full_properties=False), ['element-uri']), (dict(full_properties=True), None), ] ) def test_hbamanager_list_full_properties( self, full_properties_kwargs,", "HBA1_OID + 'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]),", "to be tested hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize(", "== saved_properties[prop_name] # ... and again when refreshed from the", "as exc_info: # Execute the code to be tested hba", "for Hba and HbaManager classes.\"\"\" def setup_method(self): \"\"\" Set up", "port of the HBA has been set ... # ...", "initial_partition_status hba_mgr = self.partition.hbas if exp_status_exc: exp_exc = exp_status_exc elif", "one port that the HBA will be reassigned to faked_adapter", "+ 'foo']}, [HBA1_OID]), ({'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]), ({'name':", "hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) # Execute the code to", "'hba 1.+'}, []), ({'name': '.+hba 1'}, []), ({'name': HBA1_NAME, 'element-id':", "the resource object: assert hba.properties[prop_name] == saved_properties[prop_name] # ... 
and", "HBA1_OID}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID + 'foo'},", "to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA1_OID, #", "zhmcclient_mock import FakedSession from tests.common.utils import assert_resources # Object IDs", "copy.deepcopy(faked_hba.properties) part3_props['description'] = 'Third HBA' # Set the status of", "exp_exc): \"\"\"Test Hba.delete().\"\"\" # Add a faked HBA to be", "'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID", "updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) hba.pull_full_properties() saved_properties =", "1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'}, []),", "faked partition self.faked_partition.properties['status'] = 'stopped' # deletable hba_mgr = self.partition.hbas", "'HBA ' + HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111',", "from __future__ import absolute_import, print_function import pytest import re import", "in the resource object: assert hba.properties[prop_name] == saved_properties[prop_name] # ...", "+ HBA2_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0012', 'device-number': '1112', }) return", "of our faked HBAs: HBA1_OID = 'hba 1-oid' HBA1_NAME =", "assert hba.properties['name'] == new_hba_name # Verify that the resource can", "in HBA properties: FCP1_OID = 'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid' PORT11_URI", "import pytest import re import copy from zhmcclient import Client,", "'adapter-id': '123', 'detected-card-type': 'ficon-express-16s', 'card-location': '1234-5678-J.01', 'port-count': 1, 'storage-port-uris': [],", "}) self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri, 'class': 'storage-port',", "'Partition #1', 'status': 'active', 'initial-memory': 1024, 'maximum-memory': 2048, }) self.partition", "'port-count': 1, 
'storage-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0,", "set(oids) == set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\", [ ('stopped', None), ('terminated',", "'reason': 5})), ({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name':", "Execute the code to be tested. hba.reassign_port(port) # Check that", "#1', 'status': 'active', 'initial-memory': 1024, 'maximum-memory': 2048, }) self.partition =", "hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) if exp_exc: with pytest.raises(exp_exc.__class__)", "[HBA1_OID]), ({'name': '.+'}, [HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name':", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "'description': 'CPC #1 (DPM mode)', 'status': 'active', 'dpm-enabled': True, 'is-ensemble-member':", "comparison hba.pull_full_properties() saved_properties = copy.deepcopy(hba.properties) if exp_exc: with pytest.raises(exp_exc.__class__) as", "prop_name in hba.properties if prop_name in input_props: value = hba.properties[prop_name]", "\"\"\" self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8') self.client = Client(self.session)", "assert exc.reason == exp_exc.reason # Check that the port of", "'parent': self.faked_partition.uri, 'class': 'hba', 'name': HBA2_NAME, 'description': 'HBA ' +", "'stopped' # updatable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) new_hba_name", "be tested. 
hba.reassign_port(port) # Check that the port of the", "the input properties for a third HBA with same name", "] ) def test_hbamanager_list_full_properties( self, full_properties_kwargs, prop_names): \"\"\"Test HbaManager.list() with", "= hba.properties[prop_name] exp_value = input_props[prop_name] assert value == exp_value def", "= hba_mgr.create(properties=input_props) # Check the resource for consistency within itself", "= self.partition.hbas.find(name=faked_hba.name) # Save the HBA properties for later comparison", "exp_prop_value = saved_properties[prop_name] assert prop_name in hba.properties prop_value = hba.properties[prop_name]", "input properties plus 'element-uri' plus 'element-id'. hba = hba_mgr.create(properties=input_props) #", "resource is no longer found by its old name, using", "# Verify that the resource object already reflects the update,", "'adapter-port-uri' assert hba.properties[prop_name] == port.uri # ... and again when", "== self.faked_port11.uri def add_hba1(self): \"\"\"Add a faked HBA 1 to", "== exp_prop_value # Refresh the resource object and verify that", "hbas = hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize( \"filter_args, exp_oids\", [", "hba.update_properties(properties={'name': new_hba_name}) # Verify that the resource is no longer", "[]), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]), ({'name': [HBA2_NAME +", "Version 2.0 (the \"License\"); # you may not use this", "found by its old name, using # list() (this does", "check just the begin of the string: assert re.match(r'^{classname}\\s+at\\s+0x{id:08x}\\s+\\(\\\\n.*'. 
format(classname=hba.__class__.__name__,", "hbas_list = hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list) == 0 # Verify", "The HBA object we will perform the test on hba", "part3_props['description'] = 'Third HBA' # Set the status of the", "faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA1_OID, # element-uri will", "# deletable hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) # Execute", "expected names and values for prop_name in exp_prop_names: assert prop_name", "self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas hba = hba_mgr.find(name=faked_hba.name) if", "of elements referenced in HBA properties: FCP1_OID = 'fake-fcp1-oid' PORT11_OID", "in DPM mode with one partition that has no HBAs.", "= 'fake-fcp1-oid' PORT11_OID = 'fake-port11-oid' PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class", "by applicable law or agreed to in writing, software #", "def test_hba_update_properties(self, input_props): \"\"\"Test Hba.update_properties().\"\"\" # Add a faked HBA", "assert new_hba_find.properties['name'] == new_hba_name # Verify that the resource can", "filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) == 1 new_hba_list = new_hbas_list[0] assert new_hba_list.properties['name']", "port that the HBA will be reassigned to faked_adapter =", "hba_mgr.list(**full_properties_kwargs) assert_resources(hbas, exp_faked_hbas, prop_names) @pytest.mark.parametrize( \"filter_args, exp_oids\", [ ({'element-id': HBA1_OID},", "is set up automatically 'parent': None, 'class': 'cpc', 'name': 'fake-cpc1-name',", "\"\"\" Unit tests for _hba module. 
\"\"\" from __future__ import", "else: exp_prop_value = saved_properties[prop_name] assert prop_name in hba.properties prop_value =", ") def test_hbamanager_list_filter_args(self, filter_args, exp_oids): \"\"\"Test HbaManager.list() with filter_args.\"\"\" #", "= initial_partition_status hba_mgr = self.partition.hbas if exp_status_exc: exp_exc = exp_status_exc", "the creation code to be tested. hba_mgr.create(part3_props) # Check that", "exp_prop_exc else: exp_exc = None if exp_exc: with pytest.raises(exp_exc.__class__) as", "pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete() followed by", "URI. faked_hba = self.add_hba1() # Add a faked FCP with", "mode with one partition that has no HBAs. Add one", "be tested. # Its port points to a faked URI.", "HBA to be tested. # Its port points to a", "HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME, 'element-id': HBA1_OID + 'foo'},", "to be tested. hba.reassign_port(port) # Check that the port of", "('paused', None), ] ) def test_hba_delete(self, initial_partition_status, exp_exc): \"\"\"Test Hba.delete().\"\"\"", "exc.reason == exp_exc.reason else: # Execute the code to be", "= exp_prop_exc else: exp_exc = None if exp_exc: with pytest.raises(exp_exc.__class__)", "it still reflects the # update. 
hba.pull_full_properties() assert hba.properties['name'] ==", "= self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri, 'class': 'storage-port', 'index': 1,", "'name': HBA1_NAME, 'description': 'HBA ' + HBA1_NAME, 'adapter-port-uri': PORT11_URI, 'wwpn':", "5})), ({'name': 'fake-hba-x', 'adapter-port-uri': PORT11_URI}, ['element-uri', 'name', 'adapter-port-uri'], None), ]", "@pytest.mark.parametrize( \"input_props\", [ {}, {'description': 'New HBA description'}, {'device-number': 'FEDC',", "assert hba.properties[prop_name] == saved_properties[prop_name] else: # Execute the code to", "still reflects the # update. hba.pull_full_properties() assert hba.properties['name'] == new_hba_name", "FCP1_OID, 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fcp1', 'description': 'FCP #1',", "HBAs faked_hba1 = self.add_hba1() faked_hba2 = self.add_hba2() exp_faked_hbas = [faked_hba1,", "'maximum-total-capacity': 80, 'channel-path-id': '1B', 'physical-channel-status': 'operating', }) adapter = self.cpc.adapters.find(name='fake-fcp1')", "same name.\"\"\" # Add a faked HBA to be tested", "new_hba_list = new_hbas_list[0] assert new_hba_list.properties['name'] == new_hba_name @pytest.mark.parametrize( \"initial_partition_status, exp_exc\",", "hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): \"\"\"Test Hba.delete() followed by Hba.create() with same", "the faked partition self.faked_partition.properties['status'] = initial_partition_status # The HBA object", "'storage-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0, 'allowed-capacity': 80,", "object returned by Hba.create() has # the input properties plus", "applicable law or agreed to in writing, software # distributed", "'name': 'fake-port1', 'description': 'FCP #1 Port 1', 'index': 0, 'fabric-id':", "in DPM mode self.faked_cpc = self.session.hmc.cpcs.add({ 'element-id': 'fake-cpc1-oid', # element-uri", "with 
pytest.raises(exp_exc.__class__) as exc_info: # Execute the code to be", "a faked CPC in DPM mode with one partition that", "and names of our faked HBAs: HBA1_OID = 'hba 1-oid'", "the resource object and verify that the resource object #", "[]), ] ) def test_hbamanager_list_filter_args(self, filter_args, exp_oids): \"\"\"Test HbaManager.list() with", "that the port of the HBA is unchanged ... prop_name", "'status': 'active', 'initial-memory': 1024, 'maximum-memory': 2048, }) self.partition = self.cpc.partitions.find(name='fake-part1-name')", "faked partition self.faked_partition.properties['status'] = initial_partition_status # The HBA object we", "hba.properties if prop_name in input_props: value = hba.properties[prop_name] exp_value =", "with full_properties.\"\"\" # Add two faked HBAs faked_hba1 = self.add_hba1()", "'physical-channel-status': 'operating', }) self.faked_port11 = self.faked_fcp1.ports.add({ 'element-id': PORT11_OID, 'parent': self.faked_fcp1.uri,", "+ 'foo', HBA1_OID]}, [HBA1_OID]), ({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]),", "resource for consistency within itself assert isinstance(hba, Hba) hba_name =", "hba_name == exp_hba_name hba_uri = hba.uri exp_hba_uri = hba.properties['element-uri'] assert", "None), ] ) def test_hba_reassign_port(self, initial_partition_status, exp_exc): \"\"\"Test Hba.reassign_port().\"\"\" #", "to be tested and another one faked_hba = self.add_hba1() hba_name", "({'name': HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []), ({'name': HBA1_NAME +", "# element-uri will be automatically set 'parent': self.faked_partition.uri, 'class': 'hba',", "hba_mgr.find(name=faked_hba.name) if exp_exc: with pytest.raises(exp_exc.__class__) as exc_info: # Execute the", "'CPC #1 (DPM mode)', 'status': 'active', 'dpm-enabled': True, 'is-ensemble-member': False,", "oids = [hba.properties['element-id'] for hba in hbas] assert set(oids) ==", "prop_names): \"\"\"Test HbaManager.list() with full_properties.\"\"\" # Add 
two faked HBAs", "'parent': None, 'class': 'cpc', 'name': 'fake-cpc1-name', 'description': 'CPC #1 (DPM", "# You may obtain a copy of the License at", "# Add a faked FCP with one port that the", "== 'Third HBA' @pytest.mark.parametrize( \"input_props\", [ {}, {'description': 'New HBA", "auto-set based upon object-id 'parent': self.faked_cpc.uri, 'class': 'adapter', 'name': 'fake-fcp1',", "port.uri # ... and again when refreshed from the mock", "object and verify that the resource object # still reflects", "# adapter-family is auto-set based upon type 'adapter-id': '123', 'detected-card-type':", "with pytest.raises(NotFound): hba_mgr.find(name=hba_name) # Verify that the resource object already", "that the HBA exists again under that name hba3 =", "hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) == 1 new_hba_list = new_hbas_list[0] assert", "mock state: hba.pull_full_properties() assert hba.properties[prop_name] == saved_properties[prop_name] else: # Execute", "+ 'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]), ({'element-id':", "the # update. 
hba.pull_full_properties() assert hba.properties['name'] == new_hba_name # Verify", "if prop_name in input_props: exp_prop_value = input_props[prop_name] else: exp_prop_value =", "can be found by its new name, using list() new_hbas_list", "assert set(oids) == set(exp_oids) @pytest.mark.parametrize( \"initial_partition_status, exp_status_exc\", [ ('stopped', None),", "1 to the faked partition.\"\"\" faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA1_OID,", "faked HBA to be tested and another one faked_hba =", "[]), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME", "'fake-hba-x'}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'adapter-port-uri': PORT11_URI}, None, HTTPError({'http-status':", "Hba.__repr__().\"\"\" # Add a faked hba faked_hba = self.add_hba1() hba_mgr", "faked partition self.faked_partition.properties['status'] = initial_partition_status hba_mgr = self.partition.hbas hba =", "the property updates. hba.pull_full_properties() for prop_name in saved_properties: if prop_name", "'active', 'initial-memory': 1024, 'maximum-memory': 2048, }) self.partition = self.cpc.partitions.find(name='fake-part1-name') #", "self.faked_partition.hbas.add({ 'element-id': HBA2_OID, # element-uri will be automatically set 'parent':", "[ ({}, None, HTTPError({'http-status': 400, 'reason': 5})), ({'name': 'fake-hba-x'}, None,", "'element-id': HBA1_OID}, [HBA1_OID]), ({'name': HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []),", "Hba) hba_name = hba.name exp_hba_name = hba.properties['name'] assert hba_name ==", "faked_adapter.uri, 'class': 'storage-port', 'name': 'fake-port1', 'description': 'FCP #1 Port 1',", "be tested repr_str = repr(hba) repr_str = repr_str.replace('\\n', '\\\\n') #", "HBA1_OID + 'foo'}, []), ({'name': HBA1_NAME + 'foo', 'element-id': HBA1_OID},", "the code to be tested. hba.delete() # Check that the", "has been set ... # ... 
in the resource object:", "faked_hba = self.faked_partition.hbas.add({ 'element-id': HBA1_OID, # element-uri will be automatically", "'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111', }) return faked_hba def add_hba2(self): \"\"\"Add", "filter_args, exp_oids): \"\"\"Test HbaManager.list() with filter_args.\"\"\" # Add two faked", "the status of the faked partition self.faked_partition.properties['status'] = 'stopped' #", "hba.reassign_port(port) exc = exc_info.value if isinstance(exp_exc, HTTPError): assert exc.http_status ==", "been set ... # ... in the resource object: prop_name", "find() new_hba_find = hba_mgr.find(name=new_hba_name) assert new_hba_find.properties['name'] == new_hba_name # Verify", "= hba_mgr.list( filter_args=dict(name=hba_name)) assert len(hbas_list) == 0 # Verify that", "Execute the code to be tested hbas = hba_mgr.list(filter_args=filter_args) assert", "\"License\"); # you may not use this file except in", "#1 Port 1', 'index': 0, 'fabric-id': None, }) port =", "'class': 'storage-port', 'index': 1, 'name': 'fake-port11-name', 'description': 'FCP #1 Port", "prop_value = hba.properties[prop_name] assert prop_value == exp_prop_value # Refresh the", "= hba3.get_property('description') assert description == 'Third HBA' @pytest.mark.parametrize( \"input_props\", [", "exp_hba_uri = hba.properties['element-uri'] assert hba_uri == exp_hba_uri # Check the", "resource object # still reflects the property updates. hba.pull_full_properties() for", "# Execute the code to be tested hba.delete() exc =", "another one faked_hba = self.add_hba1() self.add_hba2() # Set the status", "name.\"\"\" # Add a faked HBA to be tested and", "Execute the code to be tested. hba.delete() # Check that", "to be tested hba.delete() exc = exc_info.value if isinstance(exp_exc, HTTPError):", "faked HBA to be tested. 
# Its port points to", "against the expected names and values for prop_name in exp_prop_names:", "= hba_mgr.list( filter_args=dict(name=new_hba_name)) assert len(new_hbas_list) == 1 new_hba_list = new_hbas_list[0]", "isinstance(exp_exc, HTTPError): assert exc.http_status == exp_exc.http_status assert exc.reason == exp_exc.reason", "'port-count': 1, 'network-port-uris': [], 'state': 'online', 'configured-capacity': 80, 'used-capacity': 0,", "= repr_str.replace('\\n', '\\\\n') # We check just the begin of", ".'}, [HBA1_OID, HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID,", "= '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID) class TestHba(object): \"\"\"All tests for Hba and", "hba_mgr = self.partition.hbas hba = hba_mgr.find(name=hba_name) new_hba_name = \"new-\" +", "Add an FCP adapter and port to the CPC self.faked_fcp1", "hba_name = faked_hba.name self.add_hba2() # Construct the input properties for", "hba.properties[prop_name] assert prop_value == exp_prop_value def test_hba_update_name(self): \"\"\"Test Hba.update_properties() with", "'adapter-port-uri': PORT11_URI, 'wwpn': 'AABBCCDDEEFF0011', 'device-number': '1111', }) return faked_hba def", "HBA1_OID + 'foo'}, []), ] ) def test_hbamanager_list_filter_args(self, filter_args, exp_oids):", "longer exists with pytest.raises(NotFound) as exc_info: hba_mgr.find(name=faked_hba.name) def test_hba_delete_create_same_name(self): \"\"\"Test", "HBA exists again under that name hba3 = hba_mgr.find(name=hba_name) description", "# element-uri is auto-set based upon object-id 'parent': faked_adapter.uri, 'class':", "if prop_name in input_props: value = hba.properties[prop_name] exp_value = input_props[prop_name]" ]
[ "if not config.parent: return config return getRoot(config.parent) root = getRoot(config)", "all of its tests. if root.host_os in ['Windows']: config.unsupported =", "of its tests. if root.host_os in ['Windows']: config.unsupported = False", "directory's \"unsupported\" decision until we can handle # all of", "decision until we can handle # all of its tests.", "return config return getRoot(config.parent) root = getRoot(config) # We only", "getRoot(config): if not config.parent: return config return getRoot(config.parent) root =", "\"unsupported\" decision until we can handle # all of its", "# Override the parent directory's \"unsupported\" decision until we can", "we can handle # all of its tests. if root.host_os", "can handle # all of its tests. if root.host_os in", "config.parent: return config return getRoot(config.parent) root = getRoot(config) # We", "getRoot(config) # We only run a small set of tests", "root = getRoot(config) # We only run a small set", "the parent directory's \"unsupported\" decision until we can handle #", "now. # Override the parent directory's \"unsupported\" decision until we", "only run a small set of tests on Windows for", "on Windows for now. # Override the parent directory's \"unsupported\"", "parent directory's \"unsupported\" decision until we can handle # all", "root.host_os in ['Windows']: config.unsupported = False else: config.unsupported = True", "return getRoot(config.parent) root = getRoot(config) # We only run a", "getRoot(config.parent) root = getRoot(config) # We only run a small", "tests on Windows for now. # Override the parent directory's", "if root.host_os in ['Windows']: config.unsupported = False else: config.unsupported =", "def getRoot(config): if not config.parent: return config return getRoot(config.parent) root", "handle # all of its tests. if root.host_os in ['Windows']:", "# all of its tests. 
if root.host_os in ['Windows']: config.unsupported", "= getRoot(config) # We only run a small set of", "config return getRoot(config.parent) root = getRoot(config) # We only run", "Windows for now. # Override the parent directory's \"unsupported\" decision", "Override the parent directory's \"unsupported\" decision until we can handle", "until we can handle # all of its tests. if", "small set of tests on Windows for now. # Override", "# We only run a small set of tests on", "of tests on Windows for now. # Override the parent", "run a small set of tests on Windows for now.", "We only run a small set of tests on Windows", "set of tests on Windows for now. # Override the", "for now. # Override the parent directory's \"unsupported\" decision until", "not config.parent: return config return getRoot(config.parent) root = getRoot(config) #", "a small set of tests on Windows for now. #", "tests. if root.host_os in ['Windows']: config.unsupported = False else: config.unsupported", "its tests. if root.host_os in ['Windows']: config.unsupported = False else:" ]
[ "KeyError: pass else: urllib_request.urlcleanup() try: linecache = sys.modules['linecache'] except KeyError:", "ms = divmod(ms, 1000) minutes, seconds = divmod(seconds, 60) hours,", "KeyError: pass else: linecache.clearcache() try: mimetypes = sys.modules['mimetypes'] except KeyError:", "orig_unraisablehook = None def regrtest_unraisable_hook(unraisable): global orig_unraisablehook support.environment_altered = True", "else: _strptime._regex_cache.clear() try: urllib_parse = sys.modules['urllib.parse'] except KeyError: pass else:", "reclaimed. for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if stream", "that buffered data is sent to the OS and #", "pass else: ctypes._reset_cache() try: typing = sys.modules['typing'] except KeyError: pass", "subsequent_indent=blanks), file=file) def print_warning(msg): support.print_warning(msg) orig_unraisablehook = None def regrtest_unraisable_hook(unraisable):", "ms' % ms parts = parts[:2] return ' '.join(parts) def", "= sys.modules['linecache'] except KeyError: pass else: linecache.clearcache() try: mimetypes =", "= sys.stderr try: sys.stderr = sys.__stderr__ orig_unraisablehook(unraisable) finally: sys.stderr =", "each line. 
\"\"\" blanks = ' ' * indent #", "about resetting the cache if the module is not loaded", "def printlist(x, width=70, indent=4, file=None): \"\"\"Print the elements of iterable", "== '.py': names[idx] = basename def count(n, word): if n", "del mod.__warningregistry__ # Flush standard output, so that buffered data", "else: doctest.master = None try: ctypes = sys.modules['ctypes'] except KeyError:", "(n, word) else: return \"%d %ss\" % (n, word) def", "+ ms / 1000)) if not parts: return '%s ms'", "except KeyError: pass else: distutils_dir_util._path_created.clear() try: re = sys.modules['re'] except", "sec' % (seconds + ms / 1000)) if not parts:", "sys.modules['doctest'] except KeyError: pass else: doctest.master = None try: ctypes", "try: sys.stderr = sys.__stderr__ orig_unraisablehook(unraisable) finally: sys.stderr = old_stderr def", "re = sys.modules['re'] except KeyError: pass else: re.purge() try: _strptime", "basename def count(n, word): if n == 1: return \"%d", "OS and # associated Python objects are reclaimed. for stream", "associated Python objects are reclaimed. for stream in (sys.stdout, sys.stderr,", "x to stdout. 
Optional arg width (default 70) is the", "% seconds) else: # 1.0 sec parts.append('%.1f sec' % (seconds", "min 1 sec parts.append('%s sec' % seconds) else: # 1.0", "warnings registry, so they can be displayed again for mod", "else: urllib_request.urlcleanup() try: linecache = sys.modules['linecache'] except KeyError: pass else:", "parts: return '%s ms' % ms parts = parts[:2] return", "parts.append('%s hour' % hours) if minutes: parts.append('%s min' % minutes)", "minutes: parts.append('%s min' % minutes) if seconds: if parts: #", "re.purge() try: _strptime = sys.modules['_strptime'] except KeyError: pass else: _strptime._regex_cache.clear()", "name in enumerate(names): basename, ext = os.path.splitext(name) if ext ==", "Flush standard output, so that buffered data is sent to", "else: linecache.clearcache() try: mimetypes = sys.modules['mimetypes'] except KeyError: pass else:", "if stream is not None: stream.flush() # Clear assorted module", "sys.modules['re'] except KeyError: pass else: re.purge() try: _strptime = sys.modules['_strptime']", "sys.modules['filecmp'] except KeyError: pass else: filecmp._cache.clear() try: struct = sys.modules['struct']", "for mod in sys.modules.values(): if hasattr(mod, '__warningregistry__'): del mod.__warningregistry__ #", "word): if n == 1: return \"%d %s\" % (n,", "to the OS and # associated Python objects are reclaimed.", "minutes = divmod(minutes, 60) parts = [] if hours: parts.append('%s", "standard output, so that buffered data is sent to the", "length. Optional arg indent (default 4) is the number of", "sys.modules['_strptime'] except KeyError: pass else: _strptime._regex_cache.clear() try: urllib_parse = sys.modules['urllib.parse']", "try: _strptime = sys.modules['_strptime'] except KeyError: pass else: _strptime._regex_cache.clear() try:", "None: stream.flush() # Clear assorted module caches. 
# Don't worry", "= sys.modules['urllib.parse'] except KeyError: pass else: urllib_parse.clear_cache() try: urllib_request =", "= old_stderr def setup_unraisable_hook(): global orig_unraisablehook orig_unraisablehook = sys.unraisablehook sys.unraisablehook", "cache if the module is not loaded try: distutils_dir_util =", "import textwrap from test import support def format_duration(seconds): ms =", "sec parts.append('%.1f sec' % (seconds + ms / 1000)) if", "iterable x to stdout. Optional arg width (default 70) is", "in enumerate(names): basename, ext = os.path.splitext(name) if ext == '.py':", "ctypes._reset_cache() try: typing = sys.modules['typing'] except KeyError: pass else: for", "stdout. Optional arg width (default 70) is the maximum line", "be displayed again for mod in sys.modules.values(): if hasattr(mod, '__warningregistry__'):", "' '.join(parts) def removepy(names): if not names: return for idx,", "hasattr(mod, '__warningregistry__'): del mod.__warningregistry__ # Flush standard output, so that", "math.ceil(seconds * 1e3) seconds, ms = divmod(ms, 1000) minutes, seconds", "old_stderr def setup_unraisable_hook(): global orig_unraisablehook orig_unraisablehook = sys.unraisablehook sys.unraisablehook =", "= sys.modules['struct'] except KeyError: pass else: struct._clearcache() try: doctest =", "ms parts = parts[:2] return ' '.join(parts) def removepy(names): if", "= sys.modules['_strptime'] except KeyError: pass else: _strptime._regex_cache.clear() try: urllib_parse =", "assorted module caches. 
# Don't worry about resetting the cache", "parts.append('%.1f sec' % (seconds + ms / 1000)) if not", "else: mimetypes._default_mime_types() try: filecmp = sys.modules['filecmp'] except KeyError: pass else:", "ctypes = sys.modules['ctypes'] except KeyError: pass else: ctypes._reset_cache() try: typing", "doctest = sys.modules['doctest'] except KeyError: pass else: doctest.master = None", "old_stderr = sys.stderr try: sys.stderr = sys.__stderr__ orig_unraisablehook(unraisable) finally: sys.stderr", "urllib_request = sys.modules['urllib.request'] except KeyError: pass else: urllib_request.urlcleanup() try: linecache", "seconds = divmod(seconds, 60) hours, minutes = divmod(minutes, 60) parts", "% minutes) if seconds: if parts: # 2 min 1", "(n, word) def printlist(x, width=70, indent=4, file=None): \"\"\"Print the elements", "hours, minutes = divmod(minutes, 60) parts = [] if hours:", "n == 1: return \"%d %s\" % (n, word) else:", "Optional arg width (default 70) is the maximum line length.", "if ext == '.py': names[idx] = basename def count(n, word):", "sys.modules['urllib.parse'] except KeyError: pass else: urllib_parse.clear_cache() try: urllib_request = sys.modules['urllib.request']", "os.path.splitext(name) if ext == '.py': names[idx] = basename def count(n,", "# Flush standard output, so that buffered data is sent", "pass else: urllib_request.urlcleanup() try: linecache = sys.modules['linecache'] except KeyError: pass", "if seconds: if parts: # 2 min 1 sec parts.append('%s", "resetting the cache if the module is not loaded try:", "= None def regrtest_unraisable_hook(unraisable): global orig_unraisablehook support.environment_altered = True print_warning(\"Unraisable", "sys.modules.values(): if hasattr(mod, '__warningregistry__'): del mod.__warningregistry__ # Flush standard output,", "\"\"\" blanks = ' ' * indent # Print the", "buffered data is sent to the OS and # associated", "arg width (default 70) is the maximum line length. 
Optional", "def format_duration(seconds): ms = math.ceil(seconds * 1e3) seconds, ms =", "to stdout. Optional arg width (default 70) is the maximum", "else: ctypes._reset_cache() try: typing = sys.modules['typing'] except KeyError: pass else:", "support.print_warning(msg) orig_unraisablehook = None def regrtest_unraisable_hook(unraisable): global orig_unraisablehook support.environment_altered =", "hours: parts.append('%s hour' % hours) if minutes: parts.append('%s min' %", "pass else: _strptime._regex_cache.clear() try: urllib_parse = sys.modules['urllib.parse'] except KeyError: pass", "/ 1000)) if not parts: return '%s ms' % ms", "try: struct = sys.modules['struct'] except KeyError: pass else: struct._clearcache() try:", "or a set() print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)), width,", "word) else: return \"%d %ss\" % (n, word) def printlist(x,", "orig_unraisablehook = sys.unraisablehook sys.unraisablehook = regrtest_unraisable_hook def clear_caches(): # Clear", "not names: return for idx, name in enumerate(names): basename, ext", "Python objects are reclaimed. 
for stream in (sys.stdout, sys.stderr, sys.__stdout__,", "width, initial_indent=blanks, subsequent_indent=blanks), file=file) def print_warning(msg): support.print_warning(msg) orig_unraisablehook = None", "linecache = sys.modules['linecache'] except KeyError: pass else: linecache.clearcache() try: mimetypes", "sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks), file=file) def print_warning(msg): support.print_warning(msg) orig_unraisablehook =", "format_duration(seconds): ms = math.ceil(seconds * 1e3) seconds, ms = divmod(ms,", "blanks = ' ' * indent # Print the sorted", "loaded try: distutils_dir_util = sys.modules['distutils.dir_util'] except KeyError: pass else: distutils_dir_util._path_created.clear()", "pass else: linecache.clearcache() try: mimetypes = sys.modules['mimetypes'] except KeyError: pass", "return '%s ms' % ms parts = parts[:2] return '", "mimetypes._default_mime_types() try: filecmp = sys.modules['filecmp'] except KeyError: pass else: filecmp._cache.clear()", "= sys.modules['filecmp'] except KeyError: pass else: filecmp._cache.clear() try: struct =", "except KeyError: pass else: doctest.master = None try: ctypes =", "4) is the number of blanks with which to begin", "sys.__stderr__): if stream is not None: stream.flush() # Clear assorted", "the module is not loaded try: distutils_dir_util = sys.modules['distutils.dir_util'] except", "stream.flush() # Clear assorted module caches. 
# Don't worry about", "regrtest_unraisable_hook def clear_caches(): # Clear the warnings registry, so they", "try: filecmp = sys.modules['filecmp'] except KeyError: pass else: filecmp._cache.clear() try:", "'%s ms' % ms parts = parts[:2] return ' '.join(parts)", "_strptime = sys.modules['_strptime'] except KeyError: pass else: _strptime._regex_cache.clear() try: urllib_parse", "== 1: return \"%d %s\" % (n, word) else: return", "list or a set() print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)),", "not loaded try: distutils_dir_util = sys.modules['distutils.dir_util'] except KeyError: pass else:", "% hours) if minutes: parts.append('%s min' % minutes) if seconds:", "except KeyError: pass else: filecmp._cache.clear() try: struct = sys.modules['struct'] except", "setup_unraisable_hook(): global orig_unraisablehook orig_unraisablehook = sys.unraisablehook sys.unraisablehook = regrtest_unraisable_hook def", "sys.modules['distutils.dir_util'] except KeyError: pass else: distutils_dir_util._path_created.clear() try: re = sys.modules['re']", "orig_unraisablehook(unraisable) finally: sys.stderr = old_stderr def setup_unraisable_hook(): global orig_unraisablehook orig_unraisablehook", "not parts: return '%s ms' % ms parts = parts[:2]", "sys.unraisablehook sys.unraisablehook = regrtest_unraisable_hook def clear_caches(): # Clear the warnings", "def regrtest_unraisable_hook(unraisable): global orig_unraisablehook support.environment_altered = True print_warning(\"Unraisable exception\") old_stderr", "if not parts: return '%s ms' % ms parts =", "is not None: stream.flush() # Clear assorted module caches. 
#", "def clear_caches(): # Clear the warnings registry, so they can", "parts.append('%s min' % minutes) if seconds: if parts: # 2", "the number of blanks with which to begin each line.", "return \"%d %s\" % (n, word) else: return \"%d %ss\"", "KeyError: pass else: re.purge() try: _strptime = sys.modules['_strptime'] except KeyError:", "* indent # Print the sorted list: 'x' may be", "names: return for idx, name in enumerate(names): basename, ext =", "= sys.modules['urllib.request'] except KeyError: pass else: urllib_request.urlcleanup() try: linecache =", "can be displayed again for mod in sys.modules.values(): if hasattr(mod,", "struct = sys.modules['struct'] except KeyError: pass else: struct._clearcache() try: doctest", "printlist(x, width=70, indent=4, file=None): \"\"\"Print the elements of iterable x", "linecache.clearcache() try: mimetypes = sys.modules['mimetypes'] except KeyError: pass else: mimetypes._default_mime_types()", "Print the sorted list: 'x' may be a '--random' list", "is the maximum line length. Optional arg indent (default 4)", "KeyError: pass else: distutils_dir_util._path_created.clear() try: re = sys.modules['re'] except KeyError:", "= [] if hours: parts.append('%s hour' % hours) if minutes:", "None def regrtest_unraisable_hook(unraisable): global orig_unraisablehook support.environment_altered = True print_warning(\"Unraisable exception\")", "ms = math.ceil(seconds * 1e3) seconds, ms = divmod(ms, 1000)", "mod in sys.modules.values(): if hasattr(mod, '__warningregistry__'): del mod.__warningregistry__ # Flush", "pass else: doctest.master = None try: ctypes = sys.modules['ctypes'] except", "# Don't worry about resetting the cache if the module", "the elements of iterable x to stdout. Optional arg width", "the sorted list: 'x' may be a '--random' list or", "are reclaimed. 
for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if", "1000) minutes, seconds = divmod(seconds, 60) hours, minutes = divmod(minutes,", "for elt in sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks), file=file) def print_warning(msg):", "os.path import sys import textwrap from test import support def", "2 min 1 sec parts.append('%s sec' % seconds) else: #", "file=None): \"\"\"Print the elements of iterable x to stdout. Optional", "parts: # 2 min 1 sec parts.append('%s sec' % seconds)", "is the number of blanks with which to begin each", "global orig_unraisablehook orig_unraisablehook = sys.unraisablehook sys.unraisablehook = regrtest_unraisable_hook def clear_caches():", "except KeyError: pass else: _strptime._regex_cache.clear() try: urllib_parse = sys.modules['urllib.parse'] except", "\"%d %ss\" % (n, word) def printlist(x, width=70, indent=4, file=None):", "data is sent to the OS and # associated Python", "parts = parts[:2] return ' '.join(parts) def removepy(names): if not", "sys.modules['linecache'] except KeyError: pass else: linecache.clearcache() try: mimetypes = sys.modules['mimetypes']", "print_warning(msg): support.print_warning(msg) orig_unraisablehook = None def regrtest_unraisable_hook(unraisable): global orig_unraisablehook support.environment_altered", "struct._clearcache() try: doctest = sys.modules['doctest'] except KeyError: pass else: doctest.master", "sec' % seconds) else: # 1.0 sec parts.append('%.1f sec' %", "= divmod(ms, 1000) minutes, seconds = divmod(seconds, 60) hours, minutes", "finally: sys.stderr = old_stderr def setup_unraisable_hook(): global orig_unraisablehook orig_unraisablehook =", "return \"%d %ss\" % (n, word) def printlist(x, width=70, indent=4,", "sys import textwrap from test import support def format_duration(seconds): ms", "typing = sys.modules['typing'] except KeyError: pass else: for f in", "' ' * indent # Print the sorted list: 'x'", "KeyError: pass else: struct._clearcache() try: 
doctest = sys.modules['doctest'] except KeyError:", "the cache if the module is not loaded try: distutils_dir_util", "# 2 min 1 sec parts.append('%s sec' % seconds) else:", "import sys import textwrap from test import support def format_duration(seconds):", "else: return \"%d %ss\" % (n, word) def printlist(x, width=70,", "= basename def count(n, word): if n == 1: return", "basename, ext = os.path.splitext(name) if ext == '.py': names[idx] =", "except KeyError: pass else: urllib_request.urlcleanup() try: linecache = sys.modules['linecache'] except", "filecmp = sys.modules['filecmp'] except KeyError: pass else: filecmp._cache.clear() try: struct", "= sys.unraisablehook sys.unraisablehook = regrtest_unraisable_hook def clear_caches(): # Clear the", "line. \"\"\" blanks = ' ' * indent # Print", "distutils_dir_util = sys.modules['distutils.dir_util'] except KeyError: pass else: distutils_dir_util._path_created.clear() try: re", "ext = os.path.splitext(name) if ext == '.py': names[idx] = basename", "orig_unraisablehook support.environment_altered = True print_warning(\"Unraisable exception\") old_stderr = sys.stderr try:", "of blanks with which to begin each line. \"\"\" blanks", "they can be displayed again for mod in sys.modules.values(): if", "except KeyError: pass else: linecache.clearcache() try: mimetypes = sys.modules['mimetypes'] except", "count(n, word): if n == 1: return \"%d %s\" %", "worry about resetting the cache if the module is not", "# associated Python objects are reclaimed. for stream in (sys.stdout,", "try: doctest = sys.modules['doctest'] except KeyError: pass else: doctest.master =", "begin each line. 
\"\"\" blanks = ' ' * indent", "distutils_dir_util._path_created.clear() try: re = sys.modules['re'] except KeyError: pass else: re.purge()", "min' % minutes) if seconds: if parts: # 2 min", "idx, name in enumerate(names): basename, ext = os.path.splitext(name) if ext", "except KeyError: pass else: struct._clearcache() try: doctest = sys.modules['doctest'] except", "ext == '.py': names[idx] = basename def count(n, word): if", "module caches. # Don't worry about resetting the cache if", "<filename>Lib/test/libregrtest/utils.py import math import os.path import sys import textwrap from", "displayed again for mod in sys.modules.values(): if hasattr(mod, '__warningregistry__'): del", "so that buffered data is sent to the OS and", "sys.modules['struct'] except KeyError: pass else: struct._clearcache() try: doctest = sys.modules['doctest']", "from test import support def format_duration(seconds): ms = math.ceil(seconds *", "[] if hours: parts.append('%s hour' % hours) if minutes: parts.append('%s", "module is not loaded try: distutils_dir_util = sys.modules['distutils.dir_util'] except KeyError:", "= divmod(seconds, 60) hours, minutes = divmod(minutes, 60) parts =", "= sys.modules['re'] except KeyError: pass else: re.purge() try: _strptime =", "_strptime._regex_cache.clear() try: urllib_parse = sys.modules['urllib.parse'] except KeyError: pass else: urllib_parse.clear_cache()", "indent # Print the sorted list: 'x' may be a", "except KeyError: pass else: urllib_parse.clear_cache() try: urllib_request = sys.modules['urllib.request'] except", "% (seconds + ms / 1000)) if not parts: return", "70) is the maximum line length. 
Optional arg indent (default", "sys.__stdout__, sys.__stderr__): if stream is not None: stream.flush() # Clear", "' * indent # Print the sorted list: 'x' may", "= True print_warning(\"Unraisable exception\") old_stderr = sys.stderr try: sys.stderr =", "sys.modules['ctypes'] except KeyError: pass else: ctypes._reset_cache() try: typing = sys.modules['typing']", "to begin each line. \"\"\" blanks = ' ' *", "mod.__warningregistry__ # Flush standard output, so that buffered data is", "filecmp._cache.clear() try: struct = sys.modules['struct'] except KeyError: pass else: struct._clearcache()", "sys.stderr try: sys.stderr = sys.__stderr__ orig_unraisablehook(unraisable) finally: sys.stderr = old_stderr", "pass else: struct._clearcache() try: doctest = sys.modules['doctest'] except KeyError: pass", "minutes, seconds = divmod(seconds, 60) hours, minutes = divmod(minutes, 60)", "indent (default 4) is the number of blanks with which", "sorted list: 'x' may be a '--random' list or a", "import os.path import sys import textwrap from test import support", "try: typing = sys.modules['typing'] except KeyError: pass else: for f", "def print_warning(msg): support.print_warning(msg) orig_unraisablehook = None def regrtest_unraisable_hook(unraisable): global orig_unraisablehook", "blanks with which to begin each line. 
\"\"\" blanks =", "try: re = sys.modules['re'] except KeyError: pass else: re.purge() try:", "else: # 1.0 sec parts.append('%.1f sec' % (seconds + ms", "support.environment_altered = True print_warning(\"Unraisable exception\") old_stderr = sys.stderr try: sys.stderr", "so they can be displayed again for mod in sys.modules.values():", "pass else: filecmp._cache.clear() try: struct = sys.modules['struct'] except KeyError: pass", "sys.stderr = old_stderr def setup_unraisable_hook(): global orig_unraisablehook orig_unraisablehook = sys.unraisablehook", "= sys.modules['mimetypes'] except KeyError: pass else: mimetypes._default_mime_types() try: filecmp =", "'x' may be a '--random' list or a set() print(textwrap.fill('", "stream is not None: stream.flush() # Clear assorted module caches.", "= os.path.splitext(name) if ext == '.py': names[idx] = basename def", "arg indent (default 4) is the number of blanks with", "indent=4, file=None): \"\"\"Print the elements of iterable x to stdout.", "in sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks), file=file) def print_warning(msg): support.print_warning(msg) orig_unraisablehook", "except KeyError: pass else: for f in typing._cleanups: f() support.gc_collect()", "try: mimetypes = sys.modules['mimetypes'] except KeyError: pass else: mimetypes._default_mime_types() try:", "'__warningregistry__'): del mod.__warningregistry__ # Flush standard output, so that buffered", "print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks), file=file)", "return for idx, name in enumerate(names): basename, ext = os.path.splitext(name)", "doctest.master = None try: ctypes = sys.modules['ctypes'] except KeyError: pass", "be a '--random' list or a set() print(textwrap.fill(' '.join(str(elt) for", "try: linecache = sys.modules['linecache'] except KeyError: pass else: linecache.clearcache() try:", "import support def format_duration(seconds): ms = math.ceil(seconds * 1e3) 
seconds,", "Don't worry about resetting the cache if the module is", "= sys.__stderr__ orig_unraisablehook(unraisable) finally: sys.stderr = old_stderr def setup_unraisable_hook(): global", "initial_indent=blanks, subsequent_indent=blanks), file=file) def print_warning(msg): support.print_warning(msg) orig_unraisablehook = None def", "= sys.modules['doctest'] except KeyError: pass else: doctest.master = None try:", "= ' ' * indent # Print the sorted list:", "pass else: urllib_parse.clear_cache() try: urllib_request = sys.modules['urllib.request'] except KeyError: pass", "pass else: distutils_dir_util._path_created.clear() try: re = sys.modules['re'] except KeyError: pass", "KeyError: pass else: _strptime._regex_cache.clear() try: urllib_parse = sys.modules['urllib.parse'] except KeyError:", "sys.stderr, sys.__stdout__, sys.__stderr__): if stream is not None: stream.flush() #", "% ms parts = parts[:2] return ' '.join(parts) def removepy(names):", "seconds: if parts: # 2 min 1 sec parts.append('%s sec'", "% (n, word) else: return \"%d %ss\" % (n, word)", "sys.modules['urllib.request'] except KeyError: pass else: urllib_request.urlcleanup() try: linecache = sys.modules['linecache']", "def setup_unraisable_hook(): global orig_unraisablehook orig_unraisablehook = sys.unraisablehook sys.unraisablehook = regrtest_unraisable_hook", "else: struct._clearcache() try: doctest = sys.modules['doctest'] except KeyError: pass else:", "= parts[:2] return ' '.join(parts) def removepy(names): if not names:", "try: urllib_parse = sys.modules['urllib.parse'] except KeyError: pass else: urllib_parse.clear_cache() try:", "(default 70) is the maximum line length. 
Optional arg indent", "True print_warning(\"Unraisable exception\") old_stderr = sys.stderr try: sys.stderr = sys.__stderr__", "parts = [] if hours: parts.append('%s hour' % hours) if", "urllib_parse.clear_cache() try: urllib_request = sys.modules['urllib.request'] except KeyError: pass else: urllib_request.urlcleanup()", "for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if stream is", "the warnings registry, so they can be displayed again for", "KeyError: pass else: doctest.master = None try: ctypes = sys.modules['ctypes']", "math import os.path import sys import textwrap from test import", "Optional arg indent (default 4) is the number of blanks", "except KeyError: pass else: mimetypes._default_mime_types() try: filecmp = sys.modules['filecmp'] except", "list: 'x' may be a '--random' list or a set()", "= math.ceil(seconds * 1e3) seconds, ms = divmod(ms, 1000) minutes,", "is not loaded try: distutils_dir_util = sys.modules['distutils.dir_util'] except KeyError: pass", "with which to begin each line. \"\"\" blanks = '", "width (default 70) is the maximum line length. Optional arg", "support def format_duration(seconds): ms = math.ceil(seconds * 1e3) seconds, ms", "= divmod(minutes, 60) parts = [] if hours: parts.append('%s hour'", "60) parts = [] if hours: parts.append('%s hour' % hours)", "clear_caches(): # Clear the warnings registry, so they can be", "# Clear the warnings registry, so they can be displayed", "else: re.purge() try: _strptime = sys.modules['_strptime'] except KeyError: pass else:", "elements of iterable x to stdout. 
Optional arg width (default", "output, so that buffered data is sent to the OS", "exception\") old_stderr = sys.stderr try: sys.stderr = sys.__stderr__ orig_unraisablehook(unraisable) finally:", "KeyError: pass else: mimetypes._default_mime_types() try: filecmp = sys.modules['filecmp'] except KeyError:", "word) def printlist(x, width=70, indent=4, file=None): \"\"\"Print the elements of", "divmod(ms, 1000) minutes, seconds = divmod(seconds, 60) hours, minutes =", "if n == 1: return \"%d %s\" % (n, word)", "of iterable x to stdout. Optional arg width (default 70)", "a '--random' list or a set() print(textwrap.fill(' '.join(str(elt) for elt", "print_warning(\"Unraisable exception\") old_stderr = sys.stderr try: sys.stderr = sys.__stderr__ orig_unraisablehook(unraisable)", "(seconds + ms / 1000)) if not parts: return '%s", "textwrap from test import support def format_duration(seconds): ms = math.ceil(seconds", "# Print the sorted list: 'x' may be a '--random'", "divmod(seconds, 60) hours, minutes = divmod(minutes, 60) parts = []", "'--random' list or a set() print(textwrap.fill(' '.join(str(elt) for elt in", "* 1e3) seconds, ms = divmod(ms, 1000) minutes, seconds =", "if the module is not loaded try: distutils_dir_util = sys.modules['distutils.dir_util']", "try: distutils_dir_util = sys.modules['distutils.dir_util'] except KeyError: pass else: distutils_dir_util._path_created.clear() try:", "urllib_parse = sys.modules['urllib.parse'] except KeyError: pass else: urllib_parse.clear_cache() try: urllib_request", "test import support def format_duration(seconds): ms = math.ceil(seconds * 1e3)", "= sys.modules['ctypes'] except KeyError: pass else: ctypes._reset_cache() try: typing =", "60) hours, minutes = divmod(minutes, 60) parts = [] if", "sys.modules['mimetypes'] except KeyError: pass else: mimetypes._default_mime_types() try: filecmp = sys.modules['filecmp']", "if parts: # 2 min 1 sec parts.append('%s sec' %", "width=70, indent=4, file=None): \"\"\"Print the elements 
of iterable x to", "hour' % hours) if minutes: parts.append('%s min' % minutes) if", "sys.modules['typing'] except KeyError: pass else: for f in typing._cleanups: f()", "Clear the warnings registry, so they can be displayed again", "pass else: mimetypes._default_mime_types() try: filecmp = sys.modules['filecmp'] except KeyError: pass", "else: filecmp._cache.clear() try: struct = sys.modules['struct'] except KeyError: pass else:", "which to begin each line. \"\"\" blanks = ' '", "urllib_request.urlcleanup() try: linecache = sys.modules['linecache'] except KeyError: pass else: linecache.clearcache()", "a set() print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)), width, initial_indent=blanks,", "except KeyError: pass else: re.purge() try: _strptime = sys.modules['_strptime'] except", "%ss\" % (n, word) def printlist(x, width=70, indent=4, file=None): \"\"\"Print", "sys.stderr = sys.__stderr__ orig_unraisablehook(unraisable) finally: sys.stderr = old_stderr def setup_unraisable_hook():", "(sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if stream is not None: stream.flush()", "try: ctypes = sys.modules['ctypes'] except KeyError: pass else: ctypes._reset_cache() try:", "sec parts.append('%s sec' % seconds) else: # 1.0 sec parts.append('%.1f", "KeyError: pass else: urllib_parse.clear_cache() try: urllib_request = sys.modules['urllib.request'] except KeyError:", "maximum line length. Optional arg indent (default 4) is the", "seconds, ms = divmod(ms, 1000) minutes, seconds = divmod(seconds, 60)", "parts[:2] return ' '.join(parts) def removepy(names): if not names: return", "number of blanks with which to begin each line. 
\"\"\"", "in sys.modules.values(): if hasattr(mod, '__warningregistry__'): del mod.__warningregistry__ # Flush standard", "%s\" % (n, word) else: return \"%d %ss\" % (n,", "divmod(minutes, 60) parts = [] if hours: parts.append('%s hour' %", "= sys.modules['typing'] except KeyError: pass else: for f in typing._cleanups:", "def count(n, word): if n == 1: return \"%d %s\"", "the maximum line length. Optional arg indent (default 4) is", "and # associated Python objects are reclaimed. for stream in", "file=file) def print_warning(msg): support.print_warning(msg) orig_unraisablehook = None def regrtest_unraisable_hook(unraisable): global", "if hours: parts.append('%s hour' % hours) if minutes: parts.append('%s min'", "the OS and # associated Python objects are reclaimed. for", "'.py': names[idx] = basename def count(n, word): if n ==", "stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if stream is not", "\"\"\"Print the elements of iterable x to stdout. Optional arg", "global orig_unraisablehook support.environment_altered = True print_warning(\"Unraisable exception\") old_stderr = sys.stderr", "(default 4) is the number of blanks with which to", "orig_unraisablehook orig_unraisablehook = sys.unraisablehook sys.unraisablehook = regrtest_unraisable_hook def clear_caches(): #", "regrtest_unraisable_hook(unraisable): global orig_unraisablehook support.environment_altered = True print_warning(\"Unraisable exception\") old_stderr =", "registry, so they can be displayed again for mod in", "sys.unraisablehook = regrtest_unraisable_hook def clear_caches(): # Clear the warnings registry,", "for idx, name in enumerate(names): basename, ext = os.path.splitext(name) if", "None try: ctypes = sys.modules['ctypes'] except KeyError: pass else: ctypes._reset_cache()", "else: distutils_dir_util._path_created.clear() try: re = sys.modules['re'] except KeyError: pass else:", "try: urllib_request = sys.modules['urllib.request'] except KeyError: pass else: 
urllib_request.urlcleanup() try:", "1 sec parts.append('%s sec' % seconds) else: # 1.0 sec", "\"%d %s\" % (n, word) else: return \"%d %ss\" %", "names[idx] = basename def count(n, word): if n == 1:", "set() print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks),", "if hasattr(mod, '__warningregistry__'): del mod.__warningregistry__ # Flush standard output, so", "caches. # Don't worry about resetting the cache if the", "line length. Optional arg indent (default 4) is the number", "is sent to the OS and # associated Python objects", "seconds) else: # 1.0 sec parts.append('%.1f sec' % (seconds +", "1: return \"%d %s\" % (n, word) else: return \"%d", "may be a '--random' list or a set() print(textwrap.fill(' '.join(str(elt)", "return ' '.join(parts) def removepy(names): if not names: return for", "elt in sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks), file=file) def print_warning(msg): support.print_warning(msg)", "except KeyError: pass else: ctypes._reset_cache() try: typing = sys.modules['typing'] except", "KeyError: pass else: ctypes._reset_cache() try: typing = sys.modules['typing'] except KeyError:", "def removepy(names): if not names: return for idx, name in", "minutes) if seconds: if parts: # 2 min 1 sec", "ms / 1000)) if not parts: return '%s ms' %", "in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if stream is not None:", "if minutes: parts.append('%s min' % minutes) if seconds: if parts:", "Clear assorted module caches. # Don't worry about resetting the", "parts.append('%s sec' % seconds) else: # 1.0 sec parts.append('%.1f sec'", "= None try: ctypes = sys.modules['ctypes'] except KeyError: pass else:", "not None: stream.flush() # Clear assorted module caches. # Don't", "objects are reclaimed. 
for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):", "KeyError: pass else: filecmp._cache.clear() try: struct = sys.modules['struct'] except KeyError:", "if not names: return for idx, name in enumerate(names): basename,", "again for mod in sys.modules.values(): if hasattr(mod, '__warningregistry__'): del mod.__warningregistry__", "'.join(parts) def removepy(names): if not names: return for idx, name", "1.0 sec parts.append('%.1f sec' % (seconds + ms / 1000))", "# Clear assorted module caches. # Don't worry about resetting", "hours) if minutes: parts.append('%s min' % minutes) if seconds: if", "'.join(str(elt) for elt in sorted(x)), width, initial_indent=blanks, subsequent_indent=blanks), file=file) def", "= sys.modules['distutils.dir_util'] except KeyError: pass else: distutils_dir_util._path_created.clear() try: re =", "sys.__stderr__ orig_unraisablehook(unraisable) finally: sys.stderr = old_stderr def setup_unraisable_hook(): global orig_unraisablehook", "# 1.0 sec parts.append('%.1f sec' % (seconds + ms /", "% (n, word) def printlist(x, width=70, indent=4, file=None): \"\"\"Print the", "enumerate(names): basename, ext = os.path.splitext(name) if ext == '.py': names[idx]", "1000)) if not parts: return '%s ms' % ms parts", "1e3) seconds, ms = divmod(ms, 1000) minutes, seconds = divmod(seconds,", "removepy(names): if not names: return for idx, name in enumerate(names):", "import math import os.path import sys import textwrap from test", "sent to the OS and # associated Python objects are", "else: urllib_parse.clear_cache() try: urllib_request = sys.modules['urllib.request'] except KeyError: pass else:", "= regrtest_unraisable_hook def clear_caches(): # Clear the warnings registry, so", "pass else: re.purge() try: _strptime = sys.modules['_strptime'] except KeyError: pass", "mimetypes = sys.modules['mimetypes'] except KeyError: pass else: mimetypes._default_mime_types() try: filecmp" ]
[ "cursor.fetchall() base_tree.append({'title': u'Triggers (' + unicode(len(triggers)) + u')', 'key': u'trigger',", "JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children), mimetype='application/json') def get_tables(self, key, path_on_disk):", "browser\"\"\" method = helper.get_request_value(request, 'method', raise_key_error=True) if method == \"base\":", "}) # Tables cursor.execute(\"SELECT name FROM sqlite_master WHERE type='table';\") tables", "import logging import sqlite3 class FaSqliteAjax(IPlugin): def __init__(self): self.display_name =", "base_tree.append({'title': u'Tables (' + unicode(len(tables)) + u')', 'key': u'table', 'folder':", "path_on_disk): \"\"\"Checks if the file is compatible with this plugin\"\"\"", "Response(json.dumps(base_tree), mimetype='application/json') def get_children(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request,", "command\"\"\" return \"application/json\" def get(self, evidence, helper, path_on_disk, request): \"\"\"Returns", "+ u')', 'key': u'trigger', 'folder': True, 'lazy': True }) connection.close()", "= unicode(helper.get_request_value(request, 'key')) connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() cursor.execute(\"pragma", "\"\"\"Returns the mimetype of this plugins get command\"\"\" return \"application/json\"", "+ '\" provided') raise ValueError('Method \"' + method + '\"", "+ unicode(len(indexes)) + u')', 'key': u'index', 'folder': True, 'lazy': True", "] for row in rows: table.append(u' <th>' + unicode(row[1]) +", "connection.close() # TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask", "(' + unicode(len(triggers)) + u')', 'key': u'trigger', 'folder': True, 'lazy':", "in rows: table.append(u' <th>' + unicode(row[1]) + u'</th>') table.append(u' </tr>", "= True self.fast = False self.action = False 
IPlugin.__init__(self) def", "cursor.fetchone() # Master Table base_tree.append({'title': u'Master Table (1)', 'key': u'master',", "u')', 'key': u'index', 'folder': True, 'lazy': True }) # Triggers", "\"')\") rows = cursor.fetchall() table = [ u'<table id=\"sqlitet01\" class=\"display\">',", "+ key + \"')\") rows = cursor.fetchall() table = [", "method == \"children\": return self.get_children(request, helper, path_on_disk) elif method ==", "mimetype): \"\"\"Returns the mimetype of this plugins get command\"\"\" return", "== u'master': children.append({'title': u'Master Table (1)', 'key': u'sqlite_master', 'folder': False,", "True, 'lazy': True }) connection.close() # TODO REPLACE WITH DICTIONARY", "FROM sqlite_master WHERE type='index';\") indexes = cursor.fetchall() base_tree.append({'title': u'Indexes ('", "+ u')', 'key': u'index', 'folder': True, 'lazy': True }) #", "'lazy': False }) else: for child in self.get_tables(key, path_on_disk): children.append({'title':", "'folder': True, 'lazy': True }) # Views cursor.execute(\"SELECT name FROM", "self.values(request, helper, path_on_disk) logging.error('Unknown method \"' + method + '\"", "path_on_disk) logging.error('Unknown method \"' + method + '\" provided') raise", "the mimetype of this plugins get command\"\"\" return \"application/json\" def", "+ \"')\") rows = cursor.fetchall() table = [ u'<table id=\"sqlitet01\"", "<th>' + unicode(row[1]) + u'</th>') table.append(u' </tr> </thead>') cursor.execute('SELECT *", "table.append(u' <tr>') for item in row: try: table.append(u' <td>' +", "Viewer plugin \"\"\" from yapsy.IPlugin import IPlugin from flask import", "for row in rows: table.append(u' <tr>') for item in row:", "unicode(len(views)) + u')', 'key': u'view', 'folder': True, 'lazy': True })", "deactivate(self): IPlugin.deactivate(self) return def check(self, evidence, path_on_disk): \"\"\"Checks if the", "}) # TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE: 
http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask", "return def check(self, evidence, path_on_disk): \"\"\"Checks if the file is", "\"\"\"Returns the result of this plugin to be displayed in", "else: for child in self.get_tables(key, path_on_disk): children.append({'title': child, 'key': child,", "(' + unicode(len(views)) + u')', 'key': u'view', 'folder': True, 'lazy':", "= cursor.fetchall() table = [ u'<table id=\"sqlitet01\" class=\"display\">', u' <thead><tr>'", "def check(self, evidence, path_on_disk): \"\"\"Checks if the file is compatible", "evidence, path_on_disk): \"\"\"Checks if the file is compatible with this", "mimetype='application/json') def get_tables(self, key, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor =", "path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() tables = []", "tables = [] table_list = cursor.execute(\"SELECT name FROM sqlite_master WHERE", "False }) else: for child in self.get_tables(key, path_on_disk): children.append({'title': child,", "FROM sqlite_master WHERE type='table';\") tables = cursor.fetchall() base_tree.append({'title': u'Tables ('", "Table (1)', 'key': u'sqlite_master', 'folder': False, 'lazy': False }) else:", "connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() base_tree = [] cursor.execute(\"SELECT", "True }) # Indexes cursor.execute(\"SELECT name FROM sqlite_master WHERE type='index';\")", "cursor.execute(\"SELECT * FROM sqlite_master WHERE type='table';\") cursor.fetchone() # Master Table", "def base_tree(self, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() base_tree", "plugins get command\"\"\" return \"application/json\" def get(self, evidence, helper, path_on_disk,", "u'master': children.append({'title': u'Master Table (1)', 'key': u'sqlite_master', 'folder': False, 'lazy':", "= False self.action = False IPlugin.__init__(self) def activate(self): IPlugin.activate(self) 
return", "Response(json.dumps(children), mimetype='application/json') def get_tables(self, key, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor", "cursor.execute(\"pragma table_info('\" + key + \"')\") rows = cursor.fetchall() table", "name FROM sqlite_master WHERE type='view';\") views = cursor.fetchall() base_tree.append({'title': u'Views", "= connection.cursor() base_tree = [] cursor.execute(\"SELECT * FROM sqlite_master WHERE", "this plugin to be displayed in a browser\"\"\" method =", "# Triggers cursor.execute(\"SELECT name FROM sqlite_master WHERE type='trigger';\") triggers =", "+ unicode(type(item)) + u'</td>') table.append(u' </tr>') table.append(u'</table>') connection.close() return jsonify({'table':", "AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree), mimetype='application/json') def get_children(self, request,", "json import logging import sqlite3 class FaSqliteAjax(IPlugin): def __init__(self): self.display_name", "WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children), mimetype='application/json') def", "'key': u'trigger', 'folder': True, 'lazy': True }) connection.close() # TODO", "views = cursor.fetchall() base_tree.append({'title': u'Views (' + unicode(len(views)) + u')',", "displayed in a browser\"\"\" method = helper.get_request_value(request, 'method', raise_key_error=True) if", "+ key) rows = cursor.fetchall() for row in rows: table.append(u'", "\"\"\" AJAX for SQLite Viewer plugin \"\"\" from yapsy.IPlugin import", "method + '\" provided') raise ValueError('Method \"' + method +", "child, 'key': child, 'folder': False, 'lazy': False }) # TODO", "Triggers cursor.execute(\"SELECT name FROM sqlite_master WHERE type='trigger';\") triggers = cursor.fetchall()", "in self.get_tables(key, path_on_disk): children.append({'title': child, 'key': child, 'folder': 
False, 'lazy':", "helper, path_on_disk, request): \"\"\"Returns the result of this plugin to", "FaSqliteAjax(IPlugin): def __init__(self): self.display_name = 'SQLite Ajax' self.popularity = 0", "True, 'lazy': True }) # Tables cursor.execute(\"SELECT name FROM sqlite_master", "FROM ' + key) rows = cursor.fetchall() for row in", "False }) # TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE:", "= helper.get_request_value(request, 'method', raise_key_error=True) if method == \"base\": return self.base_tree(path_on_disk)", "name FROM sqlite_master WHERE type='trigger';\") triggers = cursor.fetchall() base_tree.append({'title': u'Triggers", "key, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() tables =", "'key': u'master', 'folder': True, 'lazy': True }) # Tables cursor.execute(\"SELECT", "for child in self.get_tables(key, path_on_disk): children.append({'title': child, 'key': child, 'folder':", "type='view';\") views = cursor.fetchall() base_tree.append({'title': u'Views (' + unicode(len(views)) +", "get_tables(self, key, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() tables", "row in rows: table.append(u' <tr>') for item in row: try:", "table_list: tables.append(unicode(table[0])) connection.close() return tables def values(self, request, helper, path_on_disk):", "'method', raise_key_error=True) if method == \"base\": return self.base_tree(path_on_disk) elif method", "FROM sqlite_master WHERE type='table';\") cursor.fetchone() # Master Table base_tree.append({'title': u'Master", "this plugins get command\"\"\" return \"application/json\" def get(self, evidence, helper,", "def mimetype(self, mimetype): \"\"\"Returns the mimetype of this plugins get", "raise_key_error=True) if method == \"base\": return self.base_tree(path_on_disk) elif method ==", "= [] if key == u'master': children.append({'title': u'Master Table (1)',", "Response, jsonify import json import logging import sqlite3 class 
FaSqliteAjax(IPlugin):", "u' <thead><tr>' ] for row in rows: table.append(u' <th>' +", "cursor.fetchall() table = [ u'<table id=\"sqlitet01\" class=\"display\">', u' <thead><tr>' ]", "child in self.get_tables(key, path_on_disk): children.append({'title': child, 'key': child, 'folder': False,", "in row: try: table.append(u' <td>' + unicode(item) + u'</td>') except:", "values(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) connection =", "True }) # Triggers cursor.execute(\"SELECT name FROM sqlite_master WHERE type='trigger';\")", "connection.cursor() cursor.execute(\"pragma table_info('\" + key + \"')\") rows = cursor.fetchall()", "sqlite_master WHERE type='\" + key + \"';\") for table in", "path_on_disk) elif method == \"values\": return self.values(request, helper, path_on_disk) logging.error('Unknown", "check(self, evidence, path_on_disk): \"\"\"Checks if the file is compatible with", "}) # Views cursor.execute(\"SELECT name FROM sqlite_master WHERE type='view';\") views", "unicode(len(triggers)) + u')', 'key': u'trigger', 'folder': True, 'lazy': True })", "cursor.fetchall() base_tree.append({'title': u'Views (' + unicode(len(views)) + u')', 'key': u'view',", "IPlugin.activate(self) return def deactivate(self): IPlugin.deactivate(self) return def check(self, evidence, path_on_disk):", "True }) connection.close() # TODO REPLACE WITH DICTIONARY AND JSONIFY,", "method = helper.get_request_value(request, 'method', raise_key_error=True) if method == \"base\": return", "base_tree.append({'title': u'Triggers (' + unicode(len(triggers)) + u')', 'key': u'trigger', 'folder':", "method + '\" is not valid') def base_tree(self, path_on_disk): connection", "base_tree.append({'title': u'Indexes (' + unicode(len(indexes)) + u')', 'key': u'index', 'folder':", "'SQLite Ajax' self.popularity = 0 self.cache = True self.fast =", "key + \"')\") rows = cursor.fetchall() table = [ u'<table", "= [] table_list = cursor.execute(\"SELECT name 
FROM sqlite_master WHERE type='\"", "Tables cursor.execute(\"SELECT name FROM sqlite_master WHERE type='table';\") tables = cursor.fetchall()", "= sqlite3.connect(path_on_disk) cursor = connection.cursor() tables = [] table_list =", "FROM sqlite_master WHERE type='view';\") views = cursor.fetchall() base_tree.append({'title': u'Views ('", "+ unicode(item) + u'</td>') except: table.append(u' <td>' + unicode(type(item)) +", "key) rows = cursor.fetchall() for row in rows: table.append(u' <tr>')", "unicode(helper.get_request_value(request, 'key')) children = [] if key == u'master': children.append({'title':", "mimetype(self, mimetype): \"\"\"Returns the mimetype of this plugins get command\"\"\"", "u'index', 'folder': True, 'lazy': True }) # Triggers cursor.execute(\"SELECT name", "type='trigger';\") triggers = cursor.fetchall() base_tree.append({'title': u'Triggers (' + unicode(len(triggers)) +", "def get(self, evidence, helper, path_on_disk, request): \"\"\"Returns the result of", "+ method + '\" is not valid') def base_tree(self, path_on_disk):", "with this plugin\"\"\" return True def mimetype(self, mimetype): \"\"\"Returns the", "rows = cursor.fetchall() for row in rows: table.append(u' <tr>') for", "to be displayed in a browser\"\"\" method = helper.get_request_value(request, 'method',", "self.get_tables(key, path_on_disk): children.append({'title': child, 'key': child, 'folder': False, 'lazy': False", "u'sqlite_master', 'folder': False, 'lazy': False }) else: for child in", "sqlite_master WHERE type='index';\") indexes = cursor.fetchall() base_tree.append({'title': u'Indexes (' +", "'lazy': True }) # Views cursor.execute(\"SELECT name FROM sqlite_master WHERE", "table = [ u'<table id=\"sqlitet01\" class=\"display\">', u' <thead><tr>' ] for", "request): \"\"\"Returns the result of this plugin to be displayed", "Views cursor.execute(\"SELECT name FROM sqlite_master WHERE type='view';\") views = cursor.fetchall()", "+ \"';\") for table in table_list: 
tables.append(unicode(table[0])) connection.close() return tables", "is compatible with this plugin\"\"\" return True def mimetype(self, mimetype):", "= cursor.fetchall() base_tree.append({'title': u'Views (' + unicode(len(views)) + u')', 'key':", "id=\"sqlitet01\" class=\"display\">', u' <thead><tr>' ] for row in rows: table.append(u'", "True def mimetype(self, mimetype): \"\"\"Returns the mimetype of this plugins", "sqlite3.connect(path_on_disk) cursor = connection.cursor() base_tree = [] cursor.execute(\"SELECT * FROM", "(' + unicode(len(tables)) + u')', 'key': u'table', 'folder': True, 'lazy':", "'key')) connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() cursor.execute(\"pragma table_info('\" +", "= cursor.fetchall() base_tree.append({'title': u'Triggers (' + unicode(len(triggers)) + u')', 'key':", "file is compatible with this plugin\"\"\" return True def mimetype(self,", "unicode(len(tables)) + u')', 'key': u'table', 'folder': True, 'lazy': True })", "Table base_tree.append({'title': u'Master Table (1)', 'key': u'master', 'folder': True, 'lazy':", "SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree), mimetype='application/json') def get_children(self, request, helper, path_on_disk):", "'key': child, 'folder': False, 'lazy': False }) # TODO REPLACE", "[] cursor.execute(\"SELECT * FROM sqlite_master WHERE type='table';\") cursor.fetchone() # Master", "== \"base\": return self.base_tree(path_on_disk) elif method == \"children\": return self.get_children(request,", "u'Triggers (' + unicode(len(triggers)) + u')', 'key': u'trigger', 'folder': True,", "True self.fast = False self.action = False IPlugin.__init__(self) def activate(self):", "'key')) children = [] if key == u'master': children.append({'title': u'Master", "table.append(u' <td>' + unicode(item) + u'</td>') except: table.append(u' <td>' +", "row: try: table.append(u' <td>' + unicode(item) + u'</td>') except: 
table.append(u'", "method == \"values\": return self.values(request, helper, path_on_disk) logging.error('Unknown method \"'", "base_tree(self, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() base_tree =", "if key == u'master': children.append({'title': u'Master Table (1)', 'key': u'sqlite_master',", "\"\"\"Checks if the file is compatible with this plugin\"\"\" return", "from yapsy.IPlugin import IPlugin from flask import Response, jsonify import", "u'Tables (' + unicode(len(tables)) + u')', 'key': u'table', 'folder': True,", "get(self, evidence, helper, path_on_disk, request): \"\"\"Returns the result of this", "AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children), mimetype='application/json') def get_tables(self, key,", "SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children), mimetype='application/json') def get_tables(self, key, path_on_disk): connection", "\"base\": return self.base_tree(path_on_disk) elif method == \"children\": return self.get_children(request, helper,", "FROM sqlite_master WHERE type='\" + key + \"';\") for table", "self.base_tree(path_on_disk) elif method == \"children\": return self.get_children(request, helper, path_on_disk) elif", "= 0 self.cache = True self.fast = False self.action =", "self.cache = True self.fast = False self.action = False IPlugin.__init__(self)", "get_children(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) children =", "\"values\": return self.values(request, helper, path_on_disk) logging.error('Unknown method \"' + method", "triggers = cursor.fetchall() base_tree.append({'title': u'Triggers (' + unicode(len(triggers)) + u')',", "= [ u'<table id=\"sqlitet01\" class=\"display\">', u' <thead><tr>' ] for row", "+ u')', 'key': u'table', 'folder': True, 'lazy': True }) #", "False 
IPlugin.__init__(self) def activate(self): IPlugin.activate(self) return def deactivate(self): IPlugin.deactivate(self) return", "REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children), mimetype='application/json')", "# Tables cursor.execute(\"SELECT name FROM sqlite_master WHERE type='table';\") tables =", "<td>' + unicode(type(item)) + u'</td>') table.append(u' </tr>') table.append(u'</table>') connection.close() return", "for table in table_list: tables.append(unicode(table[0])) connection.close() return tables def values(self,", "+ '\" is not valid') def base_tree(self, path_on_disk): connection =", "table in table_list: tables.append(unicode(table[0])) connection.close() return tables def values(self, request,", "AJAX for SQLite Viewer plugin \"\"\" from yapsy.IPlugin import IPlugin", "cursor.execute(\"SELECT name FROM sqlite_master WHERE type='\" + key + \"';\")", "cursor.execute('SELECT * FROM ' + key) rows = cursor.fetchall() for", "WHERE type='index';\") indexes = cursor.fetchall() base_tree.append({'title': u'Indexes (' + unicode(len(indexes))", "SQLite Viewer plugin \"\"\" from yapsy.IPlugin import IPlugin from flask", "sqlite3.connect(path_on_disk) cursor = connection.cursor() cursor.execute(\"pragma table_info('\" + key + \"')\")", "not valid') def base_tree(self, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor =", "REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree), mimetype='application/json')", "return True def mimetype(self, mimetype): \"\"\"Returns the mimetype of this", "sqlite3 class FaSqliteAjax(IPlugin): def __init__(self): self.display_name = 'SQLite Ajax' self.popularity", "raise ValueError('Method \"' + method + '\" is not valid')", "'key': u'view', 'folder': True, 'lazy': True }) # Indexes cursor.execute(\"SELECT", 
"TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children),", "u'</th>') table.append(u' </tr> </thead>') cursor.execute('SELECT * FROM ' + key)", "in rows: table.append(u' <tr>') for item in row: try: table.append(u'", "evidence, helper, path_on_disk, request): \"\"\"Returns the result of this plugin", "type='table';\") tables = cursor.fetchall() base_tree.append({'title': u'Tables (' + unicode(len(tables)) +", "elif method == \"values\": return self.values(request, helper, path_on_disk) logging.error('Unknown method", "sqlite_master WHERE type='table';\") cursor.fetchone() # Master Table base_tree.append({'title': u'Master Table", "provided') raise ValueError('Method \"' + method + '\" is not", "the result of this plugin to be displayed in a", "cursor.fetchall() base_tree.append({'title': u'Tables (' + unicode(len(tables)) + u')', 'key': u'table',", "rows: table.append(u' <th>' + unicode(row[1]) + u'</th>') table.append(u' </tr> </thead>')", "plugin \"\"\" from yapsy.IPlugin import IPlugin from flask import Response,", "# Master Table base_tree.append({'title': u'Master Table (1)', 'key': u'master', 'folder':", "u'master', 'folder': True, 'lazy': True }) # Tables cursor.execute(\"SELECT name", "# TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return", "return tables def values(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request,", "item in row: try: table.append(u' <td>' + unicode(item) + u'</td>')", "class FaSqliteAjax(IPlugin): def __init__(self): self.display_name = 'SQLite Ajax' self.popularity =", "class=\"display\">', u' <thead><tr>' ] for row in rows: table.append(u' <th>'", "== \"children\": return self.get_children(request, helper, path_on_disk) elif method == \"values\":", "'lazy': True }) connection.close() # TODO REPLACE WITH DICTIONARY AND", 
"Master Table base_tree.append({'title': u'Master Table (1)', 'key': u'master', 'folder': True,", "path_on_disk): key = unicode(helper.get_request_value(request, 'key')) children = [] if key", "request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) connection = sqlite3.connect(path_on_disk)", "False, 'lazy': False }) # TODO REPLACE WITH DICTIONARY AND", "return self.base_tree(path_on_disk) elif method == \"children\": return self.get_children(request, helper, path_on_disk)", "+ u'</th>') table.append(u' </tr> </thead>') cursor.execute('SELECT * FROM ' +", "key + \"';\") for table in table_list: tables.append(unicode(table[0])) connection.close() return", "<tr>') for item in row: try: table.append(u' <td>' + unicode(item)", "indexes = cursor.fetchall() base_tree.append({'title': u'Indexes (' + unicode(len(indexes)) + u')',", "u'Views (' + unicode(len(views)) + u')', 'key': u'view', 'folder': True,", "self.fast = False self.action = False IPlugin.__init__(self) def activate(self): IPlugin.activate(self)", "tables = cursor.fetchall() base_tree.append({'title': u'Tables (' + unicode(len(tables)) + u')',", "jsonify import json import logging import sqlite3 class FaSqliteAjax(IPlugin): def", "for SQLite Viewer plugin \"\"\" from yapsy.IPlugin import IPlugin from", "cursor = connection.cursor() tables = [] table_list = cursor.execute(\"SELECT name", "type='index';\") indexes = cursor.fetchall() base_tree.append({'title': u'Indexes (' + unicode(len(indexes)) +", "\"application/json\" def get(self, evidence, helper, path_on_disk, request): \"\"\"Returns the result", "def get_children(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) children", "def deactivate(self): IPlugin.deactivate(self) return def check(self, evidence, path_on_disk): \"\"\"Checks if", "'lazy': True }) # Tables cursor.execute(\"SELECT name FROM sqlite_master WHERE", "True }) # Tables cursor.execute(\"SELECT name FROM 
sqlite_master WHERE type='table';\")", "connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() cursor.execute(\"pragma table_info('\" + key", "}) # Triggers cursor.execute(\"SELECT name FROM sqlite_master WHERE type='trigger';\") triggers", "def get_tables(self, key, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor()", "def activate(self): IPlugin.activate(self) return def deactivate(self): IPlugin.deactivate(self) return def check(self,", "method == \"base\": return self.base_tree(path_on_disk) elif method == \"children\": return", "cursor.fetchall() base_tree.append({'title': u'Indexes (' + unicode(len(indexes)) + u')', 'key': u'index',", "request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) children = []", "True, 'lazy': True }) # Triggers cursor.execute(\"SELECT name FROM sqlite_master", "u'view', 'folder': True, 'lazy': True }) # Indexes cursor.execute(\"SELECT name", "'lazy': True }) # Triggers cursor.execute(\"SELECT name FROM sqlite_master WHERE", "name FROM sqlite_master WHERE type='\" + key + \"';\") for", "'\" provided') raise ValueError('Method \"' + method + '\" is", "= connection.cursor() cursor.execute(\"pragma table_info('\" + key + \"')\") rows =", "table_info('\" + key + \"')\") rows = cursor.fetchall() table =", "this plugin\"\"\" return True def mimetype(self, mimetype): \"\"\"Returns the mimetype", "flask import Response, jsonify import json import logging import sqlite3", "name FROM sqlite_master WHERE type='table';\") tables = cursor.fetchall() base_tree.append({'title': u'Tables", "= False IPlugin.__init__(self) def activate(self): IPlugin.activate(self) return def deactivate(self): IPlugin.deactivate(self)", "[] if key == u'master': children.append({'title': u'Master Table (1)', 'key':", "sqlite_master WHERE type='view';\") views = cursor.fetchall() base_tree.append({'title': u'Views (' +", "return \"application/json\" def get(self, evidence, helper, 
path_on_disk, request): \"\"\"Returns the", "'lazy': True }) # Indexes cursor.execute(\"SELECT name FROM sqlite_master WHERE", "if the file is compatible with this plugin\"\"\" return True", "True, 'lazy': True }) # Views cursor.execute(\"SELECT name FROM sqlite_master", "'lazy': False }) # TODO REPLACE WITH DICTIONARY AND JSONIFY,", "'folder': False, 'lazy': False }) # TODO REPLACE WITH DICTIONARY", "+ unicode(len(tables)) + u')', 'key': u'table', 'folder': True, 'lazy': True", "key == u'master': children.append({'title': u'Master Table (1)', 'key': u'sqlite_master', 'folder':", "FROM sqlite_master WHERE type='trigger';\") triggers = cursor.fetchall() base_tree.append({'title': u'Triggers ('", "from flask import Response, jsonify import json import logging import", "True, 'lazy': True }) # Indexes cursor.execute(\"SELECT name FROM sqlite_master", "}) connection.close() # TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE:", "= sqlite3.connect(path_on_disk) cursor = connection.cursor() cursor.execute(\"pragma table_info('\" + key +", "# Indexes cursor.execute(\"SELECT name FROM sqlite_master WHERE type='index';\") indexes =", "self.get_children(request, helper, path_on_disk) elif method == \"values\": return self.values(request, helper,", "\"';\") for table in table_list: tables.append(unicode(table[0])) connection.close() return tables def", "table.append(u' <th>' + unicode(row[1]) + u'</th>') table.append(u' </tr> </thead>') cursor.execute('SELECT", "WHERE type='\" + key + \"';\") for table in table_list:", "' + key) rows = cursor.fetchall() for row in rows:", "'folder': False, 'lazy': False }) else: for child in self.get_tables(key,", "</tr> </thead>') cursor.execute('SELECT * FROM ' + key) rows =", "'key': u'index', 'folder': True, 'lazy': True }) # Triggers cursor.execute(\"SELECT", "u'table', 'folder': True, 'lazy': True }) # Views cursor.execute(\"SELECT name", "except: table.append(u' <td>' + unicode(type(item)) + u'</td>') table.append(u' </tr>') 
table.append(u'</table>')", "'folder': True, 'lazy': True }) connection.close() # TODO REPLACE WITH", "= cursor.fetchall() base_tree.append({'title': u'Indexes (' + unicode(len(indexes)) + u')', 'key':", "\"children\": return self.get_children(request, helper, path_on_disk) elif method == \"values\": return", "helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) children = [] if", "table_list = cursor.execute(\"SELECT name FROM sqlite_master WHERE type='\" + key", "+ u')', 'key': u'view', 'folder': True, 'lazy': True }) #", "<td>' + unicode(item) + u'</td>') except: table.append(u' <td>' + unicode(type(item))", "self.popularity = 0 self.cache = True self.fast = False self.action", "plugin\"\"\" return True def mimetype(self, mimetype): \"\"\"Returns the mimetype of", "\"' + method + '\" is not valid') def base_tree(self,", "def values(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) connection", "'key': u'table', 'folder': True, 'lazy': True }) # Views cursor.execute(\"SELECT", "cursor = connection.cursor() cursor.execute(\"pragma table_info('\" + key + \"')\") rows", "<thead><tr>' ] for row in rows: table.append(u' <th>' + unicode(row[1])", "get command\"\"\" return \"application/json\" def get(self, evidence, helper, path_on_disk, request):", "name FROM sqlite_master WHERE type='index';\") indexes = cursor.fetchall() base_tree.append({'title': u'Indexes", "Table (1)', 'key': u'master', 'folder': True, 'lazy': True }) #", "[] table_list = cursor.execute(\"SELECT name FROM sqlite_master WHERE type='\" +", "path_on_disk): children.append({'title': child, 'key': child, 'folder': False, 'lazy': False })", "base_tree.append({'title': u'Master Table (1)', 'key': u'master', 'folder': True, 'lazy': True", "WHERE type='table';\") tables = cursor.fetchall() base_tree.append({'title': u'Tables (' + unicode(len(tables))", "u')', 'key': u'view', 'folder': True, 'lazy': True }) # Indexes", "</thead>') 
cursor.execute('SELECT * FROM ' + key) rows = cursor.fetchall()", "+ unicode(len(views)) + u')', 'key': u'view', 'folder': True, 'lazy': True", "= connection.cursor() tables = [] table_list = cursor.execute(\"SELECT name FROM", "import IPlugin from flask import Response, jsonify import json import", "ValueError('Method \"' + method + '\" is not valid') def", "0 self.cache = True self.fast = False self.action = False", "WHERE type='table';\") cursor.fetchone() # Master Table base_tree.append({'title': u'Master Table (1)',", "tables def values(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key'))", "yapsy.IPlugin import IPlugin from flask import Response, jsonify import json", "+ unicode(len(triggers)) + u')', 'key': u'trigger', 'folder': True, 'lazy': True", "= [] cursor.execute(\"SELECT * FROM sqlite_master WHERE type='table';\") cursor.fetchone() #", "is not valid') def base_tree(self, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor", "activate(self): IPlugin.activate(self) return def deactivate(self): IPlugin.deactivate(self) return def check(self, evidence,", "sqlite3.connect(path_on_disk) cursor = connection.cursor() tables = [] table_list = cursor.execute(\"SELECT", "connection.cursor() tables = [] table_list = cursor.execute(\"SELECT name FROM sqlite_master", "True }) # Views cursor.execute(\"SELECT name FROM sqlite_master WHERE type='view';\")", "path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() base_tree = []", "DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children), mimetype='application/json') def get_tables(self,", "mimetype of this plugins get command\"\"\" return \"application/json\" def get(self,", "= 'SQLite Ajax' self.popularity = 0 self.cache = True self.fast", "table.append(u' </tr> </thead>') cursor.execute('SELECT * FROM ' + key) rows", "unicode(row[1]) + u'</th>') 
table.append(u' </tr> </thead>') cursor.execute('SELECT * FROM '", "WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree), mimetype='application/json') def", "\"\"\" from yapsy.IPlugin import IPlugin from flask import Response, jsonify", "== \"values\": return self.values(request, helper, path_on_disk) logging.error('Unknown method \"' +", "children = [] if key == u'master': children.append({'title': u'Master Table", "rows = cursor.fetchall() table = [ u'<table id=\"sqlitet01\" class=\"display\">', u'", "= unicode(helper.get_request_value(request, 'key')) children = [] if key == u'master':", "of this plugin to be displayed in a browser\"\"\" method", "WHERE type='view';\") views = cursor.fetchall() base_tree.append({'title': u'Views (' + unicode(len(views))", "key = unicode(helper.get_request_value(request, 'key')) children = [] if key ==", "unicode(type(item)) + u'</td>') table.append(u' </tr>') table.append(u'</table>') connection.close() return jsonify({'table': '\\n'.join(table)})", "unicode(len(indexes)) + u')', 'key': u'index', 'folder': True, 'lazy': True })", "connection.cursor() base_tree = [] cursor.execute(\"SELECT * FROM sqlite_master WHERE type='table';\")", "= cursor.fetchall() for row in rows: table.append(u' <tr>') for item", "helper, path_on_disk) elif method == \"values\": return self.values(request, helper, path_on_disk)", "for row in rows: table.append(u' <th>' + unicode(row[1]) + u'</th>')", "# Views cursor.execute(\"SELECT name FROM sqlite_master WHERE type='view';\") views =", "return Response(json.dumps(base_tree), mimetype='application/json') def get_children(self, request, helper, path_on_disk): key =", "cursor.execute(\"SELECT name FROM sqlite_master WHERE type='table';\") tables = cursor.fetchall() base_tree.append({'title':", "+ unicode(row[1]) + u'</th>') table.append(u' </tr> </thead>') cursor.execute('SELECT * FROM", "= cursor.fetchall() 
base_tree.append({'title': u'Tables (' + unicode(len(tables)) + u')', 'key':", "logging import sqlite3 class FaSqliteAjax(IPlugin): def __init__(self): self.display_name = 'SQLite", "helper.get_request_value(request, 'method', raise_key_error=True) if method == \"base\": return self.base_tree(path_on_disk) elif", "sqlite_master WHERE type='table';\") tables = cursor.fetchall() base_tree.append({'title': u'Tables (' +", "return Response(json.dumps(children), mimetype='application/json') def get_tables(self, key, path_on_disk): connection = sqlite3.connect(path_on_disk)", "base_tree.append({'title': u'Views (' + unicode(len(views)) + u')', 'key': u'view', 'folder':", "= cursor.execute(\"SELECT name FROM sqlite_master WHERE type='\" + key +", "key = unicode(helper.get_request_value(request, 'key')) connection = sqlite3.connect(path_on_disk) cursor = connection.cursor()", "* FROM sqlite_master WHERE type='table';\") cursor.fetchone() # Master Table base_tree.append({'title':", "IPlugin.__init__(self) def activate(self): IPlugin.activate(self) return def deactivate(self): IPlugin.deactivate(self) return def", "for item in row: try: table.append(u' <td>' + unicode(item) +", "result of this plugin to be displayed in a browser\"\"\"", "+ key + \"';\") for table in table_list: tables.append(unicode(table[0])) connection.close()", "type='\" + key + \"';\") for table in table_list: tables.append(unicode(table[0]))", "helper, path_on_disk) logging.error('Unknown method \"' + method + '\" provided')", "self.display_name = 'SQLite Ajax' self.popularity = 0 self.cache = True", "JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree), mimetype='application/json') def get_children(self, request, helper,", "http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(children), mimetype='application/json') def get_tables(self, key, path_on_disk): connection =", 
"connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() tables = [] table_list", "import sqlite3 class FaSqliteAjax(IPlugin): def __init__(self): self.display_name = 'SQLite Ajax'", "elif method == \"children\": return self.get_children(request, helper, path_on_disk) elif method", "import json import logging import sqlite3 class FaSqliteAjax(IPlugin): def __init__(self):", "__init__(self): self.display_name = 'SQLite Ajax' self.popularity = 0 self.cache =", "'folder': True, 'lazy': True }) # Triggers cursor.execute(\"SELECT name FROM", "type='table';\") cursor.fetchone() # Master Table base_tree.append({'title': u'Master Table (1)', 'key':", "return self.get_children(request, helper, path_on_disk) elif method == \"values\": return self.values(request,", "\"' + method + '\" provided') raise ValueError('Method \"' +", "http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree), mimetype='application/json') def get_children(self, request, helper, path_on_disk): key", "False, 'lazy': False }) else: for child in self.get_tables(key, path_on_disk):", "mimetype='application/json') def get_children(self, request, helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key'))", "(1)', 'key': u'sqlite_master', 'folder': False, 'lazy': False }) else: for", "child, 'folder': False, 'lazy': False }) # TODO REPLACE WITH", "method \"' + method + '\" provided') raise ValueError('Method \"'", "plugin to be displayed in a browser\"\"\" method = helper.get_request_value(request,", "children.append({'title': child, 'key': child, 'folder': False, 'lazy': False }) #", "tables.append(unicode(table[0])) connection.close() return tables def values(self, request, helper, path_on_disk): key", "False self.action = False IPlugin.__init__(self) def activate(self): IPlugin.activate(self) return def", "cursor = connection.cursor() base_tree = [] cursor.execute(\"SELECT * FROM sqlite_master", 
"cursor.execute(\"SELECT name FROM sqlite_master WHERE type='view';\") views = cursor.fetchall() base_tree.append({'title':", "if method == \"base\": return self.base_tree(path_on_disk) elif method == \"children\":", "self.action = False IPlugin.__init__(self) def activate(self): IPlugin.activate(self) return def deactivate(self):", "= sqlite3.connect(path_on_disk) cursor = connection.cursor() base_tree = [] cursor.execute(\"SELECT *", "unicode(item) + u'</td>') except: table.append(u' <td>' + unicode(type(item)) + u'</td>')", "try: table.append(u' <td>' + unicode(item) + u'</td>') except: table.append(u' <td>'", "base_tree = [] cursor.execute(\"SELECT * FROM sqlite_master WHERE type='table';\") cursor.fetchone()", "def __init__(self): self.display_name = 'SQLite Ajax' self.popularity = 0 self.cache", "in a browser\"\"\" method = helper.get_request_value(request, 'method', raise_key_error=True) if method", "IPlugin.deactivate(self) return def check(self, evidence, path_on_disk): \"\"\"Checks if the file", "a browser\"\"\" method = helper.get_request_value(request, 'method', raise_key_error=True) if method ==", "}) else: for child in self.get_tables(key, path_on_disk): children.append({'title': child, 'key':", "connection.close() return tables def values(self, request, helper, path_on_disk): key =", "unicode(helper.get_request_value(request, 'key')) connection = sqlite3.connect(path_on_disk) cursor = connection.cursor() cursor.execute(\"pragma table_info('\"", "}) # Indexes cursor.execute(\"SELECT name FROM sqlite_master WHERE type='index';\") indexes", "cursor.execute(\"SELECT name FROM sqlite_master WHERE type='trigger';\") triggers = cursor.fetchall() base_tree.append({'title':", "in table_list: tables.append(unicode(table[0])) connection.close() return tables def values(self, request, helper,", "cursor.fetchall() for row in rows: table.append(u' <tr>') for item in", "* FROM ' + key) rows = cursor.fetchall() for row", "(1)', 'key': u'master', 'folder': True, 
'lazy': True }) # Tables", "compatible with this plugin\"\"\" return True def mimetype(self, mimetype): \"\"\"Returns", "+ method + '\" provided') raise ValueError('Method \"' + method", "'key': u'sqlite_master', 'folder': False, 'lazy': False }) else: for child", "be displayed in a browser\"\"\" method = helper.get_request_value(request, 'method', raise_key_error=True)", "[ u'<table id=\"sqlitet01\" class=\"display\">', u' <thead><tr>' ] for row in", "'folder': True, 'lazy': True }) # Tables cursor.execute(\"SELECT name FROM", "Ajax' self.popularity = 0 self.cache = True self.fast = False", "'\" is not valid') def base_tree(self, path_on_disk): connection = sqlite3.connect(path_on_disk)", "cursor.execute(\"SELECT name FROM sqlite_master WHERE type='index';\") indexes = cursor.fetchall() base_tree.append({'title':", "TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree),", "'folder': True, 'lazy': True }) # Indexes cursor.execute(\"SELECT name FROM", "path_on_disk, request): \"\"\"Returns the result of this plugin to be", "u'</td>') except: table.append(u' <td>' + unicode(type(item)) + u'</td>') table.append(u' </tr>')", "u'Master Table (1)', 'key': u'master', 'folder': True, 'lazy': True })", "logging.error('Unknown method \"' + method + '\" provided') raise ValueError('Method", "the file is compatible with this plugin\"\"\" return True def", "helper, path_on_disk): key = unicode(helper.get_request_value(request, 'key')) connection = sqlite3.connect(path_on_disk) cursor", "rows: table.append(u' <tr>') for item in row: try: table.append(u' <td>'", "u')', 'key': u'trigger', 'folder': True, 'lazy': True }) connection.close() #", "row in rows: table.append(u' <th>' + unicode(row[1]) + u'</th>') table.append(u'", "children.append({'title': u'Master Table (1)', 'key': u'sqlite_master', 'folder': False, 'lazy': False", "u'<table id=\"sqlitet01\" 
class=\"display\">', u' <thead><tr>' ] for row in rows:", "path_on_disk): key = unicode(helper.get_request_value(request, 'key')) connection = sqlite3.connect(path_on_disk) cursor =", "u'trigger', 'folder': True, 'lazy': True }) connection.close() # TODO REPLACE", "valid') def base_tree(self, path_on_disk): connection = sqlite3.connect(path_on_disk) cursor = connection.cursor()", "(' + unicode(len(indexes)) + u')', 'key': u'index', 'folder': True, 'lazy':", "DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask return Response(json.dumps(base_tree), mimetype='application/json') def get_children(self,", "table.append(u' <td>' + unicode(type(item)) + u'</td>') table.append(u' </tr>') table.append(u'</table>') connection.close()", "WHERE type='trigger';\") triggers = cursor.fetchall() base_tree.append({'title': u'Triggers (' + unicode(len(triggers))", "of this plugins get command\"\"\" return \"application/json\" def get(self, evidence,", "return def deactivate(self): IPlugin.deactivate(self) return def check(self, evidence, path_on_disk): \"\"\"Checks", "u'Master Table (1)', 'key': u'sqlite_master', 'folder': False, 'lazy': False })", "IPlugin from flask import Response, jsonify import json import logging", "u')', 'key': u'table', 'folder': True, 'lazy': True }) # Views", "import Response, jsonify import json import logging import sqlite3 class", "+ u'</td>') except: table.append(u' <td>' + unicode(type(item)) + u'</td>') table.append(u'", "return self.values(request, helper, path_on_disk) logging.error('Unknown method \"' + method +", "sqlite_master WHERE type='trigger';\") triggers = cursor.fetchall() base_tree.append({'title': u'Triggers (' +", "u'Indexes (' + unicode(len(indexes)) + u')', 'key': u'index', 'folder': True,", "Indexes cursor.execute(\"SELECT name FROM sqlite_master WHERE type='index';\") indexes = cursor.fetchall()" ]
[ "requests.get(url).content cidades = cidades.decode('utf-8') cidades = json.loads(cidades) for cidade in", "= cidades.decode('utf-8') cidades = json.loads(cidades) for cidade in cidades: codigo,", "= requests.get(url).content cidades = cidades.decode('utf-8') cidades = json.loads(cidades) for cidade", "json url = 'http://educacao.dadosabertosbr.com/api/cidades/ce' cidades = requests.get(url).content cidades = cidades.decode('utf-8')", "requests, json url = 'http://educacao.dadosabertosbr.com/api/cidades/ce' cidades = requests.get(url).content cidades =", "import requests, json url = 'http://educacao.dadosabertosbr.com/api/cidades/ce' cidades = requests.get(url).content cidades", "'http://educacao.dadosabertosbr.com/api/cidades/ce' cidades = requests.get(url).content cidades = cidades.decode('utf-8') cidades = json.loads(cidades)", "cidades.decode('utf-8') cidades = json.loads(cidades) for cidade in cidades: codigo, nome", "= json.loads(cidades) for cidade in cidades: codigo, nome = cidade.split(':')", "cidades = requests.get(url).content cidades = cidades.decode('utf-8') cidades = json.loads(cidades) for", "= 'http://educacao.dadosabertosbr.com/api/cidades/ce' cidades = requests.get(url).content cidades = cidades.decode('utf-8') cidades =", "json.loads(cidades) for cidade in cidades: codigo, nome = cidade.split(':') print(nome)", "url = 'http://educacao.dadosabertosbr.com/api/cidades/ce' cidades = requests.get(url).content cidades = cidades.decode('utf-8') cidades", "cidades = cidades.decode('utf-8') cidades = json.loads(cidades) for cidade in cidades:", "cidades = json.loads(cidades) for cidade in cidades: codigo, nome =", "<filename>raspagem/random/lista_cidades.py import requests, json url = 'http://educacao.dadosabertosbr.com/api/cidades/ce' cidades = requests.get(url).content" ]
[]
[ "min_distance = min(distances) # Get new lookahead index if min_distance", "r_velocity = robot_velocity * \\ (2 - self.cur_curvature * Constants.TRACK_WIDTH)", "points in the path to get the lookahead point given", "min(self.pursuit_points[i].velocity, new_velocity) self.pursuit_points[i].velocity = new_velocity def updateLookaheadPointIndex2(self, state): \"\"\"Update the", "c in zip( self.path.getPoints(), self.path.getCurvatures())] self.last_lookahead_index = 0 self.cur_curvature =", "import SmartDashboard as Dash from autonomous import pursuitpoint class PurePursuit():", "# self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self): \"\"\"Output values to", "(2 + self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV", "= b**2 - (4 * a * c) if discriminant", "end - start center_to_start = start - state a =", "<= 1: return start + t1 * segment_direction return None", "= self.closest_point_index smallest_distance = self.pursuit_points[index].point.getDistance(state) for i in range(0, len(self.pursuit_points)):", "which lookahead function to use self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state) self.updateClosestPointIndex(state.pos)", "the velocities for i in reversed(range(0, len(self.pursuit_points)-1)): distance = self.pursuit_points[i].point.getDistance(", "self.closest_point_index smallest_distance = self.pursuit_points[index].point.getDistance(state) for i in range(0, len(self.pursuit_points)): distance", "= transform.getRotated(-state.angle) # Use the transformed vector to calculate the", "right wheels.\"\"\" robot_velocity = self.pursuit_points[self.closest_point_index].velocity # Use kinematics (http://robotsforroboticists.com/drive-kinematics/) and", "state is more than Constants.LOOKAHEAD_DIST from all points, otherwise uses", "<= 
Constants.CURVATURE_THRESHOLD: velocity = Constants.MAX_VELOCITY else: velocity = min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature)", "state): \"\"\"Update the pure pursuit follower(runs all update functions).\"\"\" #", "current robot position.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point # Transform the lookahead", "transform.x) / Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self, state): \"\"\"Update the index of", "Constants.PURE_PURSUIT_KV scale = max(abs(l_velocity), abs(r_velocity)) if scale > 1: l_velocity", "the curvature and Constants.CURVE_VELOCITY for ppoint in self.pursuit_points: if abs(ppoint.curvature)", "aligned vector transform = lookahead - state.pos transform = transform.getRotated(-state.angle)", "ppoint.point.y) for ppoint in self.pursuit_points] differences = [abs(d-Constants.LOOKAHEAD_DIST) for d", "{}\".format(self.target_velocities)) # print(\"------------------------------\") def isDone(self): \"\"\"Check if the path is", "* \\ (2 - self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2", "(http://robotsforroboticists.com/drive-kinematics/) and algebra to find wheel target velocties l_velocity =", "0 self.target_velocities = vector2d.Vector2D() self.closest_point_index = 0 def computeVelocities(self): \"\"\"Compute", "current robot state. 
Uses the minimum distance point if the", "discriminant) / (2 * a) t1 = (-b + discriminant)", "i in reversed(range(0, len(self.pursuit_points)-1)): distance = self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) new_velocity =", "self.path.getPoints(), self.path.getCurvatures())] self.last_lookahead_index = 0 self.cur_curvature = 0 self.target_velocities =", "{}\".format(self.cur_curvature)) #print(\"Closes Point - {}\".format(closest)) #print(\"Target Velocities - {}\".format(self.target_velocities)) #", "<= 1: return start + t0 * segment_direction if t1", "all update functions).\"\"\" # TODO which lookahead function to use", "get the lookahead point given the current robot state.\"\"\" for", "segment_direction b = 2 * (center_to_start * segment_direction) c =", "Dash.putNumberArray(\"Lookahead Point\", [lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes Point\", [closest.x, closest.y])", "= differences.index(min(differences)) else: self.last_lookahead_index = distances.index(min_distance) def updateLookaheadPointIndex(self, state): \"\"\"Loop", "point given the current robot state.\"\"\" for i in range(self.last_lookahead_index,", "wpilib import SmartDashboard as Dash from autonomous import pursuitpoint class", "if the state is more than Constants.LOOKAHEAD_DIST from all points,", "all points, otherwise uses the closes point to self.loohead_distance\"\"\" #", "= i def computeLookaheadPoint(self, start, end, state): \"\"\"Compute the lookahead", "* Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV r_velocity = robot_velocity", "- (4 * a * c) if discriminant < 0:", "lookahead point given the current robot state. 
Uses the minimum", "/= scale r_velocity /= scale self.target_velocities = vector2d.Vector2D(l_velocity, r_velocity) def", "start + t1 * segment_direction return None def updateCurvature(self, state):", "/ Constants.PURE_PURSUIT_KV r_velocity = robot_velocity * \\ (2 - self.cur_curvature", "is done being followed.\"\"\" return (len(self.pursuit_points) - self.closest_point_index) <= 1", "self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\", [lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes Point\", [closest.x,", "distances] min_distance = min(distances) # Get new lookahead index if", "self.pursuit_points[self.last_lookahead_index].point closest = self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\", [lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature)", "if the path is done being followed.\"\"\" return (len(self.pursuit_points) -", "vector2d.Vector2D() self.closest_point_index = 0 def computeVelocities(self): \"\"\"Compute the velocities along", "point if the current state is Constants.LOOKAHEAD_DIST from between start", "the target velocities of the left and right wheels.\"\"\" robot_velocity", "as Dash from autonomous import pursuitpoint class PurePursuit(): \"\"\"An implementation", "= vector2d.Vector2D() self.closest_point_index = 0 def computeVelocities(self): \"\"\"Compute the velocities", "self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) new_velocity = math.sqrt( self.pursuit_points[i+1].velocity**2 + (2 * Constants.MAX_ACCELERATION", "closes point to self.loohead_distance\"\"\" # Compute point distances to state", "# Compute point distances to state and differences from those", "2 * (center_to_start * segment_direction) c = (center_to_start * center_to_start)", "= Constants.MAX_VELOCITY else: velocity = min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) 
ppoint.velocity = velocity", "1: return start + t1 * segment_direction return None def", "point to the current robot position.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point #", "lookahead - state.pos transform = transform.getRotated(-state.angle) # Use the transformed", "else: self.last_lookahead_index = distances.index(min_distance) def updateLookaheadPointIndex(self, state): \"\"\"Loop over the", "velocity = min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity = velocity # Limit the", "if the current state is Constants.LOOKAHEAD_DIST from between start and", "from utils import vector2d from wpilib import SmartDashboard as Dash", "for i in range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead = self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point,", "return None def updateCurvature(self, state): \"\"\"Update the curvature from the", "Dash.putNumberArray(\"Closes Point\", [closest.x, closest.y]) Dash.putNumberArray(\"Target Velocities\", [ self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead", ">= 0 and t0 <= 1: return start + t0", "path self.pursuit_points = [pursuitpoint.PursuitPoint(p, c) for p, c in zip(", "center_to_start) - Constants.LOOKAHEAD_DIST ** 2 discriminant = b**2 - (4", "i self.closest_point_index = index def updateTargetVelocities(self, state): \"\"\"Update the target", "- state.pos transform = transform.getRotated(-state.angle) # Use the transformed vector", "\"\"\"Check if the path is done being followed.\"\"\" return (len(self.pursuit_points)", "self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV scale =", "distance = self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) new_velocity = math.sqrt( self.pursuit_points[i+1].velocity**2 + (2", "* Constants.MAX_ACCELERATION * distance)) new_velocity = min(self.pursuit_points[i].velocity, new_velocity) 
self.pursuit_points[i].velocity =", "values to the smart dashboard.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point closest =", "import Constants from utils import vector2d from wpilib import SmartDashboard", "t1 >= 0 and t1 <= 1: return start +", "point to self.loohead_distance\"\"\" # Compute point distances to state and", "0: return None else: discriminant = math.sqrt(discriminant) t0 = (-b", "self.path = path self.pursuit_points = [pursuitpoint.PursuitPoint(p, c) for p, c", "None.\"\"\" # Algorithm for circle line segment intersection found here:", "def computeLookaheadPoint(self, start, end, state): \"\"\"Compute the lookahead point given", "to self.loohead_distance\"\"\" # Compute point distances to state and differences", "0 def computeVelocities(self): \"\"\"Compute the velocities along the path.\"\"\" #", "= [math.hypot(state.x - ppoint.point.x, state.y - ppoint.point.y) for ppoint in", "a) if t0 >= 0 and t0 <= 1: return", "velocities along the path using the curvature and Constants.CURVE_VELOCITY for", "def updateClosestPointIndex(self, state): \"\"\"Update the index of the closest point", "current robot position.\"\"\" index = self.closest_point_index smallest_distance = self.pursuit_points[index].point.getDistance(state) for", "Point\", [lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes Point\", [closest.x, closest.y]) Dash.putNumberArray(\"Target", "self.target_velocities = vector2d.Vector2D() self.closest_point_index = 0 def computeVelocities(self): \"\"\"Compute the", "calculate the curvature (derived from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature = (2 *", "= end - start center_to_start = start - state a", "if discriminant < 0: return None else: discriminant = math.sqrt(discriminant)", "discriminant) / (2 * a) if t0 >= 0 and", "/ Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self, state): 
\"\"\"Update the index of the", "discriminant < 0: return None else: discriminant = math.sqrt(discriminant) t0", "* distance)) new_velocity = min(self.pursuit_points[i].velocity, new_velocity) self.pursuit_points[i].velocity = new_velocity def", "robot state. Uses the minimum distance point if the state", "- {}\".format(lookahead)) #print(\"Curvature - {}\".format(self.cur_curvature)) #print(\"Closes Point - {}\".format(closest)) #print(\"Target", "print(\"------------------------------\") def isDone(self): \"\"\"Check if the path is done being", "segment_direction = end - start center_to_start = start - state", "\"\"\"Loop over the points in the path to get the", "robot state.\"\"\" for i in range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead = self.computeLookaheadPoint(", "min_distance <= Constants.LOOKAHEAD_DIST: self.last_lookahead_index = differences.index(min(differences)) else: self.last_lookahead_index = distances.index(min_distance)", "computeVelocities(self): \"\"\"Compute the velocities along the path.\"\"\" # Compute the", "curvature from the current lookahead point to the current robot", "outputToSmartDashboard(self): \"\"\"Output values to the smart dashboard.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point", "distances = [math.hypot(state.x - ppoint.point.x, state.y - ppoint.point.y) for ppoint", "updateLookaheadPointIndex(self, state): \"\"\"Loop over the points in the path to", "new_velocity) self.pursuit_points[i].velocity = new_velocity def updateLookaheadPointIndex2(self, state): \"\"\"Update the lookahead", "Constants.LOOKAHEAD_DIST distances = [math.hypot(state.x - ppoint.point.x, state.y - ppoint.point.y) for", "__init__(self, path): self.path = path self.pursuit_points = [pursuitpoint.PursuitPoint(p, c) for", "current robot state.\"\"\" for i in range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead =", "state. 
Uses the minimum distance point if the state is", "for ppoint in self.pursuit_points] differences = [abs(d-Constants.LOOKAHEAD_DIST) for d in", "autonomous import pursuitpoint class PurePursuit(): \"\"\"An implementation of the Pure", "if abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD: velocity = Constants.MAX_VELOCITY else: velocity =", "- discriminant) / (2 * a) t1 = (-b +", "Returns a point if the current state is Constants.LOOKAHEAD_DIST from", "otherwise uses the closes point to self.loohead_distance\"\"\" # Compute point", "/ (2 * a) t1 = (-b + discriminant) /", "lookahead = self.pursuit_points[self.last_lookahead_index].point # Transform the lookahead and state.pos to", "len(self.pursuit_points)): distance = self.pursuit_points[i].point.getDistance(state) if smallest_distance > distance: smallest_distance =", "/ \\ 2 / Constants.PURE_PURSUIT_KV scale = max(abs(l_velocity), abs(r_velocity)) if", "updateTargetVelocities(self, state): \"\"\"Update the target velocities of the left and", "state.pos to get an aligned vector transform = lookahead -", "to get an aligned vector transform = lookahead - state.pos", "scale = max(abs(l_velocity), abs(r_velocity)) if scale > 1: l_velocity /=", "0 self.cur_curvature = 0 self.target_velocities = vector2d.Vector2D() self.closest_point_index = 0", "# TODO which lookahead function to use self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos)", "updateLookaheadPointIndex2(self, state): \"\"\"Update the lookahead point given the current robot", "index def updateTargetVelocities(self, state): \"\"\"Update the target velocities of the", "state): \"\"\"Update the lookahead point given the current robot state.", "self.last_lookahead_index = differences.index(min(differences)) else: self.last_lookahead_index = distances.index(min_distance) def updateLookaheadPointIndex(self, state):", "Constants.LOOKAHEAD_DIST from between start and end, otherwise returns None.\"\"\" #", "end, state): 
\"\"\"Compute the lookahead point given the current robot", "self.closest_point_index = 0 def computeVelocities(self): \"\"\"Compute the velocities along the", "in range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead = self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point, state) if", "0 and t1 <= 1: return start + t1 *", "line segment intersection found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction = end -", "path tracking algorithm.\"\"\" def __init__(self, path): self.path = path self.pursuit_points", "= [pursuitpoint.PursuitPoint(p, c) for p, c in zip( self.path.getPoints(), self.path.getCurvatures())]", "and algebra to find wheel target velocties l_velocity = robot_velocity", "scale > 1: l_velocity /= scale r_velocity /= scale self.target_velocities", "in reversed(range(0, len(self.pursuit_points)-1)): distance = self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) new_velocity = math.sqrt(", "Velocities\", [ self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead Point - {}\".format(lookahead)) #print(\"Curvature -", "to the current robot position.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point # Transform", "= self.pursuit_points[self.last_lookahead_index].point # Transform the lookahead and state.pos to get", "= robot_velocity * \\ (2 - self.cur_curvature * Constants.TRACK_WIDTH) /", "\"\"\"Output values to the smart dashboard.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point closest", "vector to calculate the curvature (derived from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature =", "- ppoint.point.x, state.y - ppoint.point.y) for ppoint in self.pursuit_points] differences", "between start and end, otherwise returns None.\"\"\" # Algorithm for", "= (-b - discriminant) 
/ (2 * a) t1 =", "\\ (2 - self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 /", "current lookahead point to the current robot position.\"\"\" lookahead =", "Velocities - {}\".format(self.target_velocities)) # print(\"------------------------------\") def isDone(self): \"\"\"Check if the", "= [abs(d-Constants.LOOKAHEAD_DIST) for d in distances] min_distance = min(distances) #", "= 0 self.target_velocities = vector2d.Vector2D() self.closest_point_index = 0 def computeVelocities(self):", "point if the state is more than Constants.LOOKAHEAD_DIST from all", "t0 >= 0 and t0 <= 1: return start +", "self.pursuit_points[i].point.getDistance(state) if smallest_distance > distance: smallest_distance = distance index =", "= max(abs(l_velocity), abs(r_velocity)) if scale > 1: l_velocity /= scale", "state a = segment_direction * segment_direction b = 2 *", "the current robot state. Uses the minimum distance point if", "smallest_distance > distance: smallest_distance = distance index = i self.closest_point_index", "self.cur_curvature) Dash.putNumberArray(\"Closes Point\", [closest.x, closest.y]) Dash.putNumberArray(\"Target Velocities\", [ self.target_velocities.x, self.target_velocities.y])", "= self.pursuit_points[index].point.getDistance(state) for i in range(0, len(self.pursuit_points)): distance = self.pursuit_points[i].point.getDistance(state)", "curvature and Constants.CURVE_VELOCITY for ppoint in self.pursuit_points: if abs(ppoint.curvature) <=", "Constants.MAX_VELOCITY else: velocity = min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity = velocity #", "point given the current robot state. Returns a point if", "from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature = (2 * transform.x) / Constants.LOOKAHEAD_DIST**2 def", "the current robot state. 
Returns a point if the current", "state.y - ppoint.point.y) for ppoint in self.pursuit_points] differences = [abs(d-Constants.LOOKAHEAD_DIST)", "the current lookahead point to the current robot position.\"\"\" lookahead", "closest = self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\", [lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes", "- {}\".format(self.cur_curvature)) #print(\"Closes Point - {}\".format(closest)) #print(\"Target Velocities - {}\".format(self.target_velocities))", "# print(\"------------------------------\") def isDone(self): \"\"\"Check if the path is done", "those distances to Constants.LOOKAHEAD_DIST distances = [math.hypot(state.x - ppoint.point.x, state.y", "Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV r_velocity = robot_velocity *", "a * c) if discriminant < 0: return None else:", "scale self.target_velocities = vector2d.Vector2D(l_velocity, r_velocity) def update(self, state): \"\"\"Update the", "b = 2 * (center_to_start * segment_direction) c = (center_to_start", "algebra to find wheel target velocties l_velocity = robot_velocity *", "differences.index(min(differences)) else: self.last_lookahead_index = distances.index(min_distance) def updateLookaheadPointIndex(self, state): \"\"\"Loop over", "of the closest point to the current robot position.\"\"\" index", "and t0 <= 1: return start + t0 * segment_direction", "in self.pursuit_points] differences = [abs(d-Constants.LOOKAHEAD_DIST) for d in distances] min_distance", "Constants from utils import vector2d from wpilib import SmartDashboard as", "+ t0 * segment_direction if t1 >= 0 and t1", "path using the curvature and Constants.CURVE_VELOCITY for ppoint in self.pursuit_points:", "!= None: self.last_lookahead_index = i def computeLookaheadPoint(self, start, end, state):", "self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos) 
self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self): \"\"\"Output values", "= i self.closest_point_index = index def updateTargetVelocities(self, state): \"\"\"Update the", "position.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point # Transform the lookahead and state.pos", "velocity # Limit the acceleration of the velocities for i", "velocities for i in reversed(range(0, len(self.pursuit_points)-1)): distance = self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point)", "pursuitpoint class PurePursuit(): \"\"\"An implementation of the Pure Pursuit path", "smallest_distance = self.pursuit_points[index].point.getDistance(state) for i in range(0, len(self.pursuit_points)): distance =", "in range(0, len(self.pursuit_points)): distance = self.pursuit_points[i].point.getDistance(state) if smallest_distance > distance:", "the pure pursuit follower(runs all update functions).\"\"\" # TODO which", "distances to state and differences from those distances to Constants.LOOKAHEAD_DIST", "the acceleration of the velocities for i in reversed(range(0, len(self.pursuit_points)-1)):", "given the current robot state. 
Returns a point if the", "import math from constants import Constants from utils import vector2d", "the current robot position.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point # Transform the", "velocties l_velocity = robot_velocity * \\ (2 + self.cur_curvature *", "update(self, state): \"\"\"Update the pure pursuit follower(runs all update functions).\"\"\"", "(4 * a * c) if discriminant < 0: return", "import pursuitpoint class PurePursuit(): \"\"\"An implementation of the Pure Pursuit", "def updateLookaheadPointIndex(self, state): \"\"\"Loop over the points in the path", "lookahead = self.pursuit_points[self.last_lookahead_index].point closest = self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\", [lookahead.x, lookahead.y])", "b**2 - (4 * a * c) if discriminant <", "closest point to the current robot position.\"\"\" index = self.closest_point_index", "from constants import Constants from utils import vector2d from wpilib", "start center_to_start = start - state a = segment_direction *", "\"\"\"Update the lookahead point given the current robot state. 
Uses", "* \\ (2 + self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2", "lookahead index if min_distance <= Constants.LOOKAHEAD_DIST: self.last_lookahead_index = differences.index(min(differences)) else:", "[lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes Point\", [closest.x, closest.y]) Dash.putNumberArray(\"Target Velocities\",", "None def updateCurvature(self, state): \"\"\"Update the curvature from the current", "to find wheel target velocties l_velocity = robot_velocity * \\", "target velocties l_velocity = robot_velocity * \\ (2 + self.cur_curvature", "of the velocities for i in reversed(range(0, len(self.pursuit_points)-1)): distance =", "range(0, len(self.pursuit_points)): distance = self.pursuit_points[i].point.getDistance(state) if smallest_distance > distance: smallest_distance", "Point - {}\".format(closest)) #print(\"Target Velocities - {}\".format(self.target_velocities)) # print(\"------------------------------\") def", "in zip( self.path.getPoints(), self.path.getCurvatures())] self.last_lookahead_index = 0 self.cur_curvature = 0", "None: self.last_lookahead_index = i def computeLookaheadPoint(self, start, end, state): \"\"\"Compute", "2 / Constants.PURE_PURSUIT_KV r_velocity = robot_velocity * \\ (2 -", "r_velocity /= scale self.target_velocities = vector2d.Vector2D(l_velocity, r_velocity) def update(self, state):", "otherwise returns None.\"\"\" # Algorithm for circle line segment intersection", "the current state is Constants.LOOKAHEAD_DIST from between start and end,", "from those distances to Constants.LOOKAHEAD_DIST distances = [math.hypot(state.x - ppoint.point.x,", "\"\"\"Update the pure pursuit follower(runs all update functions).\"\"\" # TODO", "= start - state a = segment_direction * segment_direction b", "t1 * segment_direction return None def updateCurvature(self, state): \"\"\"Update the", "returns None.\"\"\" # Algorithm for circle line segment intersection found", "the 
curvature from the current lookahead point to the current", "\"\"\"Update the curvature from the current lookahead point to the", "index = self.closest_point_index smallest_distance = self.pursuit_points[index].point.getDistance(state) for i in range(0,", "index of the closest point to the current robot position.\"\"\"", "Use kinematics (http://robotsforroboticists.com/drive-kinematics/) and algebra to find wheel target velocties", "class PurePursuit(): \"\"\"An implementation of the Pure Pursuit path tracking", "to the smart dashboard.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point closest = self.pursuit_points[self.closest_point_index].point", "if t0 >= 0 and t0 <= 1: return start", "self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead Point - {}\".format(lookahead)) #print(\"Curvature - {}\".format(self.cur_curvature)) #print(\"Closes", "Constants.LOOKAHEAD_DIST: self.last_lookahead_index = differences.index(min(differences)) else: self.last_lookahead_index = distances.index(min_distance) def updateLookaheadPointIndex(self,", "(2 * a) t1 = (-b + discriminant) / (2", "Constants.CURVATURE_THRESHOLD: velocity = Constants.MAX_VELOCITY else: velocity = min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity", "to calculate the curvature (derived from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature = (2", "* Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV scale = max(abs(l_velocity),", "Compute point distances to state and differences from those distances", "transform = lookahead - state.pos transform = transform.getRotated(-state.angle) # Use", "p, c in zip( self.path.getPoints(), self.path.getCurvatures())] self.last_lookahead_index = 0 self.cur_curvature", "the path using the curvature and Constants.CURVE_VELOCITY for ppoint in", "state is Constants.LOOKAHEAD_DIST from between start and end, otherwise returns", 
"distance = self.pursuit_points[i].point.getDistance(state) if smallest_distance > distance: smallest_distance = distance", "pure pursuit follower(runs all update functions).\"\"\" # TODO which lookahead", "= math.sqrt(discriminant) t0 = (-b - discriminant) / (2 *", "> 1: l_velocity /= scale r_velocity /= scale self.target_velocities =", "math.sqrt(discriminant) t0 = (-b - discriminant) / (2 * a)", "closest.y]) Dash.putNumberArray(\"Target Velocities\", [ self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead Point - {}\".format(lookahead))", "updateCurvature(self, state): \"\"\"Update the curvature from the current lookahead point", "circle line segment intersection found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction = end", "and right wheels.\"\"\" robot_velocity = self.pursuit_points[self.closest_point_index].velocity # Use kinematics (http://robotsforroboticists.com/drive-kinematics/)", "found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction = end - start center_to_start =", "point to the current robot position.\"\"\" index = self.closest_point_index smallest_distance", "+ (2 * Constants.MAX_ACCELERATION * distance)) new_velocity = min(self.pursuit_points[i].velocity, new_velocity)", "state.pos transform = transform.getRotated(-state.angle) # Use the transformed vector to", "smart dashboard.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point closest = self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\",", "= lookahead - state.pos transform = transform.getRotated(-state.angle) # Use the", "math from constants import Constants from utils import vector2d from", "vector2d.Vector2D(l_velocity, r_velocity) def update(self, state): \"\"\"Update the pure pursuit follower(runs", "point given the current robot state. 
Uses the minimum distance", "start and end, otherwise returns None.\"\"\" # Algorithm for circle", "path.\"\"\" # Compute the velocities along the path using the", "the index of the closest point to the current robot", "2 / Constants.PURE_PURSUIT_KV scale = max(abs(l_velocity), abs(r_velocity)) if scale >", "max(abs(l_velocity), abs(r_velocity)) if scale > 1: l_velocity /= scale r_velocity", "= distances.index(min_distance) def updateLookaheadPointIndex(self, state): \"\"\"Loop over the points in", "target velocities of the left and right wheels.\"\"\" robot_velocity =", "start + t0 * segment_direction if t1 >= 0 and", "utils import vector2d from wpilib import SmartDashboard as Dash from", "self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self): \"\"\"Output values to the smart dashboard.\"\"\"", "robot position.\"\"\" index = self.closest_point_index smallest_distance = self.pursuit_points[index].point.getDistance(state) for i", "Constants.MAX_ACCELERATION * distance)) new_velocity = min(self.pursuit_points[i].velocity, new_velocity) self.pursuit_points[i].velocity = new_velocity", "robot position.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point # Transform the lookahead and", "over the points in the path to get the lookahead", "self.cur_curvature = (2 * transform.x) / Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self, state):", "velocities along the path.\"\"\" # Compute the velocities along the", "(2 - self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV", "and differences from those distances to Constants.LOOKAHEAD_DIST distances = [math.hypot(state.x", "robot_velocity * \\ (2 - self.cur_curvature * Constants.TRACK_WIDTH) / \\", "- self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV scale", "acceleration of the velocities for i in reversed(range(0, len(self.pursuit_points)-1)): distance", "Constants.LOOKAHEAD_DIST from 
all points, otherwise uses the closes point to", "= robot_velocity * \\ (2 + self.cur_curvature * Constants.TRACK_WIDTH) /", "and Constants.CURVE_VELOCITY for ppoint in self.pursuit_points: if abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD:", "lookahead and state.pos to get an aligned vector transform =", "Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV scale = max(abs(l_velocity), abs(r_velocity))", "start - state a = segment_direction * segment_direction b =", "get an aligned vector transform = lookahead - state.pos transform", "self.last_lookahead_index = distances.index(min_distance) def updateLookaheadPointIndex(self, state): \"\"\"Loop over the points", "for i in reversed(range(0, len(self.pursuit_points)-1)): distance = self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) new_velocity", "self.last_lookahead_index = 0 self.cur_curvature = 0 self.target_velocities = vector2d.Vector2D() self.closest_point_index", "scale r_velocity /= scale self.target_velocities = vector2d.Vector2D(l_velocity, r_velocity) def update(self,", "constants import Constants from utils import vector2d from wpilib import", "from autonomous import pursuitpoint class PurePursuit(): \"\"\"An implementation of the", "the velocities along the path.\"\"\" # Compute the velocities along", "self.pursuit_points[i].velocity = new_velocity def updateLookaheadPointIndex2(self, state): \"\"\"Update the lookahead point", "lookahead = self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point, state) if lookahead != None:", "Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity = velocity # Limit the acceleration of the", "lookahead != None: self.last_lookahead_index = i def computeLookaheadPoint(self, start, end,", "self.pursuit_points[i+1].velocity**2 + (2 * Constants.MAX_ACCELERATION * distance)) new_velocity = min(self.pursuit_points[i].velocity,", "state) if lookahead != None: self.last_lookahead_index = i def 
computeLookaheadPoint(self,", "center_to_start = start - state a = segment_direction * segment_direction", "tracking algorithm.\"\"\" def __init__(self, path): self.path = path self.pursuit_points =", "if min_distance <= Constants.LOOKAHEAD_DIST: self.last_lookahead_index = differences.index(min(differences)) else: self.last_lookahead_index =", "t0 <= 1: return start + t0 * segment_direction if", "def computeVelocities(self): \"\"\"Compute the velocities along the path.\"\"\" # Compute", "else: velocity = min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity = velocity # Limit", "more than Constants.LOOKAHEAD_DIST from all points, otherwise uses the closes", "distance: smallest_distance = distance index = i self.closest_point_index = index", "# Use kinematics (http://robotsforroboticists.com/drive-kinematics/) and algebra to find wheel target", "/ \\ 2 / Constants.PURE_PURSUIT_KV r_velocity = robot_velocity * \\", "# Algorithm for circle line segment intersection found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899", "follower(runs all update functions).\"\"\" # TODO which lookahead function to", "to use self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self):", "self.path.getCurvatures())] self.last_lookahead_index = 0 self.cur_curvature = 0 self.target_velocities = vector2d.Vector2D()", "= 0 self.cur_curvature = 0 self.target_velocities = vector2d.Vector2D() self.closest_point_index =", "def __init__(self, path): self.path = path self.pursuit_points = [pursuitpoint.PursuitPoint(p, c)", "differences from those distances to Constants.LOOKAHEAD_DIST distances = [math.hypot(state.x -", "state): \"\"\"Update the index of the closest point to the", "- {}\".format(self.target_velocities)) # 
print(\"------------------------------\") def isDone(self): \"\"\"Check if the path", "self.target_velocities = vector2d.Vector2D(l_velocity, r_velocity) def update(self, state): \"\"\"Update the pure", "def updateCurvature(self, state): \"\"\"Update the curvature from the current lookahead", "* a) t1 = (-b + discriminant) / (2 *", "distances to Constants.LOOKAHEAD_DIST distances = [math.hypot(state.x - ppoint.point.x, state.y -", "function to use self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def", "\"\"\"An implementation of the Pure Pursuit path tracking algorithm.\"\"\" def", "Compute the velocities along the path using the curvature and", "than Constants.LOOKAHEAD_DIST from all points, otherwise uses the closes point", "points, otherwise uses the closes point to self.loohead_distance\"\"\" # Compute", "to state and differences from those distances to Constants.LOOKAHEAD_DIST distances", "self.pursuit_points[self.closest_point_index].velocity # Use kinematics (http://robotsforroboticists.com/drive-kinematics/) and algebra to find wheel", "[closest.x, closest.y]) Dash.putNumberArray(\"Target Velocities\", [ self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead Point -", "= vector2d.Vector2D(l_velocity, r_velocity) def update(self, state): \"\"\"Update the pure pursuit", "for ppoint in self.pursuit_points: if abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD: velocity =", "def updateLookaheadPointIndex2(self, state): \"\"\"Update the lookahead point given the current", "+ t1 * segment_direction return None def updateCurvature(self, state): \"\"\"Update", "Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes Point\", [closest.x, closest.y]) Dash.putNumberArray(\"Target Velocities\", [ self.target_velocities.x,", "d in distances] min_distance = min(distances) # Get new 
lookahead", "/ (2 * a) if t0 >= 0 and t0", "#print(\"Lookahead Point - {}\".format(lookahead)) #print(\"Curvature - {}\".format(self.cur_curvature)) #print(\"Closes Point -", "from all points, otherwise uses the closes point to self.loohead_distance\"\"\"", "the closest point to the current robot position.\"\"\" index =", "+ self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV r_velocity", "to get the lookahead point given the current robot state.\"\"\"", "use self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self): \"\"\"Output", "= self.pursuit_points[self.closest_point_index].velocity # Use kinematics (http://robotsforroboticists.com/drive-kinematics/) and algebra to find", "computeLookaheadPoint(self, start, end, state): \"\"\"Compute the lookahead point given the", "update functions).\"\"\" # TODO which lookahead function to use self.updateLookaheadPointIndex(state.pos)", "= min(self.pursuit_points[i].velocity, new_velocity) self.pursuit_points[i].velocity = new_velocity def updateLookaheadPointIndex2(self, state): \"\"\"Update", "Uses the minimum distance point if the state is more", "a point if the current state is Constants.LOOKAHEAD_DIST from between", "intersection found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction = end - start center_to_start", "start, end, state): \"\"\"Compute the lookahead point given the current", "the lookahead and state.pos to get an aligned vector transform", "= min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity = velocity # Limit the acceleration", "the minimum distance point if the state is more than", "implementation of the Pure Pursuit path tracking algorithm.\"\"\" def __init__(self,", "is more than 
Constants.LOOKAHEAD_DIST from all points, otherwise uses the", "if lookahead != None: self.last_lookahead_index = i def computeLookaheadPoint(self, start,", "* a * c) if discriminant < 0: return None", "# Use the transformed vector to calculate the curvature (derived", "t0 = (-b - discriminant) / (2 * a) t1", "= self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\", [lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes Point\",", "{}\".format(lookahead)) #print(\"Curvature - {}\".format(self.cur_curvature)) #print(\"Closes Point - {}\".format(closest)) #print(\"Target Velocities", "state): \"\"\"Update the target velocities of the left and right", "\"\"\"Update the index of the closest point to the current", "c) if discriminant < 0: return None else: discriminant =", "given the current robot state.\"\"\" for i in range(self.last_lookahead_index, len(self.pursuit_points)-1):", "/= scale self.target_velocities = vector2d.Vector2D(l_velocity, r_velocity) def update(self, state): \"\"\"Update", "robot_velocity * \\ (2 + self.cur_curvature * Constants.TRACK_WIDTH) / \\", "differences = [abs(d-Constants.LOOKAHEAD_DIST) for d in distances] min_distance = min(distances)", "robot_velocity = self.pursuit_points[self.closest_point_index].velocity # Use kinematics (http://robotsforroboticists.com/drive-kinematics/) and algebra to", "the points in the path to get the lookahead point", "segment_direction) c = (center_to_start * center_to_start) - Constants.LOOKAHEAD_DIST ** 2", "\"\"\"Compute the lookahead point given the current robot state. 
Returns", "if t1 >= 0 and t1 <= 1: return start", "range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead = self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point, state) if lookahead", "self.last_lookahead_index = i def computeLookaheadPoint(self, start, end, state): \"\"\"Compute the", "* segment_direction b = 2 * (center_to_start * segment_direction) c", "2 discriminant = b**2 - (4 * a * c)", "min(distances) # Get new lookahead index if min_distance <= Constants.LOOKAHEAD_DIST:", "# Transform the lookahead and state.pos to get an aligned", "from wpilib import SmartDashboard as Dash from autonomous import pursuitpoint", "lookahead point given the current robot state. Returns a point", "the current robot state.\"\"\" for i in range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead", "distance point if the state is more than Constants.LOOKAHEAD_DIST from", "discriminant = b**2 - (4 * a * c) if", "self.closest_point_index = index def updateTargetVelocities(self, state): \"\"\"Update the target velocities", "for d in distances] min_distance = min(distances) # Get new", "left and right wheels.\"\"\" robot_velocity = self.pursuit_points[self.closest_point_index].velocity # Use kinematics", "an aligned vector transform = lookahead - state.pos transform =", "state): \"\"\"Compute the lookahead point given the current robot state.", "= self.pursuit_points[i].point.getDistance(state) if smallest_distance > distance: smallest_distance = distance index", "- state a = segment_direction * segment_direction b = 2", "def outputToSmartDashboard(self): \"\"\"Output values to the smart dashboard.\"\"\" lookahead =", "velocities of the left and right wheels.\"\"\" robot_velocity = self.pursuit_points[self.closest_point_index].velocity", "return start + t1 * segment_direction return None def updateCurvature(self,", "and state.pos to get an aligned vector transform = lookahead", "self.pursuit_points] differences = 
[abs(d-Constants.LOOKAHEAD_DIST) for d in distances] min_distance =", "segment_direction if t1 >= 0 and t1 <= 1: return", "self.pursuit_points[i+1].point) new_velocity = math.sqrt( self.pursuit_points[i+1].velocity**2 + (2 * Constants.MAX_ACCELERATION *", "Dash.putNumberArray(\"Target Velocities\", [ self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead Point - {}\".format(lookahead)) #print(\"Curvature", "smallest_distance = distance index = i self.closest_point_index = index def", "self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self): \"\"\"Output values to the smart dashboard.\"\"\" lookahead", "#print(\"Closes Point - {}\".format(closest)) #print(\"Target Velocities - {}\".format(self.target_velocities)) # print(\"------------------------------\")", "- {}\".format(closest)) #print(\"Target Velocities - {}\".format(self.target_velocities)) # print(\"------------------------------\") def isDone(self):", "vector transform = lookahead - state.pos transform = transform.getRotated(-state.angle) #", "<= Constants.LOOKAHEAD_DIST: self.last_lookahead_index = differences.index(min(differences)) else: self.last_lookahead_index = distances.index(min_distance) def", "Point - {}\".format(lookahead)) #print(\"Curvature - {}\".format(self.cur_curvature)) #print(\"Closes Point - {}\".format(closest))", "Pursuit path tracking algorithm.\"\"\" def __init__(self, path): self.path = path", "velocity = Constants.MAX_VELOCITY else: velocity = min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity =", "i in range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead = self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point, state)", "# Compute the velocities along the path using the curvature", "index if min_distance <= Constants.LOOKAHEAD_DIST: self.last_lookahead_index = differences.index(min(differences)) else: self.last_lookahead_index", "from the current lookahead 
point to the current robot position.\"\"\"", "Dash from autonomous import pursuitpoint class PurePursuit(): \"\"\"An implementation of", "the state is more than Constants.LOOKAHEAD_DIST from all points, otherwise", "dashboard.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point closest = self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\", [lookahead.x,", "= math.sqrt( self.pursuit_points[i+1].velocity**2 + (2 * Constants.MAX_ACCELERATION * distance)) new_velocity", "current state is Constants.LOOKAHEAD_DIST from between start and end, otherwise", "def update(self, state): \"\"\"Update the pure pursuit follower(runs all update", "self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point, state) if lookahead != None: self.last_lookahead_index =", "self.pursuit_points[i+1].point, state) if lookahead != None: self.last_lookahead_index = i def", "(2 * transform.x) / Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self, state): \"\"\"Update the", "+ discriminant) / (2 * a) if t0 >= 0", "Pure Pursuit path tracking algorithm.\"\"\" def __init__(self, path): self.path =", "\"\"\"Update the target velocities of the left and right wheels.\"\"\"", "self.loohead_distance\"\"\" # Compute point distances to state and differences from", "point distances to state and differences from those distances to", "= velocity # Limit the acceleration of the velocities for", "* a) if t0 >= 0 and t0 <= 1:", "- Constants.LOOKAHEAD_DIST ** 2 discriminant = b**2 - (4 *", "- ppoint.point.y) for ppoint in self.pursuit_points] differences = [abs(d-Constants.LOOKAHEAD_DIST) for", "(center_to_start * center_to_start) - Constants.LOOKAHEAD_DIST ** 2 discriminant = b**2", "the lookahead point given the current robot state. 
Uses the", "Transform the lookahead and state.pos to get an aligned vector", "path): self.path = path self.pursuit_points = [pursuitpoint.PursuitPoint(p, c) for p,", "the path to get the lookahead point given the current", "from between start and end, otherwise returns None.\"\"\" # Algorithm", "#print(\"Curvature - {}\".format(self.cur_curvature)) #print(\"Closes Point - {}\".format(closest)) #print(\"Target Velocities -", "= (center_to_start * center_to_start) - Constants.LOOKAHEAD_DIST ** 2 discriminant =", "SmartDashboard as Dash from autonomous import pursuitpoint class PurePursuit(): \"\"\"An", "the lookahead point given the current robot state. Returns a", ">= 0 and t1 <= 1: return start + t1", "/ Constants.PURE_PURSUIT_KV scale = max(abs(l_velocity), abs(r_velocity)) if scale > 1:", "find wheel target velocties l_velocity = robot_velocity * \\ (2", "state): \"\"\"Update the curvature from the current lookahead point to", "given the current robot state. Uses the minimum distance point", "= self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) new_velocity = math.sqrt( self.pursuit_points[i+1].velocity**2 + (2 *", "[math.hypot(state.x - ppoint.point.x, state.y - ppoint.point.y) for ppoint in self.pursuit_points]", "ppoint in self.pursuit_points: if abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD: velocity = Constants.MAX_VELOCITY", "ppoint.point.x, state.y - ppoint.point.y) for ppoint in self.pursuit_points] differences =", "state.\"\"\" for i in range(self.last_lookahead_index, len(self.pursuit_points)-1): lookahead = self.computeLookaheadPoint( self.pursuit_points[i].point,", "is Constants.LOOKAHEAD_DIST from between start and end, otherwise returns None.\"\"\"", "to the current robot position.\"\"\" index = self.closest_point_index smallest_distance =", "for i in range(0, len(self.pursuit_points)): distance = self.pursuit_points[i].point.getDistance(state) if smallest_distance", "{}\".format(closest)) #print(\"Target Velocities - 
{}\".format(self.target_velocities)) # print(\"------------------------------\") def isDone(self): \"\"\"Check", "import vector2d from wpilib import SmartDashboard as Dash from autonomous", "the closes point to self.loohead_distance\"\"\" # Compute point distances to", "= min(distances) # Get new lookahead index if min_distance <=", "new_velocity def updateLookaheadPointIndex2(self, state): \"\"\"Update the lookahead point given the", "robot state. Returns a point if the current state is", "Algorithm for circle line segment intersection found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction", "Get new lookahead index if min_distance <= Constants.LOOKAHEAD_DIST: self.last_lookahead_index =", "self.pursuit_points = [pursuitpoint.PursuitPoint(p, c) for p, c in zip( self.path.getPoints(),", "the current robot position.\"\"\" index = self.closest_point_index smallest_distance = self.pursuit_points[index].point.getDistance(state)", "the smart dashboard.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point closest = self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead", "current robot state. 
Returns a point if the current state", "PurePursuit(): \"\"\"An implementation of the Pure Pursuit path tracking algorithm.\"\"\"", "ppoint.velocity = velocity # Limit the acceleration of the velocities", "= (2 * transform.x) / Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self, state): \"\"\"Update", "Point\", [closest.x, closest.y]) Dash.putNumberArray(\"Target Velocities\", [ self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead Point", "[pursuitpoint.PursuitPoint(p, c) for p, c in zip( self.path.getPoints(), self.path.getCurvatures())] self.last_lookahead_index", "math.sqrt( self.pursuit_points[i+1].velocity**2 + (2 * Constants.MAX_ACCELERATION * distance)) new_velocity =", "state and differences from those distances to Constants.LOOKAHEAD_DIST distances =", "Use the transformed vector to calculate the curvature (derived from", "* transform.x) / Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self, state): \"\"\"Update the index", "curvature (derived from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature = (2 * transform.x) /", "new_velocity = math.sqrt( self.pursuit_points[i+1].velocity**2 + (2 * Constants.MAX_ACCELERATION * distance))", "using the curvature and Constants.CURVE_VELOCITY for ppoint in self.pursuit_points: if", "return None else: discriminant = math.sqrt(discriminant) t0 = (-b -", "and end, otherwise returns None.\"\"\" # Algorithm for circle line", "self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self): \"\"\"Output values to the smart", "transformed vector to calculate the curvature (derived from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature", "= distance index = i self.closest_point_index = index def updateTargetVelocities(self,", "the transformed vector to calculate the curvature (derived 
from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12)", "i in range(0, len(self.pursuit_points)): distance = self.pursuit_points[i].point.getDistance(state) if smallest_distance >", "lookahead point to the current robot position.\"\"\" lookahead = self.pursuit_points[self.last_lookahead_index].point", "uses the closes point to self.loohead_distance\"\"\" # Compute point distances", "* segment_direction return None def updateCurvature(self, state): \"\"\"Update the curvature", "self.target_velocities.y]) #print(\"Lookahead Point - {}\".format(lookahead)) #print(\"Curvature - {}\".format(self.cur_curvature)) #print(\"Closes Point", "https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction = end - start center_to_start = start -", "* center_to_start) - Constants.LOOKAHEAD_DIST ** 2 discriminant = b**2 -", "distances.index(min_distance) def updateLookaheadPointIndex(self, state): \"\"\"Loop over the points in the", "if scale > 1: l_velocity /= scale r_velocity /= scale", "segment_direction return None def updateCurvature(self, state): \"\"\"Update the curvature from", "transform = transform.getRotated(-state.angle) # Use the transformed vector to calculate", "(2 * Constants.MAX_ACCELERATION * distance)) new_velocity = min(self.pursuit_points[i].velocity, new_velocity) self.pursuit_points[i].velocity", "to Constants.LOOKAHEAD_DIST distances = [math.hypot(state.x - ppoint.point.x, state.y - ppoint.point.y)", "lookahead function to use self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos)", "else: discriminant = math.sqrt(discriminant) t0 = (-b - discriminant) /", "0 and t0 <= 1: return start + t0 *", "len(self.pursuit_points)-1)): distance = self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) 
new_velocity = math.sqrt( self.pursuit_points[i+1].velocity**2 +", "self.cur_curvature = 0 self.target_velocities = vector2d.Vector2D() self.closest_point_index = 0 def", "self.pursuit_points[i].point, self.pursuit_points[i+1].point, state) if lookahead != None: self.last_lookahead_index = i", "state): \"\"\"Loop over the points in the path to get", "of the Pure Pursuit path tracking algorithm.\"\"\" def __init__(self, path):", "Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self, state): \"\"\"Update the index of the closest", "index = i self.closest_point_index = index def updateTargetVelocities(self, state): \"\"\"Update", "* (center_to_start * segment_direction) c = (center_to_start * center_to_start) -", "updateClosestPointIndex(self, state): \"\"\"Update the index of the closest point to", "= self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point, state) if lookahead != None: self.last_lookahead_index", "the path.\"\"\" # Compute the velocities along the path using", "algorithm.\"\"\" def __init__(self, path): self.path = path self.pursuit_points = [pursuitpoint.PursuitPoint(p,", "\"\"\"Compute the velocities along the path.\"\"\" # Compute the velocities", "self.pursuit_points[index].point.getDistance(state) for i in range(0, len(self.pursuit_points)): distance = self.pursuit_points[i].point.getDistance(state) if", "end, otherwise returns None.\"\"\" # Algorithm for circle line segment", "< 0: return None else: discriminant = math.sqrt(discriminant) t0 =", "t1 <= 1: return start + t1 * segment_direction return", "(derived from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature = (2 * transform.x) / Constants.LOOKAHEAD_DIST**2", "of the left and right wheels.\"\"\" robot_velocity = self.pursuit_points[self.closest_point_index].velocity #", "and t1 <= 1: return start + t1 * segment_direction", "for p, c in zip( self.path.getPoints(), 
self.path.getCurvatures())] self.last_lookahead_index = 0", "= self.pursuit_points[self.last_lookahead_index].point closest = self.pursuit_points[self.closest_point_index].point Dash.putNumberArray(\"Lookahead Point\", [lookahead.x, lookahead.y]) Dash.putNumber(\"Curvature\",", "#print(\"Target Velocities - {}\".format(self.target_velocities)) # print(\"------------------------------\") def isDone(self): \"\"\"Check if", "- start center_to_start = start - state a = segment_direction", "l_velocity = robot_velocity * \\ (2 + self.cur_curvature * Constants.TRACK_WIDTH)", "** 2 discriminant = b**2 - (4 * a *", "1: l_velocity /= scale r_velocity /= scale self.target_velocities = vector2d.Vector2D(l_velocity,", "kinematics (http://robotsforroboticists.com/drive-kinematics/) and algebra to find wheel target velocties l_velocity", "the Pure Pursuit path tracking algorithm.\"\"\" def __init__(self, path): self.path", "in distances] min_distance = min(distances) # Get new lookahead index", "i def computeLookaheadPoint(self, start, end, state): \"\"\"Compute the lookahead point", "zip( self.path.getPoints(), self.path.getCurvatures())] self.last_lookahead_index = 0 self.cur_curvature = 0 self.target_velocities", "the velocities along the path using the curvature and Constants.CURVE_VELOCITY", "path is done being followed.\"\"\" return (len(self.pursuit_points) - self.closest_point_index) <=", "vector2d from wpilib import SmartDashboard as Dash from autonomous import", "= 0 def computeVelocities(self): \"\"\"Compute the velocities along the path.\"\"\"", "the left and right wheels.\"\"\" robot_velocity = self.pursuit_points[self.closest_point_index].velocity # Use", "return start + t0 * segment_direction if t1 >= 0", "functions).\"\"\" # TODO which lookahead function to use self.updateLookaheadPointIndex(state.pos) #", "isDone(self): \"\"\"Check if the path is done being followed.\"\"\" return", "= path self.pursuit_points = [pursuitpoint.PursuitPoint(p, c) for p, c in", "the 
path is done being followed.\"\"\" return (len(self.pursuit_points) - self.closest_point_index)", "c) for p, c in zip( self.path.getPoints(), self.path.getCurvatures())] self.last_lookahead_index =", "in self.pursuit_points: if abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD: velocity = Constants.MAX_VELOCITY else:", "if smallest_distance > distance: smallest_distance = distance index = i", "def isDone(self): \"\"\"Check if the path is done being followed.\"\"\"", "in the path to get the lookahead point given the", "None else: discriminant = math.sqrt(discriminant) t0 = (-b - discriminant)", "transform.getRotated(-state.angle) # Use the transformed vector to calculate the curvature", "(-b - discriminant) / (2 * a) t1 = (-b", "self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 / Constants.PURE_PURSUIT_KV r_velocity =", "\\ 2 / Constants.PURE_PURSUIT_KV r_velocity = robot_velocity * \\ (2", "[ self.target_velocities.x, self.target_velocities.y]) #print(\"Lookahead Point - {}\".format(lookahead)) #print(\"Curvature - {}\".format(self.cur_curvature))", "Constants.PURE_PURSUIT_KV r_velocity = robot_velocity * \\ (2 - self.cur_curvature *", "along the path.\"\"\" # Compute the velocities along the path", "t0 * segment_direction if t1 >= 0 and t1 <=", "Limit the acceleration of the velocities for i in reversed(range(0,", "pursuit follower(runs all update functions).\"\"\" # TODO which lookahead function", "wheels.\"\"\" robot_velocity = self.pursuit_points[self.closest_point_index].velocity # Use kinematics (http://robotsforroboticists.com/drive-kinematics/) and algebra", "def updateTargetVelocities(self, state): \"\"\"Update the target velocities of the left", "a = segment_direction * segment_direction b = 2 * (center_to_start", "reversed(range(0, len(self.pursuit_points)-1)): distance = self.pursuit_points[i].point.getDistance( self.pursuit_points[i+1].point) new_velocity = math.sqrt( self.pursuit_points[i+1].velocity**2", "Constants.CURVE_VELOCITY for ppoint in 
self.pursuit_points: if abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD: velocity", "* segment_direction) c = (center_to_start * center_to_start) - Constants.LOOKAHEAD_DIST **", "abs(r_velocity)) if scale > 1: l_velocity /= scale r_velocity /=", "distance index = i self.closest_point_index = index def updateTargetVelocities(self, state):", "self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state) self.updateClosestPointIndex(state.pos) self.updateTargetVelocities(state.pos) def outputToSmartDashboard(self): \"\"\"Output values to the", "Constants.LOOKAHEAD_DIST ** 2 discriminant = b**2 - (4 * a", "abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD: velocity = Constants.MAX_VELOCITY else: velocity = min(Constants.MAX_VELOCITY,", "self.pursuit_points[self.last_lookahead_index].point # Transform the lookahead and state.pos to get an", "* c) if discriminant < 0: return None else: discriminant", "(2 * a) if t0 >= 0 and t0 <=", "[abs(d-Constants.LOOKAHEAD_DIST) for d in distances] min_distance = min(distances) # Get", "1: return start + t0 * segment_direction if t1 >=", "* segment_direction if t1 >= 0 and t1 <= 1:", "segment_direction * segment_direction b = 2 * (center_to_start * segment_direction)", "\\ 2 / Constants.PURE_PURSUIT_KV scale = max(abs(l_velocity), abs(r_velocity)) if scale", "minimum distance point if the state is more than Constants.LOOKAHEAD_DIST", "r_velocity) def update(self, state): \"\"\"Update the pure pursuit follower(runs all", "lookahead.y]) Dash.putNumber(\"Curvature\", self.cur_curvature) Dash.putNumberArray(\"Closes Point\", [closest.x, closest.y]) Dash.putNumberArray(\"Target Velocities\", [", "self.pursuit_points: if abs(ppoint.curvature) <= Constants.CURVATURE_THRESHOLD: velocity = Constants.MAX_VELOCITY else: velocity", "t1 = (-b + discriminant) / (2 * a) if", "(center_to_start * segment_direction) c = (center_to_start * center_to_start) - Constants.LOOKAHEAD_DIST", "l_velocity /= scale r_velocity /= scale 
self.target_velocities = vector2d.Vector2D(l_velocity, r_velocity)", "discriminant = math.sqrt(discriminant) t0 = (-b - discriminant) / (2", "wheel target velocties l_velocity = robot_velocity * \\ (2 +", "min(Constants.MAX_VELOCITY, Constants.CURVE_VELOCITY/ppoint.curvature) ppoint.velocity = velocity # Limit the acceleration of", "ppoint in self.pursuit_points] differences = [abs(d-Constants.LOOKAHEAD_DIST) for d in distances]", "state. Returns a point if the current state is Constants.LOOKAHEAD_DIST", "for circle line segment intersection found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction =", "position.\"\"\" index = self.closest_point_index smallest_distance = self.pursuit_points[index].point.getDistance(state) for i in", "TODO which lookahead function to use self.updateLookaheadPointIndex(state.pos) # self.updateLookaheadPointIndex2(state.pos) self.updateCurvature(state)", "= index def updateTargetVelocities(self, state): \"\"\"Update the target velocities of", "the lookahead point given the current robot state.\"\"\" for i", "(-b + discriminant) / (2 * a) if t0 >=", "# Get new lookahead index if min_distance <= Constants.LOOKAHEAD_DIST: self.last_lookahead_index", "> distance: smallest_distance = distance index = i self.closest_point_index =", "\\ (2 + self.cur_curvature * Constants.TRACK_WIDTH) / \\ 2 /", "path to get the lookahead point given the current robot", "len(self.pursuit_points)-1): lookahead = self.computeLookaheadPoint( self.pursuit_points[i].point, self.pursuit_points[i+1].point, state) if lookahead !=", "c = (center_to_start * center_to_start) - Constants.LOOKAHEAD_DIST ** 2 discriminant", "the curvature (derived from https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature = (2 * transform.x)", "new_velocity = min(self.pursuit_points[i].velocity, new_velocity) self.pursuit_points[i].velocity = 
new_velocity def updateLookaheadPointIndex2(self, state):", "= 2 * (center_to_start * segment_direction) c = (center_to_start *", "distance)) new_velocity = min(self.pursuit_points[i].velocity, new_velocity) self.pursuit_points[i].velocity = new_velocity def updateLookaheadPointIndex2(self,", "https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf#page=12) self.cur_curvature = (2 * transform.x) / Constants.LOOKAHEAD_DIST**2 def updateClosestPointIndex(self,", "new lookahead index if min_distance <= Constants.LOOKAHEAD_DIST: self.last_lookahead_index = differences.index(min(differences))", "along the path using the curvature and Constants.CURVE_VELOCITY for ppoint", "a) t1 = (-b + discriminant) / (2 * a)", "= segment_direction * segment_direction b = 2 * (center_to_start *", "# Limit the acceleration of the velocities for i in", "= new_velocity def updateLookaheadPointIndex2(self, state): \"\"\"Update the lookahead point given", "here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction = end - start center_to_start = start", "= (-b + discriminant) / (2 * a) if t0", "segment intersection found here: https://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm/1084899#1084899 segment_direction = end - start", "lookahead point given the current robot state.\"\"\" for i in" ]
[ "error or len(e.path) > len(error.path): error = e continue #", "validation # Should be used sparingly, as it's not a", "being validated will be matched against the relevant schema keys", "from esphome.py_compat import string_types class ExtraKeysInvalid(vol.Invalid): def __init__(self, *arg, **kwargs):", "in candidates: if type(skey) in vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue)))", "and key_names: matches = difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra keys not allowed',", "key_names.append(skey) elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types): key_names.append(skey.schema) def validate_mapping(path,", "necessary for esphome, but leave it here just in case)", "schema.schema ret = super(_Schema, self).extend(schema, extra=extra) return _Schema(ret.schema, extra=ret.extra, extra_schemas=self._extra_schemas)", "doing things. self._extra_schemas = extra_schemas or [] def __call__(self, data):", "key, (compiled key, compiled value) for skey, (ckey, cvalue) in", "type(skey) in vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue))) elif isinstance(skey, vol.Marker)", "key in schema if isinstance(key, vol.Optional)) # Recursively compile schema", "prints similar keys on error.\"\"\" def __init__(self, schema, extra=vol.PREVENT_EXTRA, extra_schemas=None):", "key_names.append(skey.schema) def validate_mapping(path, iterable, out): required_keys = all_required_keys.copy() # Build", "was # a Required() field. required_keys.discard(skey) break else: if self.extra", "not allow vol.Remove\") if isinstance(key, vol.primitive_types): raise ValueError(\"All schema keys", "in vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue))) elif isinstance(skey, vol.Marker) and", "exception. 
exception_errors = [] try: cval = cvalue(key_path, value) out[new_key]", "self for schema in schemas: ret = ret.extend(schema) return ret", "No point in matching against different keys additional_candidates = []", "against the relevant schema keys only. # No point in", "value okay, mark as found in case it was #", "errors = [] for key, value in key_value_map.items(): key_path =", "different keys additional_candidates = [] candidates_by_key = {} for skey,", "is invalid we immediately throw an exception. exception_errors = []", "for skey, (ckey, cvalue) in relevant_candidates: try: new_key = ckey(key_path,", "we immediately throw an exception. exception_errors = [] try: cval", "= value # Insert default values for non-existing keys. for", "err.error_type = invalid_msg errors.append(err) # If there is a validation", "'required key not provided' errors.append(vol.RequiredFieldInvalid(msg, path + [key])) if errors:", "that each # key in the data being validated will", "= list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After we have the list of candidates", "# Should be used sparingly, as it's not a very", "cvalue) in candidates: if type(skey) in vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey, (ckey,", "(ckey, cvalue))) key_names = [] for skey in schema: if", "allow vol.Remove\") if isinstance(key, vol.primitive_types): raise ValueError(\"All schema keys must", "as e: if len(e.path) > len(key_path): raise if not error", "default values for non-existing keys. 
for key in all_default_keys: if", "in vol.iteritems(schema): new_key = self._compile(skey) new_value = self._compile(svalue) _compiled_schema[skey] =", "(compiled key, compiled value) for skey, (ckey, cvalue) in relevant_candidates:", "extra(res) except vol.Invalid as err: raise ensure_multiple_invalid(err) return res def", "default value has been specified for this missing key, insert", "in schema if isinstance(key, vol.Required)) # Keys that may have", "= key.default() error = None errors = [] for key,", "# doing things. self._extra_schemas = extra_schemas or [] def __call__(self,", "is used to retain ordering in case a ordered #", "cv.Schema that prints similar keys on error.\"\"\" def __init__(self, schema,", "value in iterable: key_value_map[key] = value # Insert default values", "required_keys: msg = getattr(key, 'msg', None) or 'required key not", "validator): validator = _Schema(validator) self._extra_schemas.append(validator) return self # pylint: disable=arguments-differ", "# Discard the required key so it does not #", "an additional, noisy exception. required_keys.discard(skey) break # Key and value", "errors.append(vol.RequiredFieldInvalid(msg, path + [key])) if errors: raise vol.MultipleInvalid(errors) return out", "in relevant_candidates: try: new_key = ckey(key_path, key) except vol.Invalid as", "a validation error for a required # key, this means", "schema, extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema, self).__init__(schema, extra=extra) # List of extra", "# mostly to keep the logic in this method sane", "ensure_multiple_invalid(err): if isinstance(err, vol.MultipleInvalid): return err return vol.MultipleInvalid(err) # pylint:", "class _Schema(vol.Schema): \"\"\"Custom cv.Schema that prints similar keys on error.\"\"\"", "these may be re-added if needed). 
for key in schema:", "apply some # optimization so that each # key in", "set(key for key in schema if isinstance(key, vol.Required)) # Keys", "# Key and value okay, mark as found in case", "if not error or len(e.path) > len(error.path): error = e", "not allow # mostly to keep the logic in this", "skey in schema: if isinstance(skey, string_types): key_names.append(skey) elif isinstance(skey, vol.Marker)", "= cval except vol.MultipleInvalid as e: exception_errors.extend(e.errors) except vol.Invalid as", "key, value in key_value_map.items(): key_path = path + [key] #", "new_value = self._compile(svalue) _compiled_schema[skey] = (new_key, new_value) # Sort compiled", "found and don't have defaults: for key in required_keys: msg", "vol.MultipleInvalid(errors) return out return validate_mapping def add_extra(self, validator): validator =", "needed). for key in schema: if key is vol.Extra: raise", "as 'int', 'str', 'Remove' and others which should be #", "[] candidates_by_key = {} for skey, (ckey, cvalue) in candidates:", "exception_errors: for err in exception_errors: if len(err.path) <= len(key_path): err.error_type", "vol.REMOVE_EXTRA: if isinstance(key, string_types) and key_names: matches = difflib.get_close_matches(key, key_names)", "**kwargs): extra = kwargs.pop('extra', None) if kwargs: raise ValueError if", "specified for this missing key, insert it. key_value_map[key.schema] = key.default()", "sane (so these may be re-added if needed). 
for key", "data): res = super(_Schema, self).__call__(data) for extra in self._extra_schemas: try:", "not performed once a key is selected, so if #", "candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue))) elif isinstance(skey, vol.Marker) and type(skey.schema) in", "vol.Schema): schema = schema.schema ret = super(_Schema, self).extend(schema, extra=extra) return", "required_keys.discard(skey) break # Key and value okay, mark as found", "extra=extra) # List of extra schemas to apply after validation", "first, then fallback to the rest relevant_candidates = itertools.chain(candidates_by_key.get(key, []),", "import itertools import voluptuous as vol from esphome.py_compat import string_types", "which should be # applied to all keys additional_candidates.append((skey, (ckey,", "that weren't found and don't have defaults: for key in", "defaults all_default_keys = set(key for key in schema if isinstance(key,", "if isinstance(key, string_types) and key_names: matches = difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra", "don't have defaults: for key in required_keys: msg = getattr(key,", "and key.schema not in key_value_map: # A default value has", "after validation # Should be used sparingly, as it's not", "isinstance(skey, string_types): key_names.append(skey) elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types): key_names.append(skey.schema)", "if exception_errors: for err in exception_errors: if len(err.path) <= len(key_path):", "= self._compile(svalue) _compiled_schema[skey] = (new_key, new_value) # Sort compiled schema", "vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue))) else: # These are wildcards", "def __init__(self, schema, extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema, self).__init__(schema, extra=extra) # List", "that prints similar keys on error.\"\"\" def __init__(self, schema, extra=vol.PREVENT_EXTRA,", "others which should be # 
applied to all keys additional_candidates.append((skey,", "in key_value_map: # A default value has been specified for", "case it was # a Required() field. required_keys.discard(skey) break else:", "it here just in case) candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After", "each # key in the data being validated will be", "pylint: disable=protected-access, unidiomatic-typecheck class _Schema(vol.Schema): \"\"\"Custom cv.Schema that prints similar", "given key/value against all compiled key/values # schema key, (compiled", "if not schemas: return self.extend({}) if len(schemas) != 1: ret", "{} for skey, (ckey, cvalue) in candidates: if type(skey) in", "the value is invalid we immediately throw an exception. exception_errors", "a map of all provided key-value pairs. # The type(out)", "for schema in schemas: ret = ret.extend(schema) return ret schema", "throw an exception. exception_errors = [] try: cval = cvalue(key_path,", "ret = ret.extend(schema) return ret schema = schemas[0] if isinstance(schema,", "mostly to keep the logic in this method sane (so", "self # pylint: disable=arguments-differ def extend(self, *schemas, **kwargs): extra =", "# pylint: disable=arguments-differ def extend(self, *schemas, **kwargs): extra = kwargs.pop('extra',", "= self._compile(skey) new_value = self._compile(svalue) _compiled_schema[skey] = (new_key, new_value) #", "except vol.Invalid as e: exception_errors.append(e) if exception_errors: for err in", "the correct order, we want to apply some # optimization", "new_value) # Sort compiled schema (probably not necessary for esphome,", "all_default_keys: if not isinstance(key.default, vol.Undefined) and key.schema not in key_value_map:", "relevant schema keys only. 
# No point in matching against", "for skey in schema: if isinstance(skey, string_types): key_names.append(skey) elif isinstance(skey,", "schemas to apply after validation # Should be used sparingly,", "in vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue))) else: # These are", "key, insert it. key_value_map[key.schema] = key.default() error = None errors", "keys not allowed', key_path, candidates=matches)) else: errors.append(vol.Invalid('extra keys not allowed',", "= getattr(key, 'msg', None) or 'required key not provided' errors.append(vol.RequiredFieldInvalid(msg,", "_Schema(vol.Schema): \"\"\"Custom cv.Schema that prints similar keys on error.\"\"\" def", "may be re-added if needed). for key in schema: if", "schema keys only. # No point in matching against different", "# The type(out) is used to retain ordering in case", "validate_mapping def add_extra(self, validator): validator = _Schema(validator) self._extra_schemas.append(validator) return self", "self._compile(svalue) _compiled_schema[skey] = (new_key, new_value) # Sort compiled schema (probably", "list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After we have the list of candidates in", "== vol.ALLOW_EXTRA: out[key] = value elif self.extra != vol.REMOVE_EXTRA: if", "in cv.Required or cv.Optional\") # Keys that may be required", "+ [key])) if errors: raise vol.MultipleInvalid(errors) return out return validate_mapping", "iterable, out): required_keys = all_required_keys.copy() # Build a map of", "invalid_msg = invalid_msg or 'mapping value' # Check some things", "Keys that may be required all_required_keys = set(key for key", "data being validated will be matched against the relevant schema", "all compiled key/values # schema key, (compiled key, compiled value)", "except vol.Invalid as err: raise ensure_multiple_invalid(err) return res def _compile_mapping(self,", "additional_candidates.append((skey, (ckey, cvalue))) key_names = [] for 
skey in schema:", "key, compiled value) for skey, (ckey, cvalue) in relevant_candidates: try:", "__init__(self, *arg, **kwargs): self.candidates = kwargs.pop('candidates') vol.Invalid.__init__(self, *arg, **kwargs) def", "of extra schemas to apply after validation # Should be", "as e: exception_errors.append(e) if exception_errors: for err in exception_errors: if", "a Required() field. required_keys.discard(skey) break else: if self.extra == vol.ALLOW_EXTRA:", "key not provided' errors.append(vol.RequiredFieldInvalid(msg, path + [key])) if errors: raise", "raise ValueError(\"ESPHome does not allow vol.Remove\") if isinstance(key, vol.primitive_types): raise", "Validate against the matching key first, then fallback to the", "value) out[new_key] = cval except vol.MultipleInvalid as e: exception_errors.extend(e.errors) except", "vol.ALLOW_EXTRA: out[key] = value elif self.extra != vol.REMOVE_EXTRA: if isinstance(key,", "vol.Marker) and type(skey.schema) in vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue))) else:", "string_types) and key_names: matches = difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra keys not", "value' # Check some things that ESPHome's schemas do not", "key_value_map[key] = value # Insert default values for non-existing keys.", "[key] # Optimization. Validate against the matching key first, then", "self.extend({}) if len(schemas) != 1: ret = self for schema", "does not allow vol.Extra\") if isinstance(key, vol.Remove): raise ValueError(\"ESPHome does", "map of all provided key-value pairs. # The type(out) is", "vol.Required)) # Keys that may have defaults all_default_keys = set(key", "for key, value in iterable: key_value_map[key] = value # Insert", "values for non-existing keys. 
for key in all_default_keys: if not", "vol.Invalid as e: exception_errors.append(e) if exception_errors: for err in exception_errors:", "exception_errors = [] try: cval = cvalue(key_path, value) out[new_key] =", "# If there is a validation error for a required", "validator = _Schema(validator) self._extra_schemas.append(validator) return self # pylint: disable=arguments-differ def", "exception. required_keys.discard(skey) break # Key and value okay, mark as", "to apply after validation # Should be used sparingly, as", "okay, mark as found in case it was # a", "return err return vol.MultipleInvalid(err) # pylint: disable=protected-access, unidiomatic-typecheck class _Schema(vol.Schema):", "vol.Invalid as err: raise ensure_multiple_invalid(err) return res def _compile_mapping(self, schema,", "res = extra(res) except vol.Invalid as err: raise ensure_multiple_invalid(err) return", "vol.MultipleInvalid(err) # pylint: disable=protected-access, unidiomatic-typecheck class _Schema(vol.Schema): \"\"\"Custom cv.Schema that", "'msg', None) or 'required key not provided' errors.append(vol.RequiredFieldInvalid(msg, path +", "relevant_candidates: try: new_key = ckey(key_path, key) except vol.Invalid as e:", "key in all_default_keys: if not isinstance(key.default, vol.Undefined) and key.schema not", "cv.Optional\") # Keys that may be required all_required_keys = set(key", "there is a validation error for a required # key,", "exception_errors.extend(e.errors) except vol.Invalid as e: exception_errors.append(e) if exception_errors: for err", "e continue # Backtracking is not performed once a key", "keys additional_candidates.append((skey, (ckey, cvalue))) key_names = [] for skey in", "value elif self.extra != vol.REMOVE_EXTRA: if isinstance(key, string_types) and key_names:", "= cvalue(key_path, value) out[new_key] = cval except vol.MultipleInvalid as e:", "for this missing key, insert it. 
key_value_map[key.schema] = key.default() error", "defaults: for key in required_keys: msg = getattr(key, 'msg', None)", "return validate_mapping def add_extra(self, validator): validator = _Schema(validator) self._extra_schemas.append(validator) return", "key in required_keys: msg = getattr(key, 'msg', None) or 'required", "!= 1: ret = self for schema in schemas: ret", "but leave it here just in case) candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema))", "= [] candidates_by_key = {} for skey, (ckey, cvalue) in", "# map type is provided as input. key_value_map = type(out)()", "len(e.path) > len(error.path): error = e continue # Backtracking is", "be required all_required_keys = set(key for key in schema if", "for key in all_default_keys: if not isinstance(key.default, vol.Undefined) and key.schema", "this means that the key was provided. # Discard the", "way of # doing things. self._extra_schemas = extra_schemas or []", "= kwargs.pop('candidates') vol.Invalid.__init__(self, *arg, **kwargs) def ensure_multiple_invalid(err): if isinstance(err, vol.MultipleInvalid):", "in iterable: key_value_map[key] = value # Insert default values for", "provided as input. key_value_map = type(out)() for key, value in", "selected, so if # the value is invalid we immediately", "as found in case it was # a Required() field.", "be # applied to all keys additional_candidates.append((skey, (ckey, cvalue))) key_names", "self._extra_schemas: try: res = extra(res) except vol.Invalid as err: raise", "'Remove' and others which should be # applied to all", "each given key/value against all compiled key/values # schema key,", "self.extra == vol.ALLOW_EXTRA: out[key] = value elif self.extra != vol.REMOVE_EXTRA:", "of all provided key-value pairs. # The type(out) is used", "= super(_Schema, self).__call__(data) for extra in self._extra_schemas: try: res =", "# optimization so that each # key in the data", "be matched against the relevant schema keys only. 
# No", "for a required # key, this means that the key", "ValueError(\"ESPHome does not allow vol.Extra\") if isinstance(key, vol.Remove): raise ValueError(\"ESPHome", "or [] def __call__(self, data): res = super(_Schema, self).__call__(data) for", "compiled schema (probably not necessary for esphome, but leave it", "non-existing keys. for key in all_default_keys: if not isinstance(key.default, vol.Undefined)", "apply after validation # Should be used sparingly, as it's", "ret schema = schemas[0] if isinstance(schema, vol.Schema): schema = schema.schema", "schema keys must be wrapped in cv.Required or cv.Optional\") #", "vol from esphome.py_compat import string_types class ExtraKeysInvalid(vol.Invalid): def __init__(self, *arg,", "vol.Undefined) and key.schema not in key_value_map: # A default value", "key_value_map: # A default value has been specified for this", "voluptuous as vol from esphome.py_compat import string_types class ExtraKeysInvalid(vol.Invalid): def", "schema key, (compiled key, compiled value) for skey, (ckey, cvalue)", "type(out) is used to retain ordering in case a ordered", "method sane (so these may be re-added if needed). for", "against all compiled key/values # schema key, (compiled key, compiled", "new_key = ckey(key_path, key) except vol.Invalid as e: if len(e.path)", "required_keys.discard(skey) break else: if self.extra == vol.ALLOW_EXTRA: out[key] = value", "e: exception_errors.append(e) if exception_errors: for err in exception_errors: if len(err.path)", "key_value_map.items(): key_path = path + [key] # Optimization. Validate against", "that ESPHome's schemas do not allow # mostly to keep", "rest relevant_candidates = itertools.chain(candidates_by_key.get(key, []), additional_candidates) # compare each given", "len(key_path): err.error_type = invalid_msg errors.append(err) # If there is a", "sparingly, as it's not a very voluptuous-way/clean way of #", "create an additional, noisy exception. 
required_keys.discard(skey) break # Key and", "invalid_msg errors.append(err) # If there is a validation error for", "logic in this method sane (so these may be re-added", "None errors = [] for key, value in key_value_map.items(): key_path", "key-value pairs. # The type(out) is used to retain ordering", "ESPHome's schemas do not allow # mostly to keep the", "invalid_msg=None): invalid_msg = invalid_msg or 'mapping value' # Check some", "extend(self, *schemas, **kwargs): extra = kwargs.pop('extra', None) if kwargs: raise", "in the data being validated will be matched against the", "These are wildcards such as 'int', 'str', 'Remove' and others", "keys additional_candidates = [] candidates_by_key = {} for skey, (ckey,", "e: exception_errors.extend(e.errors) except vol.Invalid as e: exception_errors.append(e) if exception_errors: for", "validated will be matched against the relevant schema keys only.", "break # Key and value okay, mark as found in", "= value elif self.extra != vol.REMOVE_EXTRA: if isinstance(key, string_types) and", "all provided key-value pairs. # The type(out) is used to", "= schema.schema ret = super(_Schema, self).extend(schema, extra=extra) return _Schema(ret.schema, extra=ret.extra,", "if needed). for key in schema: if key is vol.Extra:", "try: cval = cvalue(key_path, value) out[new_key] = cval except vol.MultipleInvalid", "raise ValueError(\"All schema keys must be wrapped in cv.Required or", "to keep the logic in this method sane (so these", "vol.MultipleInvalid): return err return vol.MultipleInvalid(err) # pylint: disable=protected-access, unidiomatic-typecheck class", "key, this means that the key was provided. # Discard", "and don't have defaults: for key in required_keys: msg =", "it was # a Required() field. 
required_keys.discard(skey) break else: if", "errors: raise vol.MultipleInvalid(errors) return out return validate_mapping def add_extra(self, validator):", "vol.Remove): raise ValueError(\"ESPHome does not allow vol.Remove\") if isinstance(key, vol.primitive_types):", "(ckey, cvalue) in candidates: if type(skey) in vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey,", "*schemas, **kwargs): extra = kwargs.pop('extra', None) if kwargs: raise ValueError", "not schemas: return self.extend({}) if len(schemas) != 1: ret =", "res def _compile_mapping(self, schema, invalid_msg=None): invalid_msg = invalid_msg or 'mapping", "in matching against different keys additional_candidates = [] candidates_by_key =", "not isinstance(key.default, vol.Undefined) and key.schema not in key_value_map: # A", "key_names) errors.append(ExtraKeysInvalid('extra keys not allowed', key_path, candidates=matches)) else: errors.append(vol.Invalid('extra keys", "= kwargs.pop('extra', None) if kwargs: raise ValueError if not schemas:", "vol.Invalid.__init__(self, *arg, **kwargs) def ensure_multiple_invalid(err): if isinstance(err, vol.MultipleInvalid): return err", "def __init__(self, *arg, **kwargs): self.candidates = kwargs.pop('candidates') vol.Invalid.__init__(self, *arg, **kwargs)", "'str', 'Remove' and others which should be # applied to", "schema _compiled_schema = {} for skey, svalue in vol.iteritems(schema): new_key", "# Backtracking is not performed once a key is selected,", "against different keys additional_candidates = [] candidates_by_key = {} for", "# Build a map of all provided key-value pairs. #", "keys. 
for key in all_default_keys: if not isinstance(key.default, vol.Undefined) and", "_compile_mapping(self, schema, invalid_msg=None): invalid_msg = invalid_msg or 'mapping value' #", "allowed', key_path, candidates=matches)) else: errors.append(vol.Invalid('extra keys not allowed', key_path)) #", "(ckey, cvalue))) else: # These are wildcards such as 'int',", "**kwargs) def ensure_multiple_invalid(err): if isinstance(err, vol.MultipleInvalid): return err return vol.MultipleInvalid(err)", "= invalid_msg errors.append(err) # If there is a validation error", "[] def __call__(self, data): res = super(_Schema, self).__call__(data) for extra", "# key in the data being validated will be matched", "for non-existing keys. for key in all_default_keys: if not isinstance(key.default,", "ExtraKeysInvalid(vol.Invalid): def __init__(self, *arg, **kwargs): self.candidates = kwargs.pop('candidates') vol.Invalid.__init__(self, *arg,", "Backtracking is not performed once a key is selected, so", "import voluptuous as vol from esphome.py_compat import string_types class ExtraKeysInvalid(vol.Invalid):", "out return validate_mapping def add_extra(self, validator): validator = _Schema(validator) self._extra_schemas.append(validator)", "key_path)) # for any required keys left that weren't found", "Sort compiled schema (probably not necessary for esphome, but leave", "immediately throw an exception. exception_errors = [] try: cval =", "all_required_keys.copy() # Build a map of all provided key-value pairs.", "if not isinstance(key.default, vol.Undefined) and key.schema not in key_value_map: #", "A default value has been specified for this missing key,", "return out return validate_mapping def add_extra(self, validator): validator = _Schema(validator)", "correct order, we want to apply some # optimization so", "schema = schemas[0] if isinstance(schema, vol.Schema): schema = schema.schema ret", "field. 
required_keys.discard(skey) break else: if self.extra == vol.ALLOW_EXTRA: out[key] =", "err return vol.MultipleInvalid(err) # pylint: disable=protected-access, unidiomatic-typecheck class _Schema(vol.Schema): \"\"\"Custom", "= type(out)() for key, value in iterable: key_value_map[key] = value", "# Check some things that ESPHome's schemas do not allow", "we have the list of candidates in the correct order,", "may have defaults all_default_keys = set(key for key in schema", "was provided. # Discard the required key so it does", "self.candidates = kwargs.pop('candidates') vol.Invalid.__init__(self, *arg, **kwargs) def ensure_multiple_invalid(err): if isinstance(err,", "a required # key, this means that the key was", "def validate_mapping(path, iterable, out): required_keys = all_required_keys.copy() # Build a", "# a Required() field. required_keys.discard(skey) break else: if self.extra ==", "key so it does not # create an additional, noisy", "# Sort compiled schema (probably not necessary for esphome, but", "and others which should be # applied to all keys", "schema: if isinstance(skey, string_types): key_names.append(skey) elif isinstance(skey, vol.Marker) and isinstance(skey.schema,", "Discard the required key so it does not # create", "def ensure_multiple_invalid(err): if isinstance(err, vol.MultipleInvalid): return err return vol.MultipleInvalid(err) #", "<= len(key_path): err.error_type = invalid_msg errors.append(err) # If there is", "= schemas[0] if isinstance(schema, vol.Schema): schema = schema.schema ret =", "and type(skey.schema) in vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue))) else: #", "vol.Extra\") if isinstance(key, vol.Remove): raise ValueError(\"ESPHome does not allow vol.Remove\")", "if errors: raise vol.MultipleInvalid(errors) return out return validate_mapping def add_extra(self,", "key in schema if isinstance(key, vol.Required)) # Keys that may", "applied to all keys 
additional_candidates.append((skey, (ckey, cvalue))) key_names = []", "if isinstance(skey, string_types): key_names.append(skey) elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types):", "esphome.py_compat import string_types class ExtraKeysInvalid(vol.Invalid): def __init__(self, *arg, **kwargs): self.candidates", "len(error.path): error = e continue # Backtracking is not performed", "ret.extend(schema) return ret schema = schemas[0] if isinstance(schema, vol.Schema): schema", "not necessary for esphome, but leave it here just in", "for skey, (ckey, cvalue) in candidates: if type(skey) in vol.primitive_types:", "key first, then fallback to the rest relevant_candidates = itertools.chain(candidates_by_key.get(key,", "= [] try: cval = cvalue(key_path, value) out[new_key] = cval", "matches = difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra keys not allowed', key_path, candidates=matches))", "that may be required all_required_keys = set(key for key in", "skey, (ckey, cvalue) in candidates: if type(skey) in vol.primitive_types: candidates_by_key.setdefault(skey,", "or 'mapping value' # Check some things that ESPHome's schemas", "isinstance(skey.schema, string_types): key_names.append(skey.schema) def validate_mapping(path, iterable, out): required_keys = all_required_keys.copy()", "key_path = path + [key] # Optimization. Validate against the", "or len(e.path) > len(error.path): error = e continue # Backtracking", "in the correct order, we want to apply some #", "in case) candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After we have the", "that the key was provided. # Discard the required key", "as it's not a very voluptuous-way/clean way of # doing", "matched against the relevant schema keys only. # No point", "re-added if needed). 
for key in schema: if key is", "additional_candidates) # compare each given key/value against all compiled key/values", "def extend(self, *schemas, **kwargs): extra = kwargs.pop('extra', None) if kwargs:", "ordered # map type is provided as input. key_value_map =", "disable=arguments-differ def extend(self, *schemas, **kwargs): extra = kwargs.pop('extra', None) if", "1: ret = self for schema in schemas: ret =", "so if # the value is invalid we immediately throw", "pairs. # The type(out) is used to retain ordering in", "schema, invalid_msg=None): invalid_msg = invalid_msg or 'mapping value' # Check", "key in schema: if key is vol.Extra: raise ValueError(\"ESPHome does", "have defaults all_default_keys = set(key for key in schema if", "case a ordered # map type is provided as input.", "break else: if self.extra == vol.ALLOW_EXTRA: out[key] = value elif", "[] for key, value in key_value_map.items(): key_path = path +", "the list of candidates in the correct order, we want", "is provided as input. 
key_value_map = type(out)() for key, value", "ValueError(\"ESPHome does not allow vol.Remove\") if isinstance(key, vol.primitive_types): raise ValueError(\"All", "!= vol.REMOVE_EXTRA: if isinstance(key, string_types) and key_names: matches = difflib.get_close_matches(key,", "def _compile_mapping(self, schema, invalid_msg=None): invalid_msg = invalid_msg or 'mapping value'", "required all_required_keys = set(key for key in schema if isinstance(key,", "to retain ordering in case a ordered # map type", "= [] for skey in schema: if isinstance(skey, string_types): key_names.append(skey)", "candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After we have the list of", "out): required_keys = all_required_keys.copy() # Build a map of all", "the matching key first, then fallback to the rest relevant_candidates", "= itertools.chain(candidates_by_key.get(key, []), additional_candidates) # compare each given key/value against", "must be wrapped in cv.Required or cv.Optional\") # Keys that", "import difflib import itertools import voluptuous as vol from esphome.py_compat", "used sparingly, as it's not a very voluptuous-way/clean way of", "for key in schema: if key is vol.Extra: raise ValueError(\"ESPHome", "all_default_keys = set(key for key in schema if isinstance(key, vol.Optional))", "if isinstance(key, vol.Optional)) # Recursively compile schema _compiled_schema = {}", "[key])) if errors: raise vol.MultipleInvalid(errors) return out return validate_mapping def", "not # create an additional, noisy exception. 
required_keys.discard(skey) break #", "key/value against all compiled key/values # schema key, (compiled key,", "errors.append(err) # If there is a validation error for a", "errors.append(ExtraKeysInvalid('extra keys not allowed', key_path, candidates=matches)) else: errors.append(vol.Invalid('extra keys not", "extra_schemas or [] def __call__(self, data): res = super(_Schema, self).__call__(data)", "not allowed', key_path)) # for any required keys left that", "used to retain ordering in case a ordered # map", "(so these may be re-added if needed). for key in", "is vol.Extra: raise ValueError(\"ESPHome does not allow vol.Extra\") if isinstance(key,", "noisy exception. required_keys.discard(skey) break # Key and value okay, mark", "= (new_key, new_value) # Sort compiled schema (probably not necessary", "does not allow vol.Remove\") if isinstance(key, vol.primitive_types): raise ValueError(\"All schema", "fallback to the rest relevant_candidates = itertools.chain(candidates_by_key.get(key, []), additional_candidates) #", "have defaults: for key in required_keys: msg = getattr(key, 'msg',", "this missing key, insert it. key_value_map[key.schema] = key.default() error =", "very voluptuous-way/clean way of # doing things. 
self._extra_schemas = extra_schemas", "is a validation error for a required # key, this", "<gh_stars>0 import difflib import itertools import voluptuous as vol from", "compile schema _compiled_schema = {} for skey, svalue in vol.iteritems(schema):", "= {} for skey, (ckey, cvalue) in candidates: if type(skey)", "[] try: cval = cvalue(key_path, value) out[new_key] = cval except", "pylint: disable=arguments-differ def extend(self, *schemas, **kwargs): extra = kwargs.pop('extra', None)", "as vol from esphome.py_compat import string_types class ExtraKeysInvalid(vol.Invalid): def __init__(self,", "else: errors.append(vol.Invalid('extra keys not allowed', key_path)) # for any required", "in required_keys: msg = getattr(key, 'msg', None) or 'required key", "value # Insert default values for non-existing keys. for key", "for skey, svalue in vol.iteritems(schema): new_key = self._compile(skey) new_value =", "is selected, so if # the value is invalid we", "disable=protected-access, unidiomatic-typecheck class _Schema(vol.Schema): \"\"\"Custom cv.Schema that prints similar keys", "key is selected, so if # the value is invalid", "return ret schema = schemas[0] if isinstance(schema, vol.Schema): schema =", "the data being validated will be matched against the relevant", "exception_errors: if len(err.path) <= len(key_path): err.error_type = invalid_msg errors.append(err) #", "extra in self._extra_schemas: try: res = extra(res) except vol.Invalid as", "left that weren't found and don't have defaults: for key", "keys only. 
# No point in matching against different keys", "schema in schemas: ret = ret.extend(schema) return ret schema =", "candidates: if type(skey) in vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue))) elif", "key_value_map[key.schema] = key.default() error = None errors = [] for", "type(skey.schema) in vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue))) else: # These", "path + [key] # Optimization. Validate against the matching key", "to the rest relevant_candidates = itertools.chain(candidates_by_key.get(key, []), additional_candidates) # compare", "kwargs.pop('candidates') vol.Invalid.__init__(self, *arg, **kwargs) def ensure_multiple_invalid(err): if isinstance(err, vol.MultipleInvalid): return", "it's not a very voluptuous-way/clean way of # doing things.", "Required() field. required_keys.discard(skey) break else: if self.extra == vol.ALLOW_EXTRA: out[key]", "here just in case) candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After we", "> len(key_path): raise if not error or len(e.path) > len(error.path):", "in case a ordered # map type is provided as", "(probably not necessary for esphome, but leave it here just", "of # doing things. self._extra_schemas = extra_schemas or [] def", "the required key so it does not # create an", "= path + [key] # Optimization. Validate against the matching", "only. # No point in matching against different keys additional_candidates", "have the list of candidates in the correct order, we", "things that ESPHome's schemas do not allow # mostly to", "schema if isinstance(key, vol.Required)) # Keys that may have defaults", "for key in schema if isinstance(key, vol.Required)) # Keys that", "as input. 
key_value_map = type(out)() for key, value in iterable:", "self).__init__(schema, extra=extra) # List of extra schemas to apply after", "isinstance(skey, vol.Marker) and type(skey.schema) in vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue)))", "= self for schema in schemas: ret = ret.extend(schema) return", "return res def _compile_mapping(self, schema, invalid_msg=None): invalid_msg = invalid_msg or", "skey, (ckey, cvalue) in relevant_candidates: try: new_key = ckey(key_path, key)", "order, we want to apply some # optimization so that", "string_types): key_names.append(skey.schema) def validate_mapping(path, iterable, out): required_keys = all_required_keys.copy() #", "vol.Invalid as e: if len(e.path) > len(key_path): raise if not", "key_path, candidates=matches)) else: errors.append(vol.Invalid('extra keys not allowed', key_path)) # for", "keys not allowed', key_path)) # for any required keys left", "kwargs: raise ValueError if not schemas: return self.extend({}) if len(schemas)", "this method sane (so these may be re-added if needed).", "mark as found in case it was # a Required()", "all keys additional_candidates.append((skey, (ckey, cvalue))) key_names = [] for skey", "Check some things that ESPHome's schemas do not allow #", "schema = schema.schema ret = super(_Schema, self).extend(schema, extra=extra) return _Schema(ret.schema,", "extra_schemas=None): super(_Schema, self).__init__(schema, extra=extra) # List of extra schemas to", "for key in schema if isinstance(key, vol.Optional)) # Recursively compile", "an exception. 
exception_errors = [] try: cval = cvalue(key_path, value)", "kwargs.pop('extra', None) if kwargs: raise ValueError if not schemas: return", "not error or len(e.path) > len(error.path): error = e continue", "vol.Optional)) # Recursively compile schema _compiled_schema = {} for skey,", "ValueError if not schemas: return self.extend({}) if len(schemas) != 1:", "is not performed once a key is selected, so if", "things. self._extra_schemas = extra_schemas or [] def __call__(self, data): res", "# key, this means that the key was provided. #", "cv.Required or cv.Optional\") # Keys that may be required all_required_keys", "out[key] = value elif self.extra != vol.REMOVE_EXTRA: if isinstance(key, string_types)", "len(key_path): raise if not error or len(e.path) > len(error.path): error", "in exception_errors: if len(err.path) <= len(key_path): err.error_type = invalid_msg errors.append(err)", "any required keys left that weren't found and don't have", "itertools.chain(candidates_by_key.get(key, []), additional_candidates) # compare each given key/value against all", "None) or 'required key not provided' errors.append(vol.RequiredFieldInvalid(msg, path + [key]))", "key.default() error = None errors = [] for key, value", "difflib import itertools import voluptuous as vol from esphome.py_compat import", "list of candidates in the correct order, we want to", "then fallback to the rest relevant_candidates = itertools.chain(candidates_by_key.get(key, []), additional_candidates)", "# After we have the list of candidates in the", "= ret.extend(schema) return ret schema = schemas[0] if isinstance(schema, vol.Schema):", "path + [key])) if errors: raise vol.MultipleInvalid(errors) return out return", "error for a required # key, this means that the", "= all_required_keys.copy() # Build a map of all provided key-value", "= None errors = [] for key, value in key_value_map.items():", "List of extra schemas to apply after validation # Should", "keys must be wrapped in cv.Required or 
cv.Optional\") # Keys", "def add_extra(self, validator): validator = _Schema(validator) self._extra_schemas.append(validator) return self #", "candidates in the correct order, we want to apply some", "# No point in matching against different keys additional_candidates =", "'int', 'str', 'Remove' and others which should be # applied", "a ordered # map type is provided as input. key_value_map", "key_value_map = type(out)() for key, value in iterable: key_value_map[key] =", "elif self.extra != vol.REMOVE_EXTRA: if isinstance(key, string_types) and key_names: matches", "len(e.path) > len(key_path): raise if not error or len(e.path) >", "not allowed', key_path, candidates=matches)) else: errors.append(vol.Invalid('extra keys not allowed', key_path))", "relevant_candidates = itertools.chain(candidates_by_key.get(key, []), additional_candidates) # compare each given key/value", "or 'required key not provided' errors.append(vol.RequiredFieldInvalid(msg, path + [key])) if", "# Recursively compile schema _compiled_schema = {} for skey, svalue", "a key is selected, so if # the value is", "schemas[0] if isinstance(schema, vol.Schema): schema = schema.schema ret = super(_Schema,", "isinstance(key, vol.Remove): raise ValueError(\"ESPHome does not allow vol.Remove\") if isinstance(key,", "error.\"\"\" def __init__(self, schema, extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema, self).__init__(schema, extra=extra) #", "if isinstance(key, vol.Remove): raise ValueError(\"ESPHome does not allow vol.Remove\") if", "key_names = [] for skey in schema: if isinstance(skey, string_types):", "getattr(key, 'msg', None) or 'required key not provided' errors.append(vol.RequiredFieldInvalid(msg, path", "matching against different keys additional_candidates = [] candidates_by_key = {}", "cvalue))) else: # These are wildcards such as 'int', 'str',", "# applied to all keys additional_candidates.append((skey, (ckey, cvalue))) key_names =", "of candidates in the correct order, we want to 
apply", "= invalid_msg or 'mapping value' # Check some things that", "*arg, **kwargs) def ensure_multiple_invalid(err): if isinstance(err, vol.MultipleInvalid): return err return", "if isinstance(key, vol.Required)) # Keys that may have defaults all_default_keys", "vol.MultipleInvalid as e: exception_errors.extend(e.errors) except vol.Invalid as e: exception_errors.append(e) if", "vol.iteritems(schema): new_key = self._compile(skey) new_value = self._compile(svalue) _compiled_schema[skey] = (new_key,", "> len(error.path): error = e continue # Backtracking is not", "additional, noisy exception. required_keys.discard(skey) break # Key and value okay,", "schema if isinstance(key, vol.Optional)) # Recursively compile schema _compiled_schema =", "keep the logic in this method sane (so these may", "Build a map of all provided key-value pairs. # The", "ordering in case a ordered # map type is provided", "(ckey, cvalue) in relevant_candidates: try: new_key = ckey(key_path, key) except", "import string_types class ExtraKeysInvalid(vol.Invalid): def __init__(self, *arg, **kwargs): self.candidates =", "super(_Schema, self).__init__(schema, extra=extra) # List of extra schemas to apply", "candidates=matches)) else: errors.append(vol.Invalid('extra keys not allowed', key_path)) # for any", "isinstance(schema, vol.Schema): schema = schema.schema ret = super(_Schema, self).extend(schema, extra=extra)", "else: if self.extra == vol.ALLOW_EXTRA: out[key] = value elif self.extra", "schemas: return self.extend({}) if len(schemas) != 1: ret = self", "key_names: matches = difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra keys not allowed', key_path,", "if self.extra == vol.ALLOW_EXTRA: out[key] = value elif self.extra !=", "key was provided. # Discard the required key so it", "it. 
key_value_map[key.schema] = key.default() error = None errors = []", "elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types): key_names.append(skey.schema) def validate_mapping(path, iterable,", "+ [key] # Optimization. Validate against the matching key first,", "key, value in iterable: key_value_map[key] = value # Insert default", "# Keys that may be required all_required_keys = set(key for", "self._compile(skey) new_value = self._compile(svalue) _compiled_schema[skey] = (new_key, new_value) # Sort", "in key_value_map.items(): key_path = path + [key] # Optimization. Validate", "msg = getattr(key, 'msg', None) or 'required key not provided'", "it does not # create an additional, noisy exception. required_keys.discard(skey)", "difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra keys not allowed', key_path, candidates=matches)) else: errors.append(vol.Invalid('extra", "string_types class ExtraKeysInvalid(vol.Invalid): def __init__(self, *arg, **kwargs): self.candidates = kwargs.pop('candidates')", "type(out)() for key, value in iterable: key_value_map[key] = value #", "# schema key, (compiled key, compiled value) for skey, (ckey,", "= extra_schemas or [] def __call__(self, data): res = super(_Schema,", "value) for skey, (ckey, cvalue) in relevant_candidates: try: new_key =", "super(_Schema, self).__call__(data) for extra in self._extra_schemas: try: res = extra(res)", "the rest relevant_candidates = itertools.chain(candidates_by_key.get(key, []), additional_candidates) # compare each", "in self._extra_schemas: try: res = extra(res) except vol.Invalid as err:", "err: raise ensure_multiple_invalid(err) return res def _compile_mapping(self, schema, invalid_msg=None): invalid_msg", "= set(key for key in schema if isinstance(key, vol.Required)) #", "if len(err.path) <= len(key_path): err.error_type = invalid_msg errors.append(err) # If", "_Schema(validator) self._extra_schemas.append(validator) return self # pylint: 
disable=arguments-differ def extend(self, *schemas,", "in all_default_keys: if not isinstance(key.default, vol.Undefined) and key.schema not in", "vol.Marker) and isinstance(skey.schema, string_types): key_names.append(skey.schema) def validate_mapping(path, iterable, out): required_keys", "for extra in self._extra_schemas: try: res = extra(res) except vol.Invalid", "allow # mostly to keep the logic in this method", "ensure_multiple_invalid(err) return res def _compile_mapping(self, schema, invalid_msg=None): invalid_msg = invalid_msg", "may be required all_required_keys = set(key for key in schema", "cvalue(key_path, value) out[new_key] = cval except vol.MultipleInvalid as e: exception_errors.extend(e.errors)", "found in case it was # a Required() field. required_keys.discard(skey)", "esphome, but leave it here just in case) candidates =", "compiled value) for skey, (ckey, cvalue) in relevant_candidates: try: new_key", "raise ValueError(\"ESPHome does not allow vol.Extra\") if isinstance(key, vol.Remove): raise", "= set(key for key in schema if isinstance(key, vol.Optional)) #", "set(key for key in schema if isinstance(key, vol.Optional)) # Recursively", "# Insert default values for non-existing keys. 
for key in", "_compiled_schema = {} for skey, svalue in vol.iteritems(schema): new_key =", "schemas do not allow # mostly to keep the logic", "not allow vol.Extra\") if isinstance(key, vol.Remove): raise ValueError(\"ESPHome does not", "= [] for key, value in key_value_map.items(): key_path = path", "__init__(self, schema, extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema, self).__init__(schema, extra=extra) # List of", "vol.primitive_types): raise ValueError(\"All schema keys must be wrapped in cv.Required", "the logic in this method sane (so these may be", "return self.extend({}) if len(schemas) != 1: ret = self for", "in schema: if key is vol.Extra: raise ValueError(\"ESPHome does not", "# Keys that may have defaults all_default_keys = set(key for", "self._extra_schemas = extra_schemas or [] def __call__(self, data): res =", "\"\"\"Custom cv.Schema that prints similar keys on error.\"\"\" def __init__(self,", "return vol.MultipleInvalid(err) # pylint: disable=protected-access, unidiomatic-typecheck class _Schema(vol.Schema): \"\"\"Custom cv.Schema", "*arg, **kwargs): self.candidates = kwargs.pop('candidates') vol.Invalid.__init__(self, *arg, **kwargs) def ensure_multiple_invalid(err):", "not a very voluptuous-way/clean way of # doing things. 
self._extra_schemas", "additional_candidates = [] candidates_by_key = {} for skey, (ckey, cvalue)", "Key and value okay, mark as found in case it", "against the matching key first, then fallback to the rest", "self).__call__(data) for extra in self._extra_schemas: try: res = extra(res) except", "# List of extra schemas to apply after validation #", "keys left that weren't found and don't have defaults: for", "raise ValueError if not schemas: return self.extend({}) if len(schemas) !=", "optimization so that each # key in the data being", "if isinstance(schema, vol.Schema): schema = schema.schema ret = super(_Schema, self).extend(schema,", "len(schemas) != 1: ret = self for schema in schemas:", "case) candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After we have the list", "errors.append(vol.Invalid('extra keys not allowed', key_path)) # for any required keys", "[]).append((skey, (ckey, cvalue))) elif isinstance(skey, vol.Marker) and type(skey.schema) in vol.primitive_types:", "[]), additional_candidates) # compare each given key/value against all compiled", "been specified for this missing key, insert it. key_value_map[key.schema] =", "some # optimization so that each # key in the", "value in key_value_map.items(): key_path = path + [key] # Optimization.", "cvalue))) elif isinstance(skey, vol.Marker) and type(skey.schema) in vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey,", "isinstance(key, vol.Required)) # Keys that may have defaults all_default_keys =", "= extra(res) except vol.Invalid as err: raise ensure_multiple_invalid(err) return res", "in schemas: ret = ret.extend(schema) return ret schema = schemas[0]", "isinstance(key, string_types) and key_names: matches = difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra keys", "if len(schemas) != 1: ret = self for schema in", "Insert default values for non-existing keys. 
for key in all_default_keys:", "return self # pylint: disable=arguments-differ def extend(self, *schemas, **kwargs): extra", "for key in required_keys: msg = getattr(key, 'msg', None) or", "some things that ESPHome's schemas do not allow # mostly", "to apply some # optimization so that each # key", "as err: raise ensure_multiple_invalid(err) return res def _compile_mapping(self, schema, invalid_msg=None):", "cvalue) in relevant_candidates: try: new_key = ckey(key_path, key) except vol.Invalid", "_compiled_schema[skey] = (new_key, new_value) # Sort compiled schema (probably not", "len(err.path) <= len(key_path): err.error_type = invalid_msg errors.append(err) # If there", "if type(skey) in vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue))) elif isinstance(skey,", "extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema, self).__init__(schema, extra=extra) # List of extra schemas", "# pylint: disable=protected-access, unidiomatic-typecheck class _Schema(vol.Schema): \"\"\"Custom cv.Schema that prints", "(new_key, new_value) # Sort compiled schema (probably not necessary for", "the relevant schema keys only. # No point in matching", "(ckey, cvalue))) elif isinstance(skey, vol.Marker) and type(skey.schema) in vol.primitive_types: candidates_by_key.setdefault(skey.schema,", "does not # create an additional, noisy exception. required_keys.discard(skey) break", "a very voluptuous-way/clean way of # doing things. 
self._extra_schemas =", "performed once a key is selected, so if # the", "continue # Backtracking is not performed once a key is", "ValueError(\"All schema keys must be wrapped in cv.Required or cv.Optional\")", "vol.primitive_types: candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue))) elif isinstance(skey, vol.Marker) and type(skey.schema)", "wildcards such as 'int', 'str', 'Remove' and others which should", "= _Schema(validator) self._extra_schemas.append(validator) return self # pylint: disable=arguments-differ def extend(self,", "ckey(key_path, key) except vol.Invalid as e: if len(e.path) > len(key_path):", "be re-added if needed). for key in schema: if key", "**kwargs): self.candidates = kwargs.pop('candidates') vol.Invalid.__init__(self, *arg, **kwargs) def ensure_multiple_invalid(err): if", "itertools import voluptuous as vol from esphome.py_compat import string_types class", "# Optimization. Validate against the matching key first, then fallback", "not in key_value_map: # A default value has been specified", "to all keys additional_candidates.append((skey, (ckey, cvalue))) key_names = [] for", "isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types): key_names.append(skey.schema) def validate_mapping(path, iterable, out):", "and isinstance(skey.schema, string_types): key_names.append(skey.schema) def validate_mapping(path, iterable, out): required_keys =", "iterable: key_value_map[key] = value # Insert default values for non-existing", "# for any required keys left that weren't found and", "allow vol.Extra\") if isinstance(key, vol.Remove): raise ValueError(\"ESPHome does not allow", "raise vol.MultipleInvalid(errors) return out return validate_mapping def add_extra(self, validator): validator", "__call__(self, data): res = super(_Schema, self).__call__(data) for extra in self._extra_schemas:", "svalue in vol.iteritems(schema): new_key = self._compile(skey) new_value = self._compile(svalue) _compiled_schema[skey]", "if 
isinstance(err, vol.MultipleInvalid): return err return vol.MultipleInvalid(err) # pylint: disable=protected-access,", "{} for skey, svalue in vol.iteritems(schema): new_key = self._compile(skey) new_value", "input. key_value_map = type(out)() for key, value in iterable: key_value_map[key]", "string_types): key_names.append(skey) elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types): key_names.append(skey.schema) def", "be wrapped in cv.Required or cv.Optional\") # Keys that may", "[]).append((skey, (ckey, cvalue))) else: # These are wildcards such as", "has been specified for this missing key, insert it. key_value_map[key.schema]", "If there is a validation error for a required #", "voluptuous-way/clean way of # doing things. self._extra_schemas = extra_schemas or", "[] for skey in schema: if isinstance(skey, string_types): key_names.append(skey) elif", "in case it was # a Required() field. required_keys.discard(skey) break", "key is vol.Extra: raise ValueError(\"ESPHome does not allow vol.Extra\") if", "value has been specified for this missing key, insert it.", "= ckey(key_path, key) except vol.Invalid as e: if len(e.path) >", "error = None errors = [] for key, value in", "once a key is selected, so if # the value", "isinstance(err, vol.MultipleInvalid): return err return vol.MultipleInvalid(err) # pylint: disable=protected-access, unidiomatic-typecheck", "leave it here just in case) candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) #", "e: if len(e.path) > len(key_path): raise if not error or", "so that each # key in the data being validated", "not provided' errors.append(vol.RequiredFieldInvalid(msg, path + [key])) if errors: raise vol.MultipleInvalid(errors)", "means that the key was provided. 
# Discard the required", "except vol.MultipleInvalid as e: exception_errors.extend(e.errors) except vol.Invalid as e: exception_errors.append(e)", "isinstance(key, vol.Optional)) # Recursively compile schema _compiled_schema = {} for", "wrapped in cv.Required or cv.Optional\") # Keys that may be", "error = e continue # Backtracking is not performed once", "retain ordering in case a ordered # map type is", "except vol.Invalid as e: if len(e.path) > len(key_path): raise if", "Keys that may have defaults all_default_keys = set(key for key", "add_extra(self, validator): validator = _Schema(validator) self._extra_schemas.append(validator) return self # pylint:", "invalid we immediately throw an exception. exception_errors = [] try:", "should be # applied to all keys additional_candidates.append((skey, (ckey, cvalue)))", "= e continue # Backtracking is not performed once a", "key.schema not in key_value_map: # A default value has been", "weren't found and don't have defaults: for key in required_keys:", "schema: if key is vol.Extra: raise ValueError(\"ESPHome does not allow", "provided key-value pairs. 
# The type(out) is used to retain", "Should be used sparingly, as it's not a very voluptuous-way/clean", "= {} for skey, svalue in vol.iteritems(schema): new_key = self._compile(skey)", "= difflib.get_close_matches(key, key_names) errors.append(ExtraKeysInvalid('extra keys not allowed', key_path, candidates=matches)) else:", "for err in exception_errors: if len(err.path) <= len(key_path): err.error_type =", "candidates_by_key = {} for skey, (ckey, cvalue) in candidates: if", "required # key, this means that the key was provided.", "required key so it does not # create an additional,", "want to apply some # optimization so that each #", "in schema: if isinstance(skey, string_types): key_names.append(skey) elif isinstance(skey, vol.Marker) and", "ret = self for schema in schemas: ret = ret.extend(schema)", "we want to apply some # optimization so that each", "similar keys on error.\"\"\" def __init__(self, schema, extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema,", "for esphome, but leave it here just in case) candidates", "just in case) candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema)) # After we have", "try: new_key = ckey(key_path, key) except vol.Invalid as e: if", "if # the value is invalid we immediately throw an", "and value okay, mark as found in case it was", "required keys left that weren't found and don't have defaults:", "matching key first, then fallback to the rest relevant_candidates =", "cvalue))) key_names = [] for skey in schema: if isinstance(skey,", "if kwargs: raise ValueError if not schemas: return self.extend({}) if", "are wildcards such as 'int', 'str', 'Remove' and others which", "raise ensure_multiple_invalid(err) return res def _compile_mapping(self, schema, invalid_msg=None): invalid_msg =", "validate_mapping(path, iterable, out): required_keys = all_required_keys.copy() # Build a map", "candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue))) else: # These are wildcards 
such", "keys on error.\"\"\" def __init__(self, schema, extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema, self).__init__(schema,", "as e: exception_errors.extend(e.errors) except vol.Invalid as e: exception_errors.append(e) if exception_errors:", "out[new_key] = cval except vol.MultipleInvalid as e: exception_errors.extend(e.errors) except vol.Invalid", "exception_errors.append(e) if exception_errors: for err in exception_errors: if len(err.path) <=", "will be matched against the relevant schema keys only. #", "allowed', key_path)) # for any required keys left that weren't", "Recursively compile schema _compiled_schema = {} for skey, svalue in", "compiled key/values # schema key, (compiled key, compiled value) for", "that may have defaults all_default_keys = set(key for key in", "# A default value has been specified for this missing", "cval = cvalue(key_path, value) out[new_key] = cval except vol.MultipleInvalid as", "or cv.Optional\") # Keys that may be required all_required_keys =", "isinstance(key.default, vol.Undefined) and key.schema not in key_value_map: # A default", "try: res = extra(res) except vol.Invalid as err: raise ensure_multiple_invalid(err)", "class ExtraKeysInvalid(vol.Invalid): def __init__(self, *arg, **kwargs): self.candidates = kwargs.pop('candidates') vol.Invalid.__init__(self,", "raise if not error or len(e.path) > len(error.path): error =", "# These are wildcards such as 'int', 'str', 'Remove' and", "provided' errors.append(vol.RequiredFieldInvalid(msg, path + [key])) if errors: raise vol.MultipleInvalid(errors) return", "point in matching against different keys additional_candidates = [] candidates_by_key", "Optimization. Validate against the matching key first, then fallback to", "# create an additional, noisy exception. required_keys.discard(skey) break # Key", "the key was provided. 
# Discard the required key so", "all_required_keys = set(key for key in schema if isinstance(key, vol.Required))", "else: # These are wildcards such as 'int', 'str', 'Remove'", "unidiomatic-typecheck class _Schema(vol.Schema): \"\"\"Custom cv.Schema that prints similar keys on", "required_keys = all_required_keys.copy() # Build a map of all provided", "such as 'int', 'str', 'Remove' and others which should be", "'mapping value' # Check some things that ESPHome's schemas do", "cval except vol.MultipleInvalid as e: exception_errors.extend(e.errors) except vol.Invalid as e:", "so it does not # create an additional, noisy exception.", "if isinstance(key, vol.primitive_types): raise ValueError(\"All schema keys must be wrapped", "for any required keys left that weren't found and don't", "extra = kwargs.pop('extra', None) if kwargs: raise ValueError if not", "in this method sane (so these may be re-added if", "self._extra_schemas.append(validator) return self # pylint: disable=arguments-differ def extend(self, *schemas, **kwargs):", "in schema if isinstance(key, vol.Optional)) # Recursively compile schema _compiled_schema", "def __call__(self, data): res = super(_Schema, self).__call__(data) for extra in", "schemas: ret = ret.extend(schema) return ret schema = schemas[0] if", "extra schemas to apply after validation # Should be used", "self.extra != vol.REMOVE_EXTRA: if isinstance(key, string_types) and key_names: matches =", "if len(e.path) > len(key_path): raise if not error or len(e.path)", "compare each given key/value against all compiled key/values # schema", "vol.Remove\") if isinstance(key, vol.primitive_types): raise ValueError(\"All schema keys must be", "res = super(_Schema, self).__call__(data) for extra in self._extra_schemas: try: res", "skey, svalue in vol.iteritems(schema): new_key = self._compile(skey) new_value = self._compile(svalue)", "if key is vol.Extra: raise ValueError(\"ESPHome does not allow vol.Extra\")", "missing key, insert it. 
key_value_map[key.schema] = key.default() error = None", "for key, value in key_value_map.items(): key_path = path + [key]", "key in the data being validated will be matched against", "provided. # Discard the required key so it does not", "do not allow # mostly to keep the logic in", "isinstance(key, vol.primitive_types): raise ValueError(\"All schema keys must be wrapped in", "be used sparingly, as it's not a very voluptuous-way/clean way", "err in exception_errors: if len(err.path) <= len(key_path): err.error_type = invalid_msg", "vol.Extra: raise ValueError(\"ESPHome does not allow vol.Extra\") if isinstance(key, vol.Remove):", "key) except vol.Invalid as e: if len(e.path) > len(key_path): raise", "new_key = self._compile(skey) new_value = self._compile(svalue) _compiled_schema[skey] = (new_key, new_value)", "key/values # schema key, (compiled key, compiled value) for skey,", "The type(out) is used to retain ordering in case a", "# the value is invalid we immediately throw an exception.", "type is provided as input. key_value_map = type(out)() for key,", "insert it. key_value_map[key.schema] = key.default() error = None errors =", "map type is provided as input. key_value_map = type(out)() for", "elif isinstance(skey, vol.Marker) and type(skey.schema) in vol.primitive_types: candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey,", "# compare each given key/value against all compiled key/values #", "None) if kwargs: raise ValueError if not schemas: return self.extend({})", "validation error for a required # key, this means that", "on error.\"\"\" def __init__(self, schema, extra=vol.PREVENT_EXTRA, extra_schemas=None): super(_Schema, self).__init__(schema, extra=extra)", "schema (probably not necessary for esphome, but leave it here", "invalid_msg or 'mapping value' # Check some things that ESPHome's", "value is invalid we immediately throw an exception. exception_errors =", "After we have the list of candidates in the correct" ]
[ "in enumerate(self.data): r_neighs = search.kneighbors( [sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for neigh", "if self.order[row] == count: unlabeled_indexes.append(row) if isinstance(self.filter, str) and self.filter", "density threshold that let average neighbor is 1-2 percent of", "= defaultdict(set) rnn = defaultdict(set) cnt = defaultdict(int) while True:", "the data point The function also creates a copy of", "the predicted label is the same as the label of", "is :return: nan, r \"\"\" r = 1 nan =", "self.structure_stdnpf[\"label\"] != -1 ] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) count +=", "max_dis = dc dc = (max_dis + min_dis) / 2", "of r+2. The set of points that are within a", "= np.sort(list(self.distances.values()))[ position * 2 + self.n_id] return dc def", "rho = [0] * self.n_id for i in range(self.n_id): for", "are within a distance of r+1 is also a superset", "nneighs < 0.01: min_dis = dc else: max_dis = dc", "-1 ] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) count += 1 labeled_data", "1 self.order = dict.fromkeys(range(self.n_id), 0) count = self._label_next_point(count) self._label_previous_points(count) def", "self.data = None def __build_distance(self): \"\"\" Calculate distance dict. :return:", "within a distance of r+1 is a superset of the", "pos in zip(y_pred, prev_unlabeled): self.structure.at[pos, \"label\"] = new_label count +=", "if index < len(self.y) else -1, ] for index in", "distance dict, max distance, min distance \"\"\" from scipy.spatial.distance import", "r+1 is a superset of the set of points that", "points that are within a distance of r+2. 
The set", "distance of r+2 is :return: nan, r \"\"\" r =", "in a distance value and a cutoff value, and returns", "samples_labeled_index = samples_labeled.index.to_list() for next_row in next_rows: if next_row not", "nan, r): \"\"\" > The function takes in the dataframe,", "result, _ = self.filter.filter_original_complete( original, original_y, complete, complete_y ) else:", "count if len(prev_unlabeled) == 0: break unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled] lu", ":return: The indexes of the samples that are going to", "prev_row is not None: prev_unlabeled.append(prev_row) self.order[prev_row] = count if len(prev_unlabeled)", "self.__step_a() next_rows = samples_labeled[\"next\"].to_numpy() next_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list()", "next_rows = samples_labeled[\"next\"].to_numpy() next_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for", "The function takes the samples labeled in the previous step", "and cnt[r] == cnt[r - 1]: r -= 1 break", "dataframe with the labeled data and the unlabeled data, and", "the model and compares them to the complete data set.", "local_structure = self.structure_stdnpf.copy(deep=True) base_estimator = KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric ) labeled_data", "data[\"label\"].tolist()) enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index, _), pred in zip(nan_unlabeled.iterrows(),", "to the complete data set. 
If the result is not", "an ENN, then filter the original data, otherwise filter the", "): \"\"\"Semi Supervised Algorithm based on Density Peaks.\"\"\" self.dc =", "+= 1 rnn[neigh].add(index) cnt[r] = np.count_nonzero((np.array(list(nb.values())) == 0)) if r", "= (max_dis + min_dis) / 2 while True: nneighs =", "delta[old_i]: delta[old_i] = self.distances[(old_i, old_j)] nneigh[old_i] = old_j delta[sort_rho_idx[0]] =", "lambda_param ) self.structure_stdnpf.at[filtered_indexes, \"label\"] = filtered_labels else: labeled_data = self.structure_stdnpf.loc[", "structure data set. :param complete: the complete dataset :param result:", "np.argsort(-self.rho) delta, nneigh = [float(self.max_dis)] * self.n_id, [0] * self.n_id", "of the majority of the neighbors of the unlabeled data.", "also creates a copy of the dataframe called structure_stdnpf \"\"\"", "= self.classifier.predict(lu) for new_label, pos in zip(y_pred, prev_unlabeled): self.structure.at[pos, \"label\"]", "filtering if classifier is not None: if isinstance(classifier_params, dict): self.classifier", "@Author: <NAME> # @Time: 5/3/22 09:55 # @Version: 4.0 import", "< 0.0001: break return dc def __select_dc(self): \"\"\" Select the", "within a distance of r+3. And so on. The set", "delta value, the number of neighbors, and the structure of", "set of points that are within a distance of r+1", "is the same as the label of the majority of", "is an ENN, then filter the original data, otherwise filter", "= dc dc = (max_dis + min_dis) / 2 if", "= [] samples_labeled_index = samples_labeled.index.to_list() for next_row in next_rows: if", "result is not in the complete data set, it is", "import ENN from .utils import split class STDPNF: \"\"\" <NAME>.,", "180-191. \"\"\" def __init__( self, dc=None, distance_metric=\"euclidean\", k=3, gauss_cutoff=True, percent=2.0,", "= low self.u = u self.data = np.concatenate((low, u), axis=0)", "that are within a distance of r. 
The set of", "original data, otherwise filter the complete data :param complete: the", "nneigh[old_i] = old_j delta[sort_rho_idx[0]] = max(delta) return np.array(delta, np.float32), np.array(nneigh,", "None self.delta = None self.nneigh = None self.data = None", "bound of the data :param u: upper bound of the", "count def _fit_stdpnf(self): \"\"\" Self Training based on Density Peaks", "self.percent / 100) dc = np.sort(list(self.distances.values()))[ position * 2 +", "= anormal self.filtering = filtering if classifier is not None:", "samples. It then labels those samples and repeats the process", "and updates the order of the samples :param count: the", "self.filtering: self._fit_stdpnf() else: self._fit_without() def predict(self, src): \"\"\" Predict based", "anormal=True, filtering=False, classifier=None, classifier_params=None, filter_method=None, ): \"\"\"Semi Supervised Algorithm based", "going to be assigned to them. \"\"\" es = []", "and j :param dc: cutoff distance :return: 1 if dij", "= dict.fromkeys(range(self.n_id), 0) knn = defaultdict(set) rnn = defaultdict(set) cnt", "predict the labels of the unlabeled data. 
It then checks", "= new_label count += 1 def _label_next_point(self, count): \"\"\" >", "old_j)] nneigh[old_i] = old_j delta[sort_rho_idx[0]] = max(delta) return np.array(delta, np.float32),", "process until there are no more samples to label :param", "for neigh in nan[row_index]: if local_structure.loc[neigh, \"label\"] == pred: usefulness", "the samples labeled in the previous step and finds the", "consider :return: The indexes of the samples that are going", "nneighs = ( sum([1 for v in self.distances.values() if v", "self._label_previous_points(count) def _label_previous_points(self, count): \"\"\" > The function takes the", "self.__step_a() prev_rows = samples_labeled[\"previous\"].to_numpy() prev_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list()", "= distance_threshold self.anormal = anormal self.filtering = filtering if classifier", "classifier() else: self.classifier = None if filter_method is not None", "\"\"\" es = [] es_pred = [] local_structure = self.structure_stdnpf.copy(deep=True)", "0) knn = defaultdict(set) rnn = defaultdict(set) cnt = defaultdict(int)", "math.exp(-((dij / dc) ** 2)) def cutoff_func(dij, dc): \"\"\" If", "majority of the neighbors of the unlabeled data. If it", "def predict(self, src): \"\"\" Predict based on a trained classifier.", "nneigh = [float(self.max_dis)] * self.n_id, [0] * self.n_id delta[sort_rho_idx[0]] =", "the index of the nearest neighbor - previous: the index", "the labels that are going to be assigned to them.", "0.01: min_dis = dc else: max_dis = dc dc =", "(max_dis + min_dis) / 2 while True: nneighs = (", "_fit_without(self): \"\"\" The function takes in a classifier, and then", "data :param complete: the complete dataframe :param complete_y: the complete", "dij: distance between two nodes :param dc: The cutoff distance", "(2019). 
A self-training method based on density peaks and an", "the complete y values :return: The result is a dataframe", "None if filter_method is not None and filter_method != \"ENANE\":", "labeled samples. \"\"\" while True: samples_labeled = self.__step_a() next_rows =", "complete_y ) else: result, _ = self.filter.filter(complete, complete_y) return result", "= [ sample, int(self.nneigh[index]), None, self.y[index] if index < len(self.y)", "= None self.structure = None self.structure_stdnpf = None self.n_id =", "self.structure.loc[prev_unlabeled] lu = unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos", "of indices of the neighbors of the unlabeled data, and", "with the following columns: - sample: the data point -", "returns the value of the Gaussian function at that point", "If the distance between two atoms is less than the", "= [] for row in unlabeled_rows: if self.order[row] == count:", "if 0.01 <= nneighs <= 0.02: break # binary search", "the local density threshold, default is the method used in", "= {} for i in range(self.n_id): for j in range(i", "True: nneighs = ( sum([1 for v in self.distances.values() if", "[] samples_labeled_index = samples_labeled.index.to_list() for prev_row in prev_rows: if prev_row", "usefulness += 1 else: harmfulness += 1 if usefulness >=", "the minimum distance, the dc value, the rho value, the", "> The function takes in the dataframe, the list of", "= SVC() count = 1 self.order = dict.fromkeys(range(self.n_id), 0) count", "return result def fit(self, samples, y): \"\"\"Fit method.\"\"\" try: l,", "self.structure_stdnpf[\"label\"] != -1 ] complete = labeled_data[\"sample\"] complete_y = labeled_data[\"label\"]", "of the data point The function also creates a copy", "+= 1 return count def _fit_stdpnf(self): \"\"\" Self Training based", "import numpy as np import pandas as pd from sklearn.neighbors", "break # binary search if nneighs < 0.01: min_dis =", "(2018). 
Self-training semi-supervised classification based on density peaks of data.", "select the local density threshold that let average neighbor is", "in next_rows: if next_row not in samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row] =", "if dij < dc, else 0 \"\"\" return 1 if", "\"\"\" return math.exp(-((dij / dc) ** 2)) def cutoff_func(dij, dc):", "0 :param dij: distance between atoms i and j :param", "self.structure[index] = [ sample, int(self.nneigh[index]), None, self.y[index] if index <", "be labeled :return: The number of labeled samples. \"\"\" while", "from sklearn.preprocessing import LabelEncoder from sklearn.semi_supervised import SelfTrainingClassifier from sklearn.svm", "nodes. :return: dc that local density threshold \"\"\" max_dis, min_dis", "of points that are within a distance of r+2 is", "== cnt[r - 1]: r -= 1 break r +=", "a distance of r+2. The set of points that are", "u), axis=0) self.n_id = self.data.shape[0] self.distances, self.max_dis, self.min_dis = self.__build_distance()", "count if len(next_unlabeled) == 0: break unlabeled_next_of_labeled = self.structure.loc[next_unlabeled] lu", "<NAME>., & <NAME>. (2018). Self-training semi-supervised classification based on density", "have been labeled. 
\"\"\" samples_labeled = self.structure.loc[self.structure[\"label\"] != -1] sam_lab", "classifier_params=None, filter_method=None, ): \"\"\"Semi Supervised Algorithm based on Density Peaks.\"\"\"", "the point index \"\"\" def gauss_func(dij, dc): \"\"\" > The", "threshold \"\"\" max_dis, min_dis = self.max_dis, self.min_dis dc = (max_dis", "less than the cutoff distance, return 1, otherwise return 0", "LabelEncoder() le.fit(y) y = le.transform(y) self.__init_values(l, u, y) if self.filtering:", "called structure_stdnpf \"\"\" self.structure = dict.fromkeys(range(self.n_id)) for index, sample in", "isinstance(classifier_params, dict): self.classifier = classifier(**classifier_params) else: self.classifier = classifier() else:", "set. If the result is not in the complete data", "index is the point index \"\"\" def gauss_func(dij, dc): \"\"\"", "gaussian function. \"\"\" return math.exp(-((dij / dc) ** 2)) def", "of the unlabeled data. It then checks if the predicted", "= self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) def _results_to_structure(self,", "filter. \"\"\" self.__discover_structure() nan, lambda_param = self.__nan_search() self.classifier_stdpnf = KNeighborsClassifier(", "and uses the KNN classifier to predict the labels of", "data set. :param complete: the complete dataset :param result: the", "\"\"\" If the distance between two atoms is less than", "= percent self.density_threshold = density_threshold self.distance_threshold = distance_threshold self.anormal =", "enane_model = SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index,", "next point, and then labels the previous points, without filtering.", "in range(i + 1, self.n_id): temp = func(self.distances[(i, j)], self.dc)", "been labeled. 
\"\"\" samples_labeled = self.structure.loc[self.structure[\"label\"] != -1] sam_lab =", "def fit(self, samples, y): \"\"\"Fit method.\"\"\" try: l, u, y", "\"auto\": dc = self.__auto_select_dc() else: position = int(self.n_id * (self.n_id", "samples that have been labeled. \"\"\" samples_labeled = self.structure.loc[self.structure[\"label\"] !=", "# binary search if nneighs < 0.01: min_dis = dc", "= self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() )", "if nneighs < 0.01: min_dis = dc else: max_dis =", "> The function takes the labeled samples and trains the", "= samples_labeled.index.to_list() for prev_row in prev_rows: if prev_row not in", "the current iteration \"\"\" while True: samples_labeled = self.__step_a() prev_rows", "dc = (max_dis + min_dis) / 2 while True: nneighs", "neighbor of the nearest neighbor - label: the label of", "self.n_id): temp = func(self.distances[(i, j)], self.dc) rho[i] += temp rho[j]", "also a superset of the set of points that are", "self.n_id = None self.distances = None self.max_dis = None self.min_dis", "function at that point :param dij: distance between two nodes", "labeled :param fx: the indexes of the unlabeled data :param", "break unlabeled_next_of_labeled = self.structure.loc[next_unlabeled] lu = unlabeled_next_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu)", "self.y) count = 1 while count <= max(self.order.values()): unlabeled_rows =", "= le.transform(y) self.__init_values(l, u, y) if self.filtering: self._fit_stdpnf() else: self._fit_without()", "self.distances[(old_i, old_j)] < delta[old_i]: delta[old_i] = self.distances[(old_i, old_j)] nneigh[old_i] =", "filtered data. \"\"\" if isinstance(self.filter, ENN): original = pd.DataFrame(self.low) original_y", "labeled. 
\"\"\" samples_labeled = self.structure.loc[self.structure[\"label\"] != -1] sam_lab = samples_labeled[\"sample\"].to_list()", ":param dc: The cutoff distance :return: the value of the", "= labeled_data[\"label\"] result = self._if_filter(complete, complete_y) self._results_to_structure(complete, result) labeled_data =", "next samples are not labeled, it labels them and updates", "r: the number of neighbors to consider :return: The indexes", "indexes of the unlabeled data :param nan: a list of", "== 0: break unlabeled_next_of_labeled = self.structure.loc[next_unlabeled] lu = unlabeled_next_of_labeled[\"sample\"].to_list() y_pred", "unlabeled_indexes, nan, lambda_param ) self.structure_stdnpf.at[filtered_indexes, \"label\"] = filtered_labels else: labeled_data", "@Filename: DensityPeaks.py # @Author: <NAME> # @Time: 5/3/22 09:55 #", "and an extended parameter-free local noise filter for k nearest", "def __step_a(self): \"\"\" > The function takes the labeled samples", "\"\"\" results_to_unlabeled = [] for r in result.to_numpy(): is_in =", "more samples to label :param count: the number of the", "the distance between two atoms is less than the cutoff", "i in range(self.n_id): for j in range(i + 1, self.n_id):", "a dataframe with the filtered data. \"\"\" if isinstance(self.filter, ENN):", "classifier, and then labels the next point, and then labels", "it labels them and updates the order of the samples", "class STDPNF: \"\"\" <NAME>., <NAME>., & <NAME>. (2019). A self-training", "the samples that are going to be labeled and the", "= self.classifier.predict(lu) for new_label, pos in zip(y_pred, next_unlabeled): self.structure.at[pos, \"label\"]", "all points' min util to the higher local density point(which", "labels that are going to be assigned to them. \"\"\"", "labels the previous points, without filtering. 
\"\"\" if self.classifier is", "while count <= max(self.order.values()): unlabeled_rows = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] == -1", "filter for k nearest neighbor. Knowledge-Based Systems, 184, 104895. <NAME>.,", "self.gauss_cutoff else cutoff_func rho = [0] * self.n_id for i", "+= 1 def _label_next_point(self, count): \"\"\" > The function takes", "100) dc = np.sort(list(self.distances.values()))[ position * 2 + self.n_id] return", "of r+3. And so on. The set of points that", "self.order = None self.structure = None self.structure_stdnpf = None self.n_id", "data point The function also creates a copy of the", "r \"\"\" r = 1 nan = defaultdict(set) nb =", "\"label\"] ).transpose() self.structure_stdnpf = self.structure.copy(deep=True) def __step_a(self): \"\"\" > The", "distance_metric=\"euclidean\", k=3, gauss_cutoff=True, percent=2.0, density_threshold=None, distance_threshold=None, anormal=True, filtering=False, classifier=None, classifier_params=None,", "noise filter for k nearest neighbor. Knowledge-Based Systems, 184, 104895.", "prev_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for prev_row in prev_rows:", "temp rho[j] += temp return np.array(rho, np.float32) def __min_neighbor_and_distance(self): \"\"\"", "takes in a classifier, and then labels the next point,", "the under laying structure.\"\"\" self._fit_without() def __nan_search(self): \"\"\" For each", "self.n_id delta[sort_rho_idx[0]] = -1.0 for i in range(self.n_id): for j", "self.structure, index=[\"sample\", \"next\", \"previous\", \"label\"] ).transpose() self.structure_stdnpf = self.structure.copy(deep=True) def", "classifier(**classifier_params) else: self.classifier = classifier() else: self.classifier = None if", "local density. 
:return: local density vector that index is the", "labels of the data \"\"\" self.y = y self.low =", "else cutoff_func rho = [0] * self.n_id for i in", "if len(next_unlabeled) == 0: break unlabeled_next_of_labeled = self.structure.loc[next_unlabeled] lu =", "+= 1 labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist()", "\"\"\" def __init__( self, dc=None, distance_metric=\"euclidean\", k=3, gauss_cutoff=True, percent=2.0, density_threshold=None,", "that are within a distance of r+3. And so on.", "samples_labeled = self.structure.loc[self.structure[\"label\"] != -1] sam_lab = samples_labeled[\"sample\"].to_list() y_without =", "1]: r -= 1 break r += 1 for index", "list of indices of the neighbors of the unlabeled data,", "if classifier is not None: if isinstance(classifier_params, dict): self.classifier =", "es, es_pred def __init_values(self, low, u, y): \"\"\" It takes", "distance \"\"\" from scipy.spatial.distance import pdist, squareform distance_matrix = pdist(self.data,", "distance, return 1, otherwise return 0 :param dij: distance between", "the distances between the data points, the maximum distance, the", "def _if_filter(self, complete, complete_y): \"\"\" If the filter is an", "position * 2 + self.n_id] return dc def __local_density(self): \"\"\"", "samples_labeled[\"previous\"].to_numpy() prev_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for prev_row in", "value of the Gaussian function at that point :param dij:", "of all nodes. :return: dc that local density threshold \"\"\"", "dc) ** 2)) def cutoff_func(dij, dc): \"\"\" If the distance", "y_without = samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without) return samples_labeled def __discover_structure(self): \"\"\"Discovers", "<NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>. (2018). 
Self-training", "self.structure[self.structure[index][1]][2] is None: self.structure[self.structure[index][1]][2] = index self.structure = pd.DataFrame( self.structure,", "the set of points that are within a distance of", "cnt[r] == cnt[r - 1]: r -= 1 break r", "local_structure.loc[local_structure[\"label\"] != -1] nan_unlabeled = local_structure.loc[fx] data = pd.concat([labeled_data, nan_unlabeled],", "that local density threshold \"\"\" max_dis, min_dis = self.max_dis, self.min_dis", "dc def __local_density(self): \"\"\" Compute all points' local density. :return:", "index \"\"\" def gauss_func(dij, dc): \"\"\" > The function takes", "labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) count += 1 labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] !=", "[] samples_labeled_index = samples_labeled.index.to_list() for next_row in next_rows: if next_row", "complete dataframe :param complete_y: the complete y values :return: The", "fx, nan, r): \"\"\" > The function takes in the", "all nodes. :return: dc that local density threshold \"\"\" max_dis,", "and finds the next samples in the structure. 
If the", "self.gauss_cutoff = gauss_cutoff self.percent = percent self.density_threshold = density_threshold self.distance_threshold", "number of the next point to be labeled :return: The", "is the point index \"\"\" def gauss_func(dij, dc): \"\"\" >", "labeled, it labels them and updates the order of the", "results_to_unlabeled.append(r) for r in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\" ] =", ">= harmfulness: es.append(row_index) es_pred.append(pred) return es, es_pred def __init_values(self, low,", "knn[index].intersection(rnn[index]) return nan, r def __enane(self, fx, nan, r): \"\"\"", "the nearest neighbor of the nearest neighbor - label: the", "the indices of the neighbors of a sample :param r:", "= 1 self.order = dict.fromkeys(range(self.n_id), 0) count = self._label_next_point(count) self._label_previous_points(count)", ":param y: the labels of the data \"\"\" self.y =", "r -= 1 break r += 1 for index in", "of indices of the data to be labeled :param fx:", "within a distance of r, and the set of points", "= np.max(triangle_upper), np.min(triangle_upper) return distance, max_dis, min_dis def __auto_select_dc(self): \"\"\"", "0.01 <= nneighs <= 0.02: break # binary search if", "+ 1, self.n_id): temp = func(self.distances[(i, j)], self.dc) rho[i] +=", "points that are within a distance of r. The set", "= distance_matrix[i, j] distance[(j, i)] = distance_matrix[i, j] max_dis, min_dis", "le.fit(y) y = le.transform(y) self.__init_values(l, u, y) if self.filtering: self._fit_stdpnf()", "enumerate(self.data): self.structure[index] = [ sample, int(self.nneigh[index]), None, self.y[index] if index", "in the complete data set, it is added to the", "classifier. 
It then creates a new dataframe with the labeled", "same as the label of the majority of the neighbors", "def __init_values(self, low, u, y): \"\"\" It takes in the", "contains the indices of the neighbors of a sample :param", "of the data :param u: upper bound of the data", "_if_filter(self, complete, complete_y): \"\"\" If the filter is an ENN,", "r > 2 and cnt[r] == cnt[r - 1]: r", "the indexes of the unlabeled data :param nan: a list", "samples_labeled = self.__step_a() next_rows = samples_labeled[\"next\"].to_numpy() next_unlabeled = [] samples_labeled_index", "within a distance of r+2 is also a superset of", "\"\"\" Calculate distance dict. :return: distance dict, max distance, min", ":return: distance dict, max distance, min distance \"\"\" from scipy.spatial.distance", "- previous: the index of the nearest neighbor of the", "None: if isinstance(classifier_params, dict): self.classifier = classifier(**classifier_params) else: self.classifier =", "range(self.n_id): for j in range(0, i): old_i, old_j = sort_rho_idx[i],", "pandas as pd from sklearn.neighbors import KNeighborsClassifier, NearestNeighbors from sklearn.preprocessing", "np.array(nneigh, np.float32) def __structure(self): \"\"\" The function takes the data", "that are going to be assigned to them. \"\"\" es", "number of neighbors to consider :return: The indexes of the", "max(self.order.values()): unlabeled_rows = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] == -1 ].index.to_list() unlabeled_indexes =", "for index in range(self.n_id): nan[index] = knn[index].intersection(rnn[index]) return nan, r", "= distance_metric self.k = k self.gauss_cutoff = gauss_cutoff self.percent =", "= samples_labeled.index.to_list() for next_row in next_rows: if next_row not in", "within a distance of r+2. The set of points that", "data. It then checks if the predicted label is the", "then labels the previous points, without filtering. 
\"\"\" if self.classifier", "self.structure.at[pos, \"label\"] = new_label count += 1 return count def", "unlabeled data :param nan: a list of lists, where each", "2 and cnt[r] == cnt[r - 1]: r -= 1", "cutoff value, and returns the value of the Gaussian function", "dc]) / self.n_id**2 ) if 0.01 <= nneighs <= 0.02:", "The source image :return: The classifier is being returned. \"\"\"", "def __init__( self, dc=None, distance_metric=\"euclidean\", k=3, gauss_cutoff=True, percent=2.0, density_threshold=None, distance_threshold=None,", "self.__min_neighbor_and_distance() self.__structure() def _fit_without(self): \"\"\" The function takes in a", "the complete dataframe :param complete_y: the complete y values :return:", "else: self.classifier = None if filter_method is not None and", "samples and trains the classifier on them :return: The samples", "SVC() count = 1 self.order = dict.fromkeys(range(self.n_id), 0) count =", "math from collections import defaultdict import numpy as np import", "otherwise return 0 :param dij: distance between atoms i and", "!= -1 ] complete = labeled_data[\"sample\"] complete_y = labeled_data[\"label\"] result", "result, _ = self.filter.filter(complete, complete_y) return result def fit(self, samples,", "Density Peaks and a parameter-free noise filter. \"\"\" self.__discover_structure() nan,", "samples_labeled.index.to_list() for prev_row in prev_rows: if prev_row not in samples_labeled_index", "binary search if nneighs < 0.01: min_dis = dc else:", "self.n_id, [0] * self.n_id delta[sort_rho_idx[0]] = -1.0 for i in", "value, and returns the value of the Gaussian function at", "SelfTrainingClassifier from sklearn.svm import SVC from instance_selection import ENN from", "data to the list of indices of the data to", "ENN): original = pd.DataFrame(self.low) original_y = pd.DataFrame(self.y) result, _ =", "\"\"\" if self.rho is None: raise ValueError(\"Encountered rho as None.\")", "the next samples in the structure. 
If the next samples", "filtered_labels = self.__enane( unlabeled_indexes, nan, lambda_param ) self.structure_stdnpf.at[filtered_indexes, \"label\"] =", "the unlabeled data, and the number of neighbors to use", "samples are not labeled, it labels them and updates the", "of points that are within a distance of r. The", "for index, sample in enumerate(self.data): self.structure[index] = [ sample, int(self.nneigh[index]),", "__build_distance(self): \"\"\" Calculate distance dict. :return: distance dict, max distance,", "new dataframe with the labeled data and the unlabeled data,", "u, y = split(samples, y) except IndexError: raise ValueError(\"Dimensions do", "\"\"\" > The function takes the samples labeled in the", "self.rho = None self.delta = None self.nneigh = None self.data", "samples and repeats the process until there are no more", "of r+2 is :return: nan, r \"\"\" r = 1", "self.structure_stdnpf.at[filtered_indexes, \"label\"] = filtered_labels else: labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] !=", "neighbor - label: the label of the data point The", "self.order[row] == count: unlabeled_indexes.append(row) if isinstance(self.filter, str) and self.filter ==", "parameter-free local noise filter for k nearest neighbor. Knowledge-Based Systems,", "takes the samples labeled in the previous step and finds", "the number of the current iteration \"\"\" while True: samples_labeled", "points, without filtering. \"\"\" if self.classifier is None: self.classifier =", "= None self.structure_stdnpf = None self.n_id = None self.distances =", "complete_y = labeled_data[\"label\"] result = self._if_filter(complete, complete_y) self._results_to_structure(complete, result) labeled_data", "raise AssertionError(\"The model needs to be fitted first.\") return self.classifier.predict(src)", "dc that local density threshold \"\"\" if self.dc == \"auto\":", "is the nearest neighbor). 
:return: distance vector, nearest neighbor vector", "the Gaussian function at that point :param dij: distance between", "max(delta) return np.array(delta, np.float32), np.array(nneigh, np.float32) def __structure(self): \"\"\" The", "the list of indices of the unlabeled data, the list", "= dc else: max_dis = dc dc = (max_dis +", "= self.data.shape[0] self.distances, self.max_dis, self.min_dis = self.__build_distance() self.dc = self.__select_dc()", "index of the nearest neighbor of the nearest neighbor -", "= self.__local_density() self.delta, self.nneigh = self.__min_neighbor_and_distance() self.__structure() def _fit_without(self): \"\"\"", "nearest neighbor of the nearest neighbor - label: the label", "r_neighs = search.kneighbors( [sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for neigh in r_neighs:", "== pred: usefulness += 1 else: harmfulness += 1 if", "dij: distance between atoms i and j :param dc: cutoff", "complete data set, it is added to the structure data", "is not None and filter_method != \"ENANE\": self.filter = filter_method()", "delta[old_i] = self.distances[(old_i, old_j)] nneigh[old_i] = old_j delta[sort_rho_idx[0]] = max(delta)", "model and compares them to the complete data set. If", "structure_stdnpf \"\"\" self.structure = dict.fromkeys(range(self.n_id)) for index, sample in enumerate(self.data):", "False for c in complete: if np.array_equal(r, c): is_in =", "sample in enumerate(self.data): r_neighs = search.kneighbors( [sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for", "= [0] * self.n_id for i in range(self.n_id): for j", "labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) def _results_to_structure(self, complete, result): \"\"\" > This", ":param low: lower bound of the data :param u: upper", "local density point(which is the nearest neighbor). 
:return: distance vector,", "= u self.data = np.concatenate((low, u), axis=0) self.n_id = self.data.shape[0]", "paper, 'auto' is auto select. :return: dc that local density", "for j in range(0, i): old_i, old_j = sort_rho_idx[i], sort_rho_idx[j]", "self.nneigh = None self.data = None def __build_distance(self): \"\"\" Calculate", "the neighbors of the unlabeled data, and the number of", "bound of the data :param y: the labels of the", "and creates a dataframe with the following columns: - sample:", "temp return np.array(rho, np.float32) def __min_neighbor_and_distance(self): \"\"\" Compute all points'", "the previous points, without filtering. \"\"\" if self.classifier is None:", "the samples :param count: the number of the next point", "<NAME>., <NAME>., <NAME>., <NAME>., & <NAME>. (2018). Self-training semi-supervised classification", "\"\"\" self.y = y self.low = low self.u = u", "a trained classifier. :param src: The source image :return: The", "u, y): \"\"\" It takes in the lower and upper", "KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric ) labeled_data = local_structure.loc[local_structure[\"label\"] != -1] nan_unlabeled", "+= 1 if usefulness >= harmfulness: es.append(row_index) es_pred.append(pred) return es,", "if dij < dc else 0 func = gauss_func if", "rnn = defaultdict(set) cnt = defaultdict(int) while True: search =", "None self.nneigh = None self.data = None def __build_distance(self): \"\"\"", "1) triangle_upper = distance_matrix[triangle_upper] distance = {} for i in", "cutoff distance :return: 1 if dij < dc, else 0", "= 0 for neigh in nan[row_index]: if local_structure.loc[neigh, \"label\"] ==", "Calculate distance dict. 
:return: distance dict, max distance, min distance", "original_y, complete, complete_y ) else: result, _ = self.filter.filter(complete, complete_y)", "dc): \"\"\" > The function takes in a distance value", "neighbors of a sample :param r: the number of neighbors", "self._fit_stdpnf() else: self._fit_without() def predict(self, src): \"\"\" Predict based on", "the next point to be labeled :return: The number of", "def __build_distance(self): \"\"\" Calculate distance dict. :return: distance dict, max", "algorithm=\"kd_tree\") search.fit(self.data) for index, sample in enumerate(self.data): r_neighs = search.kneighbors(", "label of the data point The function also creates a", "None: self.classifier = SVC() count = 1 self.order = dict.fromkeys(range(self.n_id),", "= self.__step_a() prev_rows = samples_labeled[\"previous\"].to_numpy() prev_unlabeled = [] samples_labeled_index =", "if isinstance(self.filter, str) and self.filter == \"ENANE\": filtered_indexes, filtered_labels =", "= pd.DataFrame(self.low) original_y = pd.DataFrame(self.y) result, _ = self.filter.filter_original_complete( original,", "and a cutoff value, and returns the value of the", "samples :param count: the number of the next point to", "return dc def __select_dc(self): \"\"\" Select the local density threshold,", "= local_structure.loc[fx] data = pd.concat([labeled_data, nan_unlabeled], join=\"inner\") enane_model = SelfTrainingClassifier(base_estimator)", "list of lists, where each list contains the indices of", "on density peaks and an extended parameter-free local noise filter", "ValueError(\"Dimensions do not match.\") le = LabelEncoder() le.fit(y) y =", "of indices of the unlabeled data, the list of indices", "in prev_rows: if prev_row not in samples_labeled_index and prev_row is", "defaultdict(set) rnn = defaultdict(set) cnt = defaultdict(int) while True: search", "density_threshold=None, distance_threshold=None, anormal=True, filtering=False, classifier=None, classifier_params=None, 
filter_method=None, ): \"\"\"Semi Supervised", "no more samples to label :param count: the number of", "then it adds the index of the unlabeled data to", "pred in zip(nan_unlabeled.iterrows(), enane_pred): usefulness = 0 harmfulness = 0", "= pdist(self.data, metric=self.distance_metric) distance_matrix = squareform(distance_matrix) triangle_upper = np.triu_indices(self.data.shape[0], 1)", "NearestNeighbors(n_neighbors=r + 1, algorithm=\"kd_tree\") search.fit(self.data) for index, sample in enumerate(self.data):", "and the number of neighbors to use in the KNN", "= (max_dis + min_dis) / 2 if max_dis - min_dis", "dataframe, the list of indices of the unlabeled data, the", "the next samples are not labeled, it labels them and", "j)], self.dc) rho[i] += temp rho[j] += temp return np.array(rho,", "import SelfTrainingClassifier from sklearn.svm import SVC from instance_selection import ENN", "of points that are within a distance of r+2. The", "self.dc) rho[i] += temp rho[j] += temp return np.array(rho, np.float32)", "self.classifier_stdpnf = KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low, self.y) count =", "trains the classifier on them :return: The samples that have", "\"\"\" return 1 if dij < dc else 0 func", "next_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for next_row in next_rows:", "self.classifier = classifier() else: self.classifier = None if filter_method is", "!= -1 ] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) count += 1", "self.structure = pd.DataFrame( self.structure, index=[\"sample\", \"next\", \"previous\", \"label\"] ).transpose() self.structure_stdnpf", "the number of neighbors, and the structure of the data", "those samples and repeats the process until there are no", "range(i + 1, self.n_id): distance[(i, j)] = distance_matrix[i, j] distance[(j,", "index in range(self.n_id): nan[index] = 
knn[index].intersection(rnn[index]) return nan, r def", "return 1 if dij < dc else 0 func =", "labeled_data[\"sample\"] complete_y = labeled_data[\"label\"] result = self._if_filter(complete, complete_y) self._results_to_structure(complete, result)", "self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] complete = labeled_data[\"sample\"] complete_y =", "None self.u = None self.classifier_stdpnf = None self.order = None", "else: self._fit_without() def predict(self, src): \"\"\" Predict based on a", "neighbors, and the structure of the data :param low: lower", "es = [] es_pred = [] local_structure = self.structure_stdnpf.copy(deep=True) base_estimator", "nearest neighbor). :return: distance vector, nearest neighbor vector \"\"\" if", "the unlabeled data :param nan: a list of lists, where", "self.__init_values(l, u, y) if self.filtering: self._fit_stdpnf() else: self._fit_without() def predict(self,", "is_in: results_to_unlabeled.append(r) for r in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\" ]", "1 else: harmfulness += 1 if usefulness >= harmfulness: es.append(row_index)", "self.classifier.predict(lu) for new_label, pos in zip(y_pred, prev_unlabeled): self.structure.at[pos, \"label\"] =", "self.n_id**2 ) if 0.01 <= nneighs <= 0.02: break #", "checks if the predicted label is the same as the", "= dict.fromkeys(range(self.n_id), 0) count = self._label_next_point(count) self._label_previous_points(count) def _label_previous_points(self, count):", "True: samples_labeled = self.__step_a() next_rows = samples_labeled[\"next\"].to_numpy() next_unlabeled = []", "as None.\") sort_rho_idx = np.argsort(-self.rho) delta, nneigh = [float(self.max_dis)] *", "usefulness = 0 harmfulness = 0 for neigh in nan[row_index]:", "prev_unlabeled.append(prev_row) self.order[prev_row] = count if len(prev_unlabeled) == 0: break unlabeled_prev_of_labeled", ":return: dc that local density threshold \"\"\" max_dis, 
min_dis =", "dc=None, distance_metric=\"euclidean\", k=3, gauss_cutoff=True, percent=2.0, density_threshold=None, distance_threshold=None, anormal=True, filtering=False, classifier=None,", ") else: result, _ = self.filter.filter(complete, complete_y) return result def", "It takes in the lower and upper bounds of the", "the complete data set, it is added to the structure", "classifier=None, classifier_params=None, filter_method=None, ): \"\"\"Semi Supervised Algorithm based on Density", "higher local density point(which is the nearest neighbor). :return: distance", "search = NearestNeighbors(n_neighbors=r + 1, algorithm=\"kd_tree\") search.fit(self.data) for index, sample", "the labeled data and the unlabeled data, and uses the", "None: raise ValueError(\"Encountered rho as None.\") sort_rho_idx = np.argsort(-self.rho) delta,", "the complete dataset :param result: the result of the clustering", "dc = np.sort(list(self.distances.values()))[ position * 2 + self.n_id] return dc", "-1.0 for i in range(self.n_id): for j in range(0, i):", "the next point, and then labels the previous points, without", "nan_unlabeled = local_structure.loc[fx] data = pd.concat([labeled_data, nan_unlabeled], join=\"inner\") enane_model =", "= count if len(prev_unlabeled) == 0: break unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled]", "default is the method used in paper, 'auto' is auto", "the rho value, the delta value, the number of neighbors,", "IndexError: raise ValueError(\"Dimensions do not match.\") le = LabelEncoder() le.fit(y)", "nan, lambda_param = self.__nan_search() self.classifier_stdpnf = KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric )", "r_neighs: nb[neigh] += 1 rnn[neigh].add(index) cnt[r] = np.count_nonzero((np.array(list(nb.values())) == 0))", "This function takes the results of the model and compares", "atoms i and j :param dc: cutoff distance :return: 1", ":param complete: the complete dataset :param result: the result of", "import SVC 
from instance_selection import ENN from .utils import split", "method.\"\"\" try: l, u, y = split(samples, y) except IndexError:", "data set. If the result is not in the complete", "np.concatenate((low, u), axis=0) self.n_id = self.data.shape[0] self.distances, self.max_dis, self.min_dis =", "int(self.nneigh[index]), None, self.y[index] if index < len(self.y) else -1, ]", "self.__nan_search() self.classifier_stdpnf = KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low, self.y) count", "{} for i in range(self.n_id): for j in range(i +", "_), pred in zip(nan_unlabeled.iterrows(), enane_pred): usefulness = 0 harmfulness =", "j in range(i + 1, self.n_id): temp = func(self.distances[(i, j)],", "dict. :return: distance dict, max distance, min distance \"\"\" from", "break r += 1 for index in range(self.n_id): nan[index] =", "\"\"\" > The function takes in the dataframe, the list", "not in the complete data set, it is added to", "labeled_data[\"label\"] result = self._if_filter(complete, complete_y) self._results_to_structure(complete, result) labeled_data = self.structure_stdnpf.loc[", "scipy.spatial.distance import pdist, squareform distance_matrix = pdist(self.data, metric=self.distance_metric) distance_matrix =", "local density threshold that let average neighbor is 1-2 percent", "= None self.low = None self.u = None self.classifier_stdpnf =", "= SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index, _),", "original = pd.DataFrame(self.low) original_y = pd.DataFrame(self.y) result, _ = self.filter.filter_original_complete(", "delta[sort_rho_idx[0]] = -1.0 for i in range(self.n_id): for j in", "0)) if r > 2 and cnt[r] == cnt[r -", "the data :param low: lower bound of the data :param", "count = 1 while count <= max(self.order.values()): unlabeled_rows = 
self.structure_stdnpf.loc[", "raise ValueError(\"Dimensions do not match.\") le = LabelEncoder() le.fit(y) y", "prev_rows = samples_labeled[\"previous\"].to_numpy() prev_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for", "If the result is not in the complete data set,", "Compute all points' local density. :return: local density vector that", "count: the number of the current iteration \"\"\" while True:", "a distance of r. The set of points that are", "distance_matrix = squareform(distance_matrix) triangle_upper = np.triu_indices(self.data.shape[0], 1) triangle_upper = distance_matrix[triangle_upper]", "184, 104895. <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>.", "dict, max distance, min distance \"\"\" from scipy.spatial.distance import pdist,", "defaultdict(set) cnt = defaultdict(int) while True: search = NearestNeighbors(n_neighbors=r +", "__discover_structure(self): \"\"\"Discovers the under laying structure.\"\"\" self._fit_without() def __nan_search(self): \"\"\"", "le = LabelEncoder() le.fit(y) y = le.transform(y) self.__init_values(l, u, y)", "neighbor is 1-2 percent of all nodes. :return: dc that", "1, self.n_id): distance[(i, j)] = distance_matrix[i, j] distance[(j, i)] =", "_results_to_structure(self, complete, result): \"\"\" > This function takes the results", "self.min_dis dc = (max_dis + min_dis) / 2 while True:", "i in range(self.n_id): for j in range(0, i): old_i, old_j", "] for index in range(self.n_id): if self.structure[self.structure[index][1]][2] is None: self.structure[self.structure[index][1]][2]", "self.structure = None self.structure_stdnpf = None self.n_id = None self.distances", "distance = {} for i in range(self.n_id): for j in", "j)] = distance_matrix[i, j] distance[(j, i)] = distance_matrix[i, j] max_dis,", "self.y = y self.low = low self.u = u self.data", "classifier is being returned. 
\"\"\" if self.classifier is None: raise", "going to be labeled and the labels that are going", "and prev_row is not None: prev_unlabeled.append(prev_row) self.order[prev_row] = count if", "0 harmfulness = 0 for neigh in nan[row_index]: if local_structure.loc[neigh,", "superset of the set of points that are within a", "= None self.u = None self.classifier_stdpnf = None self.order =", "data, the list of indices of the neighbors of the", "of the samples :param count: the number of the next", "== \"ENANE\": filtered_indexes, filtered_labels = self.__enane( unlabeled_indexes, nan, lambda_param )", "self.k = k self.gauss_cutoff = gauss_cutoff self.percent = percent self.density_threshold", "within a distance of r+2 is :return: nan, r \"\"\"", "of the next point to be labeled :return: The number", "= gauss_cutoff self.percent = percent self.density_threshold = density_threshold self.distance_threshold =", "* self.percent / 100) dc = np.sort(list(self.distances.values()))[ position * 2", "let average neighbor is 1-2 percent of all nodes. :return:", "self.order[prev_row] = count if len(prev_unlabeled) == 0: break unlabeled_prev_of_labeled =", "in samples_labeled_index and prev_row is not None: prev_unlabeled.append(prev_row) self.order[prev_row] =", "is not None: if isinstance(classifier_params, dict): self.classifier = classifier(**classifier_params) else:", "KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low, self.y) count = 1 while", "STDPNF: \"\"\" <NAME>., <NAME>., & <NAME>. (2019). A self-training method", "creates a dataframe with the following columns: - sample: the", "\"\"\" Select the local density threshold, default is the method", "the majority of the neighbors of the unlabeled data. 
If", "= int(self.n_id * (self.n_id + 1) / 2 * self.percent", "raise ValueError(\"Encountered rho as None.\") sort_rho_idx = np.argsort(-self.rho) delta, nneigh", "dc, else 0 \"\"\" return 1 if dij < dc", "!= -1] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) def _results_to_structure(self, complete, result):", "is not None: prev_unlabeled.append(prev_row) self.order[prev_row] = count if len(prev_unlabeled) ==", "The function takes the labeled samples and trains the classifier", "= np.argsort(-self.rho) delta, nneigh = [float(self.max_dis)] * self.n_id, [0] *", "-*- coding:utf-8 -*- # @Filename: DensityPeaks.py # @Author: <NAME> #", "= index self.structure = pd.DataFrame( self.structure, index=[\"sample\", \"next\", \"previous\", \"label\"]", "If the next samples are not labeled, it labels them", ":return: The classifier is being returned. \"\"\" if self.classifier is", "not is_in: results_to_unlabeled.append(r) for r in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\"", "count += 1 def _label_next_point(self, count): \"\"\" > The function", "in complete: if np.array_equal(r, c): is_in = True if not", "distance :return: 1 if dij < dc, else 0 \"\"\"", "compares them to the complete data set. If the result", "distances between the data points, the maximum distance, the minimum", "* (self.n_id + 1) / 2 * self.percent / 100)", "then checks if the predicted label is the same as", "None self.low = None self.u = None self.classifier_stdpnf = None", "= None self.min_dis = None self.rho = None self.delta =", "neighbor). 
:return: distance vector, nearest neighbor vector \"\"\" if self.rho", "metric=self.distance_metric ) labeled_data = local_structure.loc[local_structure[\"label\"] != -1] nan_unlabeled = local_structure.loc[fx]", "dataframe called structure_stdnpf \"\"\" self.structure = dict.fromkeys(range(self.n_id)) for index, sample", "r+2 is :return: nan, r \"\"\" r = 1 nan", "nan, lambda_param ) self.structure_stdnpf.at[filtered_indexes, \"label\"] = filtered_labels else: labeled_data =", "nan[index] = knn[index].intersection(rnn[index]) return nan, r def __enane(self, fx, nan,", "filter_method != \"ENANE\": self.filter = filter_method() elif isinstance(filter_method, str) and", "uses the KNN classifier to predict the labels of the", "the data :param y: the labels of the data \"\"\"", "peaks and an extended parameter-free local noise filter for k", "samples, y): \"\"\"Fit method.\"\"\" try: l, u, y = split(samples,", "a superset of the set of points that are within", ":param result: the result of the clustering \"\"\" results_to_unlabeled =", "= KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric ) labeled_data = local_structure.loc[local_structure[\"label\"] != -1]", "1 while count <= max(self.order.values()): unlabeled_rows = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] ==", "if next_row not in samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row] = count if", "the classifier on them :return: The samples that have been", "== 0: break unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled] lu = unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred", "= filtering if classifier is not None: if isinstance(classifier_params, dict):", "count += 1 return count def _fit_stdpnf(self): \"\"\" Self Training", "func = gauss_func if self.gauss_cutoff else cutoff_func rho = [0]", "self.nneigh = self.__min_neighbor_and_distance() self.__structure() def _fit_without(self): \"\"\" The function takes", "= LabelEncoder() le.fit(y) y = 
le.transform(y) self.__init_values(l, u, y) if", ":param dc: cutoff distance :return: 1 if dij < dc,", "enane_pred): usefulness = 0 harmfulness = 0 for neigh in", "clustering \"\"\" results_to_unlabeled = [] for r in result.to_numpy(): is_in", "an extended parameter-free local noise filter for k nearest neighbor.", ":return: The result is a dataframe with the filtered data.", "are within a distance of r+1 is a superset of", "the data :param u: upper bound of the data :param", "data, otherwise filter the complete data :param complete: the complete", "u self.data = np.concatenate((low, u), axis=0) self.n_id = self.data.shape[0] self.distances,", "new_label, pos in zip(y_pred, next_unlabeled): self.structure.at[pos, \"label\"] = new_label count", "of points that are within a distance of r+1 is", "them :return: The samples that have been labeled. \"\"\" samples_labeled", "bounds of the data, and the data itself, and then", "points, the maximum distance, the minimum distance, the dc value,", "if np.array_equal(r, c): is_in = True if not is_in: results_to_unlabeled.append(r)", "\"\"\" It takes in the lower and upper bounds of", "for v in self.distances.values() if v < dc]) / self.n_id**2", "neighbor. Knowledge-Based Systems, 184, 104895. <NAME>., <NAME>., <NAME>., <NAME>., <NAME>.,", "np.array_equal(r, c): is_in = True if not is_in: results_to_unlabeled.append(r) for", "[ sample, int(self.nneigh[index]), None, self.y[index] if index < len(self.y) else", "import pdist, squareform distance_matrix = pdist(self.data, metric=self.distance_metric) distance_matrix = squareform(distance_matrix)", "within a distance of r+1 is also a superset of", "points that are within a distance of r+2 is also", "are within a distance of r+2 is :return: nan, r", "j] distance[(j, i)] = distance_matrix[i, j] max_dis, min_dis = np.max(triangle_upper),", "and returns the value of the Gaussian function at that", "filtering. 
\"\"\" if self.classifier is None: self.classifier = SVC() count", "function also creates a copy of the dataframe called structure_stdnpf", "creates a copy of the dataframe called structure_stdnpf \"\"\" self.structure", "\"label\"] == pred: usefulness += 1 else: harmfulness += 1", "if isinstance(self.filter, ENN): original = pd.DataFrame(self.low) original_y = pd.DataFrame(self.y) result,", "_label_previous_points(self, count): \"\"\" > The function takes the samples labeled", "dataframe :param complete_y: the complete y values :return: The result", "result is a dataframe with the filtered data. \"\"\" if", "def __local_density(self): \"\"\" Compute all points' local density. :return: local", "= self.__auto_select_dc() else: position = int(self.n_id * (self.n_id + 1)", "Neurocomputing, 275, 180-191. \"\"\" def __init__( self, dc=None, distance_metric=\"euclidean\", k=3,", "in self.distances.values() if v < dc]) / self.n_id**2 ) if", "self.y[index] if index < len(self.y) else -1, ] for index", "0 for neigh in nan[row_index]: if local_structure.loc[neigh, \"label\"] == pred:", "r in result.to_numpy(): is_in = False for c in complete:", "and self.filter == \"ENANE\": filtered_indexes, filtered_labels = self.__enane( unlabeled_indexes, nan,", "sklearn.semi_supervised import SelfTrainingClassifier from sklearn.svm import SVC from instance_selection import", "value and a cutoff value, and returns the value of", "The classifier is being returned. 
\"\"\" if self.classifier is None:", "if not is_in: results_to_unlabeled.append(r) for r in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][", "__init__( self, dc=None, distance_metric=\"euclidean\", k=3, gauss_cutoff=True, percent=2.0, density_threshold=None, distance_threshold=None, anormal=True,", "else: self.classifier = classifier() else: self.classifier = None if filter_method", "a sample :param r: the number of neighbors to consider", "neigh in nan[row_index]: if local_structure.loc[neigh, \"label\"] == pred: usefulness +=", "min distance \"\"\" from scipy.spatial.distance import pdist, squareform distance_matrix =", "src): \"\"\" Predict based on a trained classifier. :param src:", "and repeats the process until there are no more samples", "takes the labeled samples and trains the classifier on them", "of the nearest neighbor - previous: the index of the", "in unlabeled_rows: if self.order[row] == count: unlabeled_indexes.append(row) if isinstance(self.filter, str)", "within a distance of r. The set of points that", "values :return: The result is a dataframe with the filtered", "None: prev_unlabeled.append(prev_row) self.order[prev_row] = count if len(prev_unlabeled) == 0: break", "return math.exp(-((dij / dc) ** 2)) def cutoff_func(dij, dc): \"\"\"", "if v < dc]) / self.n_id**2 ) if 0.01 <=", "labeled :return: The number of labeled samples. \"\"\" while True:", "data. \"\"\" if isinstance(self.filter, ENN): original = pd.DataFrame(self.low) original_y =", "labels them and updates the order of the samples :param", "then filter the original data, otherwise filter the complete data", "of the gaussian function. 
\"\"\" return math.exp(-((dij / dc) **", "!= -1] sam_lab = samples_labeled[\"sample\"].to_list() y_without = samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without)", "that are within a distance of r+1 is also a", "if r > 2 and cnt[r] == cnt[r - 1]:", "y = split(samples, y) except IndexError: raise ValueError(\"Dimensions do not", "self.classifier_stdpnf = None self.order = None self.structure = None self.structure_stdnpf", "max_dis, min_dis = np.max(triangle_upper), np.min(triangle_upper) return distance, max_dis, min_dis def", "2)) def cutoff_func(dij, dc): \"\"\" If the distance between two", "= self.__build_distance() self.dc = self.__select_dc() self.rho = self.__local_density() self.delta, self.nneigh", "based on density peaks and an extended parameter-free local noise", "neighbors of the unlabeled data, and the number of neighbors", "each list contains the indices of the neighbors of a", "self-training method based on density peaks and an extended parameter-free", "return distance, max_dis, min_dis def __auto_select_dc(self): \"\"\" Auto select the", "1 break r += 1 for index in range(self.n_id): nan[index]", "= None self.nneigh = None self.data = None def __build_distance(self):", "rnn[neigh].add(index) cnt[r] = np.count_nonzero((np.array(list(nb.values())) == 0)) if r > 2", "data :param nan: a list of lists, where each list", "4.0 import math from collections import defaultdict import numpy as", "distance, the dc value, the rho value, the delta value,", "_ = self.filter.filter(complete, complete_y) return result def fit(self, samples, y):", "zip(y_pred, prev_unlabeled): self.structure.at[pos, \"label\"] = new_label count += 1 def", ") self.structure_stdnpf.at[filtered_indexes, \"label\"] = filtered_labels else: labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"]", "low: lower bound of the data :param u: upper bound", "Compute all points' min util to the higher local density", "104895. 
<NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>. (2018).", "the result of the clustering \"\"\" results_to_unlabeled = [] for", "= density_threshold self.distance_threshold = distance_threshold self.anormal = anormal self.filtering =", "self.structure.copy(deep=True) def __step_a(self): \"\"\" > The function takes the labeled", "None self.order = None self.structure = None self.structure_stdnpf = None", "# @Filename: DensityPeaks.py # @Author: <NAME> # @Time: 5/3/22 09:55", "self.distances[(old_i, old_j)] nneigh[old_i] = old_j delta[sort_rho_idx[0]] = max(delta) return np.array(delta,", "points that are within a distance of r, and the", "point :param dij: distance between two nodes :param dc: The", "dc else 0 func = gauss_func if self.gauss_cutoff else cutoff_func", "on Density Peaks and a parameter-free noise filter. \"\"\" self.__discover_structure()", "pdist, squareform distance_matrix = pdist(self.data, metric=self.distance_metric) distance_matrix = squareform(distance_matrix) triangle_upper", "sample: the data point - next: the index of the", "= func(self.distances[(i, j)], self.dc) rho[i] += temp rho[j] += temp", "If it is, then it adds the index of the", "1 rnn[neigh].add(index) cnt[r] = np.count_nonzero((np.array(list(nb.values())) == 0)) if r >", "labeled_data[\"label\"].tolist() ) def _results_to_structure(self, complete, result): \"\"\" > This function", "auto select. :return: dc that local density threshold \"\"\" if", "func(self.distances[(i, j)], self.dc) rho[i] += temp rho[j] += temp return", "A self-training method based on density peaks and an extended", "the same as the label of the majority of the", "find the set of points that are within a distance", "def _label_next_point(self, count): \"\"\" > The function takes the samples", "r+1. 
The set of points that are within a distance", "v < dc]) / self.n_id**2 ) if 0.01 <= nneighs", "dc dc = (max_dis + min_dis) / 2 if max_dis", "and the nearest neighbor indices and creates a dataframe with", "\"\"\" while True: samples_labeled = self.__step_a() prev_rows = samples_labeled[\"previous\"].to_numpy() prev_unlabeled", "n_neighbors=self.k, metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low, self.y) count = 1 while count", "/ dc) ** 2)) def cutoff_func(dij, dc): \"\"\" If the", "= KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low, self.y) count = 1", "True: samples_labeled = self.__step_a() prev_rows = samples_labeled[\"previous\"].to_numpy() prev_unlabeled = []", "== -1 ].index.to_list() unlabeled_indexes = [] for row in unlabeled_rows:", "275, 180-191. \"\"\" def __init__( self, dc=None, distance_metric=\"euclidean\", k=3, gauss_cutoff=True,", "try: l, u, y = split(samples, y) except IndexError: raise", "labeled in the previous step and finds the next samples", "python # -*- coding:utf-8 -*- # @Filename: DensityPeaks.py # @Author:", "prev_rows: if prev_row not in samples_labeled_index and prev_row is not", ":return: 1 if dij < dc, else 0 \"\"\" return", "min_dis < 0.0001: break return dc def __select_dc(self): \"\"\" Select", ":param dij: distance between two nodes :param dc: The cutoff", "= knn[index].intersection(rnn[index]) return nan, r def __enane(self, fx, nan, r):", "i): old_i, old_j = sort_rho_idx[i], sort_rho_idx[j] if self.distances[(old_i, old_j)] <", "range(self.n_id): for j in range(i + 1, self.n_id): temp =", "complete, result): \"\"\" > This function takes the results of", "y_without) return samples_labeled def __discover_structure(self): \"\"\"Discovers the under laying structure.\"\"\"", "numpy as np import pandas as pd from sklearn.neighbors import", "the result is not in the complete data set, it", "if self.filtering: self._fit_stdpnf() else: self._fit_without() 
def predict(self, src): \"\"\" Predict", "the structure data set. :param complete: the complete dataset :param", "distance_matrix[i, j] distance[(j, i)] = distance_matrix[i, j] max_dis, min_dis =", "and then labels the previous points, without filtering. \"\"\" if", "original_y = pd.DataFrame(self.y) result, _ = self.filter.filter_original_complete( original, original_y, complete,", "\"label\"] = new_label count += 1 return count def _fit_stdpnf(self):", "of the unlabeled data, the list of indices of the", "+= temp return np.array(rho, np.float32) def __min_neighbor_and_distance(self): \"\"\" Compute all", "nan_unlabeled], join=\"inner\") enane_model = SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist())", "a distance of r+2 is also a superset of the", "for neigh in r_neighs: nb[neigh] += 1 rnn[neigh].add(index) cnt[r] =", "self.order[next_row] = count if len(next_unlabeled) == 0: break unlabeled_next_of_labeled =", "0: break unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled] lu = unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred =", "a list of lists, where each list contains the indices", "if prev_row not in samples_labeled_index and prev_row is not None:", "__enane(self, fx, nan, r): \"\"\" > The function takes in", "samples_labeled[\"next\"].to_numpy() next_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for next_row in", "self.distances, self.max_dis, self.min_dis = self.__build_distance() self.dc = self.__select_dc() self.rho =", "\"\"\" from scipy.spatial.distance import pdist, squareform distance_matrix = pdist(self.data, metric=self.distance_metric)", "@Version: 4.0 import math from collections import defaultdict import numpy", "None self.structure_stdnpf = None self.n_id = None self.distances = None", "the dc value, the rho value, the delta value, the", "of neighbors to use in the KNN 
classifier. It then", "self.n_id): distance[(i, j)] = distance_matrix[i, j] distance[(j, i)] = distance_matrix[i,", "nb = dict.fromkeys(range(self.n_id), 0) knn = defaultdict(set) rnn = defaultdict(set)", "label :param count: the number of the current iteration \"\"\"", "self.structure[self.structure[index][1]][2] = index self.structure = pd.DataFrame( self.structure, index=[\"sample\", \"next\", \"previous\",", "r+1 is also a superset of the set of points", "dc): \"\"\" If the distance between two atoms is less", "function. \"\"\" return math.exp(-((dij / dc) ** 2)) def cutoff_func(dij,", "complete_y) self._results_to_structure(complete, result) labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ]", "self.structure.loc[next_unlabeled] lu = unlabeled_next_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos", "data \"\"\" self.y = y self.low = low self.u =", "data and the nearest neighbor indices and creates a dataframe", "count <= max(self.order.values()): unlabeled_rows = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] == -1 ].index.to_list()", "-1] sam_lab = samples_labeled[\"sample\"].to_list() y_without = samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without) return", "upper bounds of the data, and the data itself, and", "return count def _fit_stdpnf(self): \"\"\" Self Training based on Density", "= 1 while count <= max(self.order.values()): unlabeled_rows = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"]", "None self.structure = None self.structure_stdnpf = None self.n_id = None", "a distance of r, and the set of points that", "neighbor vector \"\"\" if self.rho is None: raise ValueError(\"Encountered rho", "import split class STDPNF: \"\"\" <NAME>., <NAME>., & <NAME>. 
(2019).", "enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index, _), pred in", "\"\"\" self.structure = dict.fromkeys(range(self.n_id)) for index, sample in enumerate(self.data): self.structure[index]", "be assigned to them. \"\"\" es = [] es_pred =", "unlabeled data, and uses the KNN classifier to predict the", "= None self.rho = None self.delta = None self.nneigh =", "a distance of r+1. The set of points that are", "self.distances.values() if v < dc]) / self.n_id**2 ) if 0.01", "and the set of points that are within a distance", "on density peaks of data. Neurocomputing, 275, 180-191. \"\"\" def", "self.max_dis, self.min_dis dc = (max_dis + min_dis) / 2 while", "in range(0, i): old_i, old_j = sort_rho_idx[i], sort_rho_idx[j] if self.distances[(old_i,", "vector that index is the point index \"\"\" def gauss_func(dij,", "r, and the set of points that are within a", "updates the order of the samples :param count: the number", ":return: nan, r \"\"\" r = 1 nan = defaultdict(set)", "ENN, then filter the original data, otherwise filter the complete", "<NAME>., <NAME>., & <NAME>. (2019). A self-training method based on", "filter the original data, otherwise filter the complete data :param", "for next_row in next_rows: if next_row not in samples_labeled_index: next_unlabeled.append(next_row)", "\"\"\" if self.classifier is None: raise AssertionError(\"The model needs to", "is not in the complete data set, it is added", "threshold \"\"\" if self.dc == \"auto\": dc = self.__auto_select_dc() else:", "= None self.delta = None self.nneigh = None self.data =", "<NAME>., <NAME>., & <NAME>. (2018). Self-training semi-supervised classification based on", "\"\"\" <NAME>., <NAME>., & <NAME>. (2019). 
A self-training method based", "else: labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] complete =", "np.float32) def __min_neighbor_and_distance(self): \"\"\" Compute all points' min util to", "def gauss_func(dij, dc): \"\"\" > The function takes in a", "and filter_method != \"ENANE\": self.filter = filter_method() elif isinstance(filter_method, str)", "self.classifier = None if filter_method is not None and filter_method", "sample in enumerate(self.data): self.structure[index] = [ sample, int(self.nneigh[index]), None, self.y[index]", "< 0.01: min_dis = dc else: max_dis = dc dc", "is None: self.classifier = SVC() count = 1 self.order =", "= unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos in zip(y_pred,", "number of labeled samples. \"\"\" while True: samples_labeled = self.__step_a()", "of the unlabeled data, and the number of neighbors to", "self.structure.loc[self.structure[\"label\"] != -1] sam_lab = samples_labeled[\"sample\"].to_list() y_without = samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab,", "upper bound of the data :param y: the labels of", "density_threshold self.distance_threshold = distance_threshold self.anormal = anormal self.filtering = filtering", "extended parameter-free local noise filter for k nearest neighbor. Knowledge-Based", "1, self.n_id): temp = func(self.distances[(i, j)], self.dc) rho[i] += temp", "with the filtered data. \"\"\" if isinstance(self.filter, ENN): original =", "__init_values(self, low, u, y): \"\"\" It takes in the lower", "the unlabeled data. 
It then checks if the predicted label", "instance_selection import ENN from .utils import split class STDPNF: \"\"\"", "es_pred.append(pred) return es, es_pred def __init_values(self, low, u, y): \"\"\"", "result): \"\"\" > This function takes the results of the", "cutoff_func(dij, dc): \"\"\" If the distance between two atoms is", "for r in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\" ] = -1", "self.classifier = SVC() count = 1 self.order = dict.fromkeys(range(self.n_id), 0)", "data. If it is, then it adds the index of", "of the samples that are going to be labeled and", "finds the next samples in the structure. If the next", "distance[(j, i)] = distance_matrix[i, j] max_dis, min_dis = np.max(triangle_upper), np.min(triangle_upper)", "* self.n_id for i in range(self.n_id): for j in range(i", "unlabeled_rows = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] == -1 ].index.to_list() unlabeled_indexes = []", "under laying structure.\"\"\" self._fit_without() def __nan_search(self): \"\"\" For each point,", "@Time: 5/3/22 09:55 # @Version: 4.0 import math from collections", "complete y values :return: The result is a dataframe with", "len(prev_unlabeled) == 0: break unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled] lu = unlabeled_prev_of_labeled[\"sample\"].to_list()", "result.to_numpy(): is_in = False for c in complete: if np.array_equal(r,", "self.__structure() def _fit_without(self): \"\"\" The function takes in a classifier,", "label of the majority of the neighbors of the unlabeled", "vector, nearest neighbor vector \"\"\" if self.rho is None: raise", "and the structure of the data :param low: lower bound", "except IndexError: raise ValueError(\"Dimensions do not match.\") le = LabelEncoder()", "it is added to the structure data set. :param complete:", "c): is_in = True if not is_in: results_to_unlabeled.append(r) for r", "Predict based on a trained classifier. 
:param src: The source", "= self.__min_neighbor_and_distance() self.__structure() def _fit_without(self): \"\"\" The function takes in", "1 if dij < dc else 0 func = gauss_func", "\"label\" ] = -1 def _if_filter(self, complete, complete_y): \"\"\" If", "l, u, y = split(samples, y) except IndexError: raise ValueError(\"Dimensions", "of points that are within a distance of r+3. And", "\"\"\" r = 1 nan = defaultdict(set) nb = dict.fromkeys(range(self.n_id),", "point(which is the nearest neighbor). :return: distance vector, nearest neighbor", "indices of the neighbors of the unlabeled data, and the", "while True: search = NearestNeighbors(n_neighbors=r + 1, algorithm=\"kd_tree\") search.fit(self.data) for", "if max_dis - min_dis < 0.0001: break return dc def", "value, the rho value, the delta value, the number of", "= k self.gauss_cutoff = gauss_cutoff self.percent = percent self.density_threshold =", "complete, complete_y): \"\"\" If the filter is an ENN, then", "not None and filter_method != \"ENANE\": self.filter = filter_method() elif", "<NAME>., <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>. (2018). Self-training semi-supervised", "self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) def _results_to_structure(self, complete, result): \"\"\" >", "r = 1 nan = defaultdict(set) nb = dict.fromkeys(range(self.n_id), 0)", "data :param low: lower bound of the data :param u:", "in the structure. 
If the next samples are not labeled,", "fit(self, samples, y): \"\"\"Fit method.\"\"\" try: l, u, y =", "new_label count += 1 return count def _fit_stdpnf(self): \"\"\" Self", "nearest neighbor indices and creates a dataframe with the following", "np.min(triangle_upper) return distance, max_dis, min_dis def __auto_select_dc(self): \"\"\" Auto select", "rho as None.\") sort_rho_idx = np.argsort(-self.rho) delta, nneigh = [float(self.max_dis)]", "in enumerate(self.data): self.structure[index] = [ sample, int(self.nneigh[index]), None, self.y[index] if", "The function takes in a distance value and a cutoff", "nearest neighbor vector \"\"\" if self.rho is None: raise ValueError(\"Encountered", "on. The set of points that are within a distance", "samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row] = count if len(next_unlabeled) == 0: break", "percent of all nodes. :return: dc that local density threshold", "index of the unlabeled data to the list of indices", "axis=0) self.n_id = self.data.shape[0] self.distances, self.max_dis, self.min_dis = self.__build_distance() self.dc", "= samples_labeled[\"sample\"].to_list() y_without = samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without) return samples_labeled def", "k nearest neighbor. Knowledge-Based Systems, 184, 104895. <NAME>., <NAME>., <NAME>.,", "= pd.concat([labeled_data, nan_unlabeled], join=\"inner\") enane_model = SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred", "average neighbor is 1-2 percent of all nodes. 
:return: dc", "in the lower and upper bounds of the data, and", "= self.structure.loc[self.structure[\"label\"] != -1] sam_lab = samples_labeled[\"sample\"].to_list() y_without = samples_labeled[\"label\"].to_list()", "u: upper bound of the data :param y: the labels", "j in range(0, i): old_i, old_j = sort_rho_idx[i], sort_rho_idx[j] if", "\"previous\", \"label\"] ).transpose() self.structure_stdnpf = self.structure.copy(deep=True) def __step_a(self): \"\"\" >", "points that are within a distance of r+3. And so", "structure.\"\"\" self._fit_without() def __nan_search(self): \"\"\" For each point, find the", "density peaks of data. Neurocomputing, 275, 180-191. \"\"\" def __init__(", "density peaks and an extended parameter-free local noise filter for", "number of neighbors, and the structure of the data :param", "in result.to_numpy(): is_in = False for c in complete: if", "2 + self.n_id] return dc def __local_density(self): \"\"\" Compute all", "distance of r, and the set of points that are", "old_j delta[sort_rho_idx[0]] = max(delta) return np.array(delta, np.float32), np.array(nneigh, np.float32) def", "indices and creates a dataframe with the following columns: -", ") if 0.01 <= nneighs <= 0.02: break # binary", "nan, r def __enane(self, fx, nan, r): \"\"\" > The", "/ 2 * self.percent / 100) dc = np.sort(list(self.distances.values()))[ position", "of neighbors, and the structure of the data :param low:", "samples_labeled_index = samples_labeled.index.to_list() for prev_row in prev_rows: if prev_row not", "0 \"\"\" return 1 if dij < dc else 0", "dc that local density threshold \"\"\" max_dis, min_dis = self.max_dis,", "1 return count def _fit_stdpnf(self): \"\"\" Self Training based on", "following columns: - sample: the data point - next: the", "Density Peaks.\"\"\" self.dc = dc self.distance_metric = distance_metric self.k =", "result = self._if_filter(complete, complete_y) self._results_to_structure(complete, result) labeled_data = self.structure_stdnpf.loc[ 
self.structure_stdnpf[\"label\"]", "count = self._label_next_point(count) self._label_previous_points(count) def _label_previous_points(self, count): \"\"\" > The", "/ 100) dc = np.sort(list(self.distances.values()))[ position * 2 + self.n_id]", "temp = func(self.distances[(i, j)], self.dc) rho[i] += temp rho[j] +=", "from collections import defaultdict import numpy as np import pandas", "prev_row in prev_rows: if prev_row not in samples_labeled_index and prev_row", "neigh in r_neighs: nb[neigh] += 1 rnn[neigh].add(index) cnt[r] = np.count_nonzero((np.array(list(nb.values()))", "enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index, _), pred in zip(nan_unlabeled.iterrows(), enane_pred):", "for i in range(self.n_id): for j in range(0, i): old_i,", "= self.structure.copy(deep=True) def __step_a(self): \"\"\" > The function takes the", "import KNeighborsClassifier, NearestNeighbors from sklearn.preprocessing import LabelEncoder from sklearn.semi_supervised import", "1-2 percent of all nodes. :return: dc that local density", "then calculates the distances between the data points, the maximum", "= squareform(distance_matrix) triangle_upper = np.triu_indices(self.data.shape[0], 1) triangle_upper = distance_matrix[triangle_upper] distance", "maximum distance, the minimum distance, the dc value, the rho", "dc = (max_dis + min_dis) / 2 if max_dis -", "len(self.y) else -1, ] for index in range(self.n_id): if self.structure[self.structure[index][1]][2]", "gauss_func(dij, dc): \"\"\" > The function takes in a distance", "not in samples_labeled_index and prev_row is not None: prev_unlabeled.append(prev_row) self.order[prev_row]", "current iteration \"\"\" while True: samples_labeled = self.__step_a() prev_rows =", "unlabeled_indexes.append(row) if isinstance(self.filter, str) and self.filter == \"ENANE\": filtered_indexes, filtered_labels", "Systems, 184, 104895. 
<NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., &", "self.percent = percent self.density_threshold = density_threshold self.distance_threshold = distance_threshold self.anormal", ":param count: the number of the current iteration \"\"\" while", "of those samples. It then labels those samples and repeats", "a new dataframe with the labeled data and the unlabeled", "filter_method is not None and filter_method != \"ENANE\": self.filter =", "index < len(self.y) else -1, ] for index in range(self.n_id):", "09:55 # @Version: 4.0 import math from collections import defaultdict", "__local_density(self): \"\"\" Compute all points' local density. :return: local density", "return es, es_pred def __init_values(self, low, u, y): \"\"\" It", "the local density threshold that let average neighbor is 1-2", "+= 1 else: harmfulness += 1 if usefulness >= harmfulness:", "fx: the indexes of the unlabeled data :param nan: a", "of the unlabeled data :param nan: a list of lists,", ":return: the value of the gaussian function. \"\"\" return math.exp(-((dij", "return 0 :param dij: distance between atoms i and j", "+= temp rho[j] += temp return np.array(rho, np.float32) def __min_neighbor_and_distance(self):", "samples labeled in the previous step and finds the next", "1 if usefulness >= harmfulness: es.append(row_index) es_pred.append(pred) return es, es_pred", "is the method used in paper, 'auto' is auto select.", "<NAME>., <NAME>., <NAME>., & <NAME>. (2018). 
Self-training semi-supervised classification based", "a dataframe with the following columns: - sample: the data", "of neighbors to consider :return: The indexes of the samples", "1 for index in range(self.n_id): nan[index] = knn[index].intersection(rnn[index]) return nan,", "\"label\"] = new_label count += 1 def _label_next_point(self, count): \"\"\"", "_ = self.filter.filter_original_complete( original, original_y, complete, complete_y ) else: result,", "based on Density Peaks.\"\"\" self.dc = dc self.distance_metric = distance_metric", "the process until there are no more samples to label", "= self.__step_a() next_rows = samples_labeled[\"next\"].to_numpy() next_unlabeled = [] samples_labeled_index =", "of the clustering \"\"\" results_to_unlabeled = [] for r in", "number of neighbors to use in the KNN classifier. It", "function takes the results of the model and compares them", "parameter-free noise filter. \"\"\" self.__discover_structure() nan, lambda_param = self.__nan_search() self.classifier_stdpnf", "complete: the complete dataset :param result: the result of the", "next_row not in samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row] = count if len(next_unlabeled)", "for (row_index, _), pred in zip(nan_unlabeled.iterrows(), enane_pred): usefulness = 0", "squareform(distance_matrix) triangle_upper = np.triu_indices(self.data.shape[0], 1) triangle_upper = distance_matrix[triangle_upper] distance =", "r): \"\"\" > The function takes in the dataframe, the", ":param fx: the indexes of the unlabeled data :param nan:", "min_dis = dc else: max_dis = dc dc = (max_dis", "np.array(rho, np.float32) def __min_neighbor_and_distance(self): \"\"\" Compute all points' min util", "= max(delta) return np.array(delta, np.float32), np.array(nneigh, np.float32) def __structure(self): \"\"\"", "i)] = distance_matrix[i, j] max_dis, min_dis = np.max(triangle_upper), np.min(triangle_upper) return", "j] max_dis, min_dis = np.max(triangle_upper), 
np.min(triangle_upper) return distance, max_dis, min_dis", "= sort_rho_idx[i], sort_rho_idx[j] if self.distances[(old_i, old_j)] < delta[old_i]: delta[old_i] =", "filter is an ENN, then filter the original data, otherwise", "not labeled, it labels them and updates the order of", "distance of r. The set of points that are within", "\"\"\" if self.dc == \"auto\": dc = self.__auto_select_dc() else: position", "is, then it adds the index of the unlabeled data", "\"\"\" The function takes the data and the nearest neighbor", "samples_labeled = self.__step_a() prev_rows = samples_labeled[\"previous\"].to_numpy() prev_unlabeled = [] samples_labeled_index", "\"\"\" If the filter is an ENN, then filter the", "of the dataframe called structure_stdnpf \"\"\" self.structure = dict.fromkeys(range(self.n_id)) for", "def cutoff_func(dij, dc): \"\"\" If the distance between two atoms", "sort_rho_idx[j] if self.distances[(old_i, old_j)] < delta[old_i]: delta[old_i] = self.distances[(old_i, old_j)]", "for new_label, pos in zip(y_pred, prev_unlabeled): self.structure.at[pos, \"label\"] = new_label", "classifier is not None: if isinstance(classifier_params, dict): self.classifier = classifier(**classifier_params)", "set, it is added to the structure data set. :param", "lu = unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos in", "a distance of r+1 is a superset of the set", "in range(self.n_id): if self.structure[self.structure[index][1]][2] is None: self.structure[self.structure[index][1]][2] = index self.structure", "The function also creates a copy of the dataframe called", "nearest neighbor. Knowledge-Based Systems, 184, 104895. <NAME>., <NAME>., <NAME>., <NAME>.,", "samples of those samples. 
It then labels those samples and", "self.n_id for i in range(self.n_id): for j in range(i +", "complete: if np.array_equal(r, c): is_in = True if not is_in:", "ValueError(\"Encountered rho as None.\") sort_rho_idx = np.argsort(-self.rho) delta, nneigh =", "& <NAME>. (2019). A self-training method based on density peaks", "the data itself, and then calculates the distances between the", "index of the nearest neighbor - previous: the index of", "delta, nneigh = [float(self.max_dis)] * self.n_id, [0] * self.n_id delta[sort_rho_idx[0]]", "import defaultdict import numpy as np import pandas as pd", "at that point :param dij: distance between two nodes :param", "k=3, gauss_cutoff=True, percent=2.0, density_threshold=None, distance_threshold=None, anormal=True, filtering=False, classifier=None, classifier_params=None, filter_method=None,", "to be labeled :return: The number of labeled samples. \"\"\"", "in range(self.n_id): nan[index] = knn[index].intersection(rnn[index]) return nan, r def __enane(self,", "density threshold \"\"\" max_dis, min_dis = self.max_dis, self.min_dis dc =", "& <NAME>. (2018). 
Self-training semi-supervised classification based on density peaks", "y self.low = low self.u = u self.data = np.concatenate((low,", "r+2 is also a superset of the set of points", "next_unlabeled.append(next_row) self.order[next_row] = count if len(next_unlabeled) == 0: break unlabeled_next_of_labeled", "\"\"\" > This function takes the results of the model", "return nan, r def __enane(self, fx, nan, r): \"\"\" >", "the nearest neighbor indices and creates a dataframe with the", "] complete = labeled_data[\"sample\"] complete_y = labeled_data[\"label\"] result = self._if_filter(complete,", "else 0 func = gauss_func if self.gauss_cutoff else cutoff_func rho", "Peaks.\"\"\" self.dc = dc self.distance_metric = distance_metric self.k = k", "low self.u = u self.data = np.concatenate((low, u), axis=0) self.n_id", "> The function takes the samples labeled in the previous", "unlabeled_next_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos in zip(y_pred, next_unlabeled):", "in range(self.n_id): for j in range(i + 1, self.n_id): distance[(i,", "self.__auto_select_dc() else: position = int(self.n_id * (self.n_id + 1) /", "def __min_neighbor_and_distance(self): \"\"\" Compute all points' min util to the", "Peaks and a parameter-free noise filter. \"\"\" self.__discover_structure() nan, lambda_param", "= None self.y = None self.low = None self.u =", "of labeled samples. \"\"\" while True: samples_labeled = self.__step_a() next_rows", "and the data itself, and then calculates the distances between", "of r. The set of points that are within a", "on Density Peaks.\"\"\" self.dc = dc self.distance_metric = distance_metric self.k", "the unlabeled data. 
If it is, then it adds the", "np.sort(list(self.distances.values()))[ position * 2 + self.n_id] return dc def __local_density(self):", "= np.count_nonzero((np.array(list(nb.values())) == 0)) if r > 2 and cnt[r]", "it adds the index of the unlabeled data to the", "pd.DataFrame(self.y) result, _ = self.filter.filter_original_complete( original, original_y, complete, complete_y )", "new_label count += 1 def _label_next_point(self, count): \"\"\" > The", "-1 def _if_filter(self, complete, complete_y): \"\"\" If the filter is", "y = le.transform(y) self.__init_values(l, u, y) if self.filtering: self._fit_stdpnf() else:", "cnt[r] = np.count_nonzero((np.array(list(nb.values())) == 0)) if r > 2 and", "search if nneighs < 0.01: min_dis = dc else: max_dis", "lower and upper bounds of the data, and the data", "method based on density peaks and an extended parameter-free local", "self, dc=None, distance_metric=\"euclidean\", k=3, gauss_cutoff=True, percent=2.0, density_threshold=None, distance_threshold=None, anormal=True, filtering=False,", "self._fit_without() def predict(self, src): \"\"\" Predict based on a trained", "r+3. And so on. The set of points that are", "result: the result of the clustering \"\"\" results_to_unlabeled = []", "the following columns: - sample: the data point - next:", "dataframe with the following columns: - sample: the data point", "the unlabeled data to the list of indices of the", "of the nearest neighbor - label: the label of the", "are within a distance of r. The set of points", "do not match.\") le = LabelEncoder() le.fit(y) y = le.transform(y)", "- sample: the data point - next: the index of", "the higher local density point(which is the nearest neighbor). 
:return:", "of lists, where each list contains the indices of the", "nan, r \"\"\" r = 1 nan = defaultdict(set) nb", "\"\"\" def gauss_func(dij, dc): \"\"\" > The function takes in", "local density vector that index is the point index \"\"\"", "sample :param r: the number of neighbors to consider :return:", "is added to the structure data set. :param complete: the", "- next: the index of the nearest neighbor - previous:", "base_estimator = KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric ) labeled_data = local_structure.loc[local_structure[\"label\"] !=", "that have been labeled. \"\"\" samples_labeled = self.structure.loc[self.structure[\"label\"] != -1]", "unlabeled_indexes = [] for row in unlabeled_rows: if self.order[row] ==", ":return: distance vector, nearest neighbor vector \"\"\" if self.rho is", "j in range(i + 1, self.n_id): distance[(i, j)] = distance_matrix[i,", "min_dis = self.max_dis, self.min_dis dc = (max_dis + min_dis) /", "next samples in the structure. If the next samples are", "then labels those samples and repeats the process until there", "added to the structure data set. :param complete: the complete", "of the Gaussian function at that point :param dij: distance", "self._label_next_point(count) self._label_previous_points(count) def _label_previous_points(self, count): \"\"\" > The function takes", "while True: samples_labeled = self.__step_a() next_rows = samples_labeled[\"next\"].to_numpy() next_unlabeled =", "takes in the lower and upper bounds of the data,", "to be labeled and the labels that are going to", "repeats the process until there are no more samples to", "for i in range(self.n_id): for j in range(i + 1,", "[0] * self.n_id delta[sort_rho_idx[0]] = -1.0 for i in range(self.n_id):", "in the KNN classifier. 
It then creates a new dataframe", "= np.triu_indices(self.data.shape[0], 1) triangle_upper = distance_matrix[triangle_upper] distance = {} for", "split(samples, y) except IndexError: raise ValueError(\"Dimensions do not match.\") le", "a classifier, and then labels the next point, and then", "usefulness >= harmfulness: es.append(row_index) es_pred.append(pred) return es, es_pred def __init_values(self,", "else: result, _ = self.filter.filter(complete, complete_y) return result def fit(self,", "y values :return: The result is a dataframe with the", "k self.gauss_cutoff = gauss_cutoff self.percent = percent self.density_threshold = density_threshold", "samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without) return samples_labeled def __discover_structure(self): \"\"\"Discovers the under", "rho[i] += temp rho[j] += temp return np.array(rho, np.float32) def", "max_dis, min_dis def __auto_select_dc(self): \"\"\" Auto select the local density", "None and filter_method != \"ENANE\": self.filter = filter_method() elif isinstance(filter_method,", "self.__build_distance() self.dc = self.__select_dc() self.rho = self.__local_density() self.delta, self.nneigh =", "that are within a distance of r+2 is also a", "Supervised Algorithm based on Density Peaks.\"\"\" self.dc = dc self.distance_metric", "self.data = np.concatenate((low, u), axis=0) self.n_id = self.data.shape[0] self.distances, self.max_dis,", "in the previous step and finds the next samples in", "the previous samples of those samples. It then labels those", "as the label of the majority of the neighbors of", "on them :return: The samples that have been labeled. 
\"\"\"", "samples_labeled[\"sample\"].to_list() y_without = samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without) return samples_labeled def __discover_structure(self):", "min_dis = np.max(triangle_upper), np.min(triangle_upper) return distance, max_dis, min_dis def __auto_select_dc(self):", "= samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without) return samples_labeled def __discover_structure(self): \"\"\"Discovers the", "is also a superset of the set of points that", "It then creates a new dataframe with the labeled data", "data itself, and then calculates the distances between the data", "unlabeled data. It then checks if the predicted label is", "for new_label, pos in zip(y_pred, next_unlabeled): self.structure.at[pos, \"label\"] = new_label", "distance :return: the value of the gaussian function. \"\"\" return", "data :param y: the labels of the data \"\"\" self.y", "local_structure.loc[neigh, \"label\"] == pred: usefulness += 1 else: harmfulness +=", "function takes in a distance value and a cutoff value,", "point The function also creates a copy of the dataframe", "dataset :param result: the result of the clustering \"\"\" results_to_unlabeled", "samples_labeled def __discover_structure(self): \"\"\"Discovers the under laying structure.\"\"\" self._fit_without() def", "local density threshold \"\"\" if self.dc == \"auto\": dc =", "str) and filter_method == \"ENANE\": self.filter = filter_method else: self.filter", "\"next\", \"previous\", \"label\"] ).transpose() self.structure_stdnpf = self.structure.copy(deep=True) def __step_a(self): \"\"\"", "__nan_search(self): \"\"\" For each point, find the set of points", "- min_dis < 0.0001: break return dc def __select_dc(self): \"\"\"", "import LabelEncoder from sklearn.semi_supervised import SelfTrainingClassifier from sklearn.svm import SVC", "self.data.shape[0] self.distances, self.max_dis, self.min_dis = self.__build_distance() self.dc = self.__select_dc() 
self.rho", "= classifier() else: self.classifier = None if filter_method is not", "gauss_func if self.gauss_cutoff else cutoff_func rho = [0] * self.n_id", "# -*- coding:utf-8 -*- # @Filename: DensityPeaks.py # @Author: <NAME>", "y) if self.filtering: self._fit_stdpnf() else: self._fit_without() def predict(self, src): \"\"\"", "classification based on density peaks of data. Neurocomputing, 275, 180-191.", "+ 1, self.n_id): distance[(i, j)] = distance_matrix[i, j] distance[(j, i)]", "self.rho = self.__local_density() self.delta, self.nneigh = self.__min_neighbor_and_distance() self.__structure() def _fit_without(self):", "the index of the nearest neighbor of the nearest neighbor", "> 2 and cnt[r] == cnt[r - 1]: r -=", "distance_threshold=None, anormal=True, filtering=False, classifier=None, classifier_params=None, filter_method=None, ): \"\"\"Semi Supervised Algorithm", "the data \"\"\" self.y = y self.low = low self.u", "lists, where each list contains the indices of the neighbors", "next point to be labeled :return: The number of labeled", "__auto_select_dc(self): \"\"\" Auto select the local density threshold that let", "filter_method == \"ENANE\": self.filter = filter_method else: self.filter = None", "# @Author: <NAME> # @Time: 5/3/22 09:55 # @Version: 4.0", "src: The source image :return: The classifier is being returned.", "density threshold, default is the method used in paper, 'auto'", "dc: cutoff distance :return: 1 if dij < dc, else", "= dict.fromkeys(range(self.n_id)) for index, sample in enumerate(self.data): self.structure[index] = [", "= pd.DataFrame( self.structure, index=[\"sample\", \"next\", \"previous\", \"label\"] ).transpose() self.structure_stdnpf =", "distance vector, nearest neighbor vector \"\"\" if self.rho is None:", "/ 2 if max_dis - min_dis < 0.0001: break return", "nodes :param dc: The cutoff distance :return: the value of", "of the current iteration \"\"\" while True: samples_labeled = self.__step_a()", "the structure of the data 
:param low: lower bound of", "the unlabeled data, the list of indices of the neighbors", "isinstance(filter_method, str) and filter_method == \"ENANE\": self.filter = filter_method else:", "pd.concat([labeled_data, nan_unlabeled], join=\"inner\") enane_model = SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred =", "1 def _label_next_point(self, count): \"\"\" > The function takes the", "dc value, the rho value, the delta value, the number", "to the list of indices of the data to be", "[] for r in result.to_numpy(): is_in = False for c", "The set of points that are within a distance of", "the dataframe, the list of indices of the unlabeled data,", "result) labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(),", "min_dis) / 2 if max_dis - min_dis < 0.0001: break", "self.u = u self.data = np.concatenate((low, u), axis=0) self.n_id =", "are going to be labeled and the labels that are", "samples_labeled_index and prev_row is not None: prev_unlabeled.append(prev_row) self.order[prev_row] = count", "there are no more samples to label :param count: the", "self._if_filter(complete, complete_y) self._results_to_structure(complete, result) labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1", "v in self.distances.values() if v < dc]) / self.n_id**2 )", ") count += 1 labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1] self.classifier_stdpnf.fit(", "where each list contains the indices of the neighbors of", "* self.n_id, [0] * self.n_id delta[sort_rho_idx[0]] = -1.0 for i", "def __enane(self, fx, nan, r): \"\"\" > The function takes", "-1 ] complete = labeled_data[\"sample\"] complete_y = labeled_data[\"label\"] result =", "takes the results of the model and compares them to", "if self.structure[self.structure[index][1]][2] is None: 
self.structure[self.structure[index][1]][2] = index self.structure = pd.DataFrame(", "the original data, otherwise filter the complete data :param complete:", "collections import defaultdict import numpy as np import pandas as", "filtering=False, classifier=None, classifier_params=None, filter_method=None, ): \"\"\"Semi Supervised Algorithm based on", "SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index, _), pred", "the nearest neighbor). :return: distance vector, nearest neighbor vector \"\"\"", "set of points that are within a distance of r.", "distance_threshold self.anormal = anormal self.filtering = filtering if classifier is", "and compares them to the complete data set. If the", "ENN from .utils import split class STDPNF: \"\"\" <NAME>., <NAME>.,", "es.append(row_index) es_pred.append(pred) return es, es_pred def __init_values(self, low, u, y):", "self.structure.at[pos, \"label\"] = new_label count += 1 def _label_next_point(self, count):", "classifier on them :return: The samples that have been labeled.", "self.filter == \"ENANE\": filtered_indexes, filtered_labels = self.__enane( unlabeled_indexes, nan, lambda_param", "nearest neighbor - previous: the index of the nearest neighbor", "self.__discover_structure() nan, lambda_param = self.__nan_search() self.classifier_stdpnf = KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric", "elif isinstance(filter_method, str) and filter_method == \"ENANE\": self.filter = filter_method", "\"label\"] = filtered_labels else: labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1", "source image :return: The classifier is being returned. \"\"\" if", ":return: The samples that have been labeled. \"\"\" samples_labeled =", "nneighs <= 0.02: break # binary search if nneighs <", "previous samples of those samples. 
It then labels those samples", "dij < dc, else 0 \"\"\" return 1 if dij", "* 2 + self.n_id] return dc def __local_density(self): \"\"\" Compute", "in zip(y_pred, prev_unlabeled): self.structure.at[pos, \"label\"] = new_label count += 1", "unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos in zip(y_pred, prev_unlabeled):", "\"\"\" while True: samples_labeled = self.__step_a() next_rows = samples_labeled[\"next\"].to_numpy() next_unlabeled", "filtered_labels else: labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] complete", "= np.concatenate((low, u), axis=0) self.n_id = self.data.shape[0] self.distances, self.max_dis, self.min_dis", "self.y = None self.low = None self.u = None self.classifier_stdpnf", "distance of r+1 is also a superset of the set", "of the nearest neighbor of the nearest neighbor - label:", "complete data set. If the result is not in the", "of the neighbors of a sample :param r: the number", "True: search = NearestNeighbors(n_neighbors=r + 1, algorithm=\"kd_tree\") search.fit(self.data) for index,", "y) except IndexError: raise ValueError(\"Dimensions do not match.\") le =", "without filtering. \"\"\" if self.classifier is None: self.classifier = SVC()", "= self.__select_dc() self.rho = self.__local_density() self.delta, self.nneigh = self.__min_neighbor_and_distance() self.__structure()", "distance between two atoms is less than the cutoff distance,", "pd.DataFrame(self.low) original_y = pd.DataFrame(self.y) result, _ = self.filter.filter_original_complete( original, original_y,", "split class STDPNF: \"\"\" <NAME>., <NAME>., & <NAME>. (2019). 
A", "that point :param dij: distance between two nodes :param dc:", "the lower and upper bounds of the data, and the", "filtered_indexes, filtered_labels = self.__enane( unlabeled_indexes, nan, lambda_param ) self.structure_stdnpf.at[filtered_indexes, \"label\"]", "atoms is less than the cutoff distance, return 1, otherwise", "= labeled_data[\"sample\"] complete_y = labeled_data[\"label\"] result = self._if_filter(complete, complete_y) self._results_to_structure(complete,", "_fit_stdpnf(self): \"\"\" Self Training based on Density Peaks and a", "harmfulness = 0 for neigh in nan[row_index]: if local_structure.loc[neigh, \"label\"]", "creates a new dataframe with the labeled data and the", "self.n_id] return dc def __local_density(self): \"\"\" Compute all points' local", "the number of the next point to be labeled :return:", "count = 1 self.order = dict.fromkeys(range(self.n_id), 0) count = self._label_next_point(count)", "< dc]) / self.n_id**2 ) if 0.01 <= nneighs <=", "until there are no more samples to label :param count:", "while True: nneighs = ( sum([1 for v in self.distances.values()", "max distance, min distance \"\"\" from scipy.spatial.distance import pdist, squareform", "cnt = defaultdict(int) while True: search = NearestNeighbors(n_neighbors=r + 1,", "1) / 2 * self.percent / 100) dc = np.sort(list(self.distances.values()))[", "self.filter.filter(complete, complete_y) return result def fit(self, samples, y): \"\"\"Fit method.\"\"\"", "distance_matrix[i, j] max_dis, min_dis = np.max(triangle_upper), np.min(triangle_upper) return distance, max_dis,", "= gauss_func if self.gauss_cutoff else cutoff_func rho = [0] *", "index=[\"sample\", \"next\", \"previous\", \"label\"] ).transpose() self.structure_stdnpf = self.structure.copy(deep=True) def __step_a(self):", "self.min_dis = None self.rho = None self.delta = None self.nneigh", "based on density peaks of data. Neurocomputing, 275, 180-191. 
\"\"\"", "= self.distances[(old_i, old_j)] nneigh[old_i] = old_j delta[sort_rho_idx[0]] = max(delta) return", "original, original_y, complete, complete_y ) else: result, _ = self.filter.filter(complete,", "harmfulness: es.append(row_index) es_pred.append(pred) return es, es_pred def __init_values(self, low, u,", "distance of r+2. The set of points that are within", "y): \"\"\" It takes in the lower and upper bounds", "distance of r+1. The set of points that are within", "use in the KNN classifier. It then creates a new", "are within a distance of r+1. The set of points", "in zip(nan_unlabeled.iterrows(), enane_pred): usefulness = 0 harmfulness = 0 for", "list of indices of the unlabeled data, the list of", "data. Neurocomputing, 275, 180-191. \"\"\" def __init__( self, dc=None, distance_metric=\"euclidean\",", "KNeighborsClassifier, NearestNeighbors from sklearn.preprocessing import LabelEncoder from sklearn.semi_supervised import SelfTrainingClassifier", "in samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row] = count if len(next_unlabeled) == 0:", ":return: The number of labeled samples. \"\"\" while True: samples_labeled", "<= 0.02: break # binary search if nneighs < 0.01:", "the gaussian function. 
\"\"\" return math.exp(-((dij / dc) ** 2))", "n_neighbors=r, metric=self.distance_metric ) labeled_data = local_structure.loc[local_structure[\"label\"] != -1] nan_unlabeled =", "= None self.order = None self.structure = None self.structure_stdnpf =", "0.02: break # binary search if nneighs < 0.01: min_dis", "y_pred = self.classifier.predict(lu) for new_label, pos in zip(y_pred, next_unlabeled): self.structure.at[pos,", "\"\"\" if self.classifier is None: self.classifier = SVC() count =", "enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index, _), pred in zip(nan_unlabeled.iterrows(), enane_pred): usefulness =", "= samples_labeled[\"next\"].to_numpy() next_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for next_row", "anormal self.filtering = filtering if classifier is not None: if", "in a classifier, and then labels the next point, and", "previous step and finds the next samples in the structure.", "complete_y): \"\"\" If the filter is an ENN, then filter", "= 0 harmfulness = 0 for neigh in nan[row_index]: if", "str) and self.filter == \"ENANE\": filtered_indexes, filtered_labels = self.__enane( unlabeled_indexes,", "= self._label_next_point(count) self._label_previous_points(count) def _label_previous_points(self, count): \"\"\" > The function", "a distance value and a cutoff value, and returns the", "= new_label count += 1 return count def _fit_stdpnf(self): \"\"\"", "pdist(self.data, metric=self.distance_metric) distance_matrix = squareform(distance_matrix) triangle_upper = np.triu_indices(self.data.shape[0], 1) triangle_upper", "set of points that are within a distance of r,", "is 1-2 percent of all nodes. 
:return: dc that local", "two atoms is less than the cutoff distance, return 1,", "j :param dc: cutoff distance :return: 1 if dij <", "= [] local_structure = self.structure_stdnpf.copy(deep=True) base_estimator = KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric", "\"\"\" self.__discover_structure() nan, lambda_param = self.__nan_search() self.classifier_stdpnf = KNeighborsClassifier( n_neighbors=self.k,", "set of points that are within a distance of r+2.", "set of points that are within a distance of r+1.", "<NAME>. (2019). A self-training method based on density peaks and", "else 0 \"\"\" return 1 if dij < dc else", "not match.\") le = LabelEncoder() le.fit(y) y = le.transform(y) self.__init_values(l,", "adds the index of the unlabeled data to the list", "Self Training based on Density Peaks and a parameter-free noise", "= ( sum([1 for v in self.distances.values() if v <", "dict.fromkeys(range(self.n_id), 0) knn = defaultdict(set) rnn = defaultdict(set) cnt =", "join=\"inner\") enane_model = SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist()) enane_pred = enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for", "density point(which is the nearest neighbor). :return: distance vector, nearest", "None def __build_distance(self): \"\"\" Calculate distance dict. :return: distance dict,", "labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] complete = labeled_data[\"sample\"]", "structure. 
If the next samples are not labeled, it labels", "neighbors to consider :return: The indexes of the samples that", "( sum([1 for v in self.distances.values() if v < dc])", "= self._if_filter(complete, complete_y) self._results_to_structure(complete, result) labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] !=", "label is the same as the label of the majority", "as np import pandas as pd from sklearn.neighbors import KNeighborsClassifier,", "sklearn.neighbors import KNeighborsClassifier, NearestNeighbors from sklearn.preprocessing import LabelEncoder from sklearn.semi_supervised", "samples labeled in the previous step and finds the previous", "= None if filter_method is not None and filter_method !=", "that let average neighbor is 1-2 percent of all nodes.", "= None def __build_distance(self): \"\"\" Calculate distance dict. :return: distance", "labeled_data[\"label\"].tolist() ) count += 1 labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1]", "data, and the number of neighbors to use in the", "result of the clustering \"\"\" results_to_unlabeled = [] for r", "zip(y_pred, next_unlabeled): self.structure.at[pos, \"label\"] = new_label count += 1 return", "5/3/22 09:55 # @Version: 4.0 import math from collections import", "0.0001: break return dc def __select_dc(self): \"\"\" Select the local", "the data, and the data itself, and then calculates the", "the data to be labeled :param fx: the indexes of", "return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for neigh in r_neighs: nb[neigh] += 1 rnn[neigh].add(index)", "if self.dc == \"auto\": dc = self.__auto_select_dc() else: position =", "distance_matrix[triangle_upper] distance = {} for i in range(self.n_id): for j", "util to the higher local density point(which is the nearest", "samples to label :param count: the number of the current", "value of the gaussian function. 
\"\"\" return math.exp(-((dij / dc)", "predicted label is the same as the label of the", "data = pd.concat([labeled_data, nan_unlabeled], join=\"inner\") enane_model = SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(), data[\"label\"].tolist())", "the delta value, the number of neighbors, and the structure", "indexes of the samples that are going to be labeled", "lambda_param = self.__nan_search() self.classifier_stdpnf = KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low,", "def __auto_select_dc(self): \"\"\" Auto select the local density threshold that", "assigned to them. \"\"\" es = [] es_pred = []", "it is, then it adds the index of the unlabeled", "the list of indices of the neighbors of the unlabeled", "-= 1 break r += 1 for index in range(self.n_id):", "and filter_method == \"ENANE\": self.filter = filter_method else: self.filter =", "that are within a distance of r+2. The set of", ") def _results_to_structure(self, complete, result): \"\"\" > This function takes", "self.filtering = filtering if classifier is not None: if isinstance(classifier_params,", "= count if len(next_unlabeled) == 0: break unlabeled_next_of_labeled = self.structure.loc[next_unlabeled]", "+ 1) / 2 * self.percent / 100) dc =", "neighbor indices and creates a dataframe with the following columns:", ":return: dc that local density threshold \"\"\" if self.dc ==", "if filter_method is not None and filter_method != \"ENANE\": self.filter", "= 1 nan = defaultdict(set) nb = dict.fromkeys(range(self.n_id), 0) knn", "the clustering \"\"\" results_to_unlabeled = [] for r in result.to_numpy():", "columns: - sample: the data point - next: the index", "step and finds the previous samples of those samples. It", "neighbors to use in the KNN classifier. 
It then creates", "index, sample in enumerate(self.data): r_neighs = search.kneighbors( [sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs))", "for index, sample in enumerate(self.data): r_neighs = search.kneighbors( [sample], return_distance=False)[0][1:]", "self.filter.filter_original_complete( original, original_y, complete, complete_y ) else: result, _ =", "based on a trained classifier. :param src: The source image", "The indexes of the samples that are going to be", "next_rows: if next_row not in samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row] = count", "within a distance of r+1. The set of points that", "distance of r+1 is a superset of the set of", "self.n_id = self.data.shape[0] self.distances, self.max_dis, self.min_dis = self.__build_distance() self.dc =", "filter_method else: self.filter = None self.y = None self.low =", "self.classifier is None: self.classifier = SVC() count = 1 self.order", "The result is a dataframe with the filtered data. \"\"\"", "points' local density. :return: local density vector that index is", "self.u = None self.classifier_stdpnf = None self.order = None self.structure", "= self.filter.filter(complete, complete_y) return result def fit(self, samples, y): \"\"\"Fit", "noise filter. \"\"\" self.__discover_structure() nan, lambda_param = self.__nan_search() self.classifier_stdpnf =", "'auto' is auto select. 
:return: dc that local density threshold", "enumerate(self.data): r_neighs = search.kneighbors( [sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for neigh in", "__min_neighbor_and_distance(self): \"\"\" Compute all points' min util to the higher", "filter_method() elif isinstance(filter_method, str) and filter_method == \"ENANE\": self.filter =", "of the set of points that are within a distance", "neighbor - previous: the index of the nearest neighbor of", "to label :param count: the number of the current iteration", "results_to_unlabeled = [] for r in result.to_numpy(): is_in = False", "index self.structure = pd.DataFrame( self.structure, index=[\"sample\", \"next\", \"previous\", \"label\"] ).transpose()", "complete_y) return result def fit(self, samples, y): \"\"\"Fit method.\"\"\" try:", "self.classifier = classifier(**classifier_params) else: self.classifier = classifier() else: self.classifier =", "to the higher local density point(which is the nearest neighbor).", "] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) count += 1 labeled_data =", "-1 ].index.to_list() unlabeled_indexes = [] for row in unlabeled_rows: if", "knn[index].update(list(r_neighs)) for neigh in r_neighs: nb[neigh] += 1 rnn[neigh].add(index) cnt[r]", "from sklearn.neighbors import KNeighborsClassifier, NearestNeighbors from sklearn.preprocessing import LabelEncoder from", "local density threshold \"\"\" max_dis, min_dis = self.max_dis, self.min_dis dc", "if self.gauss_cutoff else cutoff_func rho = [0] * self.n_id for", "break return dc def __select_dc(self): \"\"\" Select the local density", "sklearn.svm import SVC from instance_selection import ENN from .utils import", "unlabeled data, the list of indices of the neighbors of", "It then checks if the predicted label is the same", "\"ENANE\": self.filter = filter_method else: self.filter = None self.y =", "min_dis def __auto_select_dc(self): \"\"\" Auto select the 
local density threshold", "the order of the samples :param count: the number of", "threshold that let average neighbor is 1-2 percent of all", "density threshold \"\"\" if self.dc == \"auto\": dc = self.__auto_select_dc()", "a distance of r+2 is :return: nan, r \"\"\" r", "else: max_dis = dc dc = (max_dis + min_dis) /", "of r, and the set of points that are within", "self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) count += 1 labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"]", ":param nan: a list of lists, where each list contains", "the cutoff distance, return 1, otherwise return 0 :param dij:", "the labeled samples and trains the classifier on them :return:", "as pd from sklearn.neighbors import KNeighborsClassifier, NearestNeighbors from sklearn.preprocessing import", "return dc def __local_density(self): \"\"\" Compute all points' local density.", "minimum distance, the dc value, the rho value, the delta", "= distance_matrix[triangle_upper] distance = {} for i in range(self.n_id): for", "= self.structure_stdnpf.copy(deep=True) base_estimator = KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric ) labeled_data =", "the neighbors of a sample :param r: the number of", "otherwise filter the complete data :param complete: the complete dataframe", ".utils import split class STDPNF: \"\"\" <NAME>., <NAME>., & <NAME>.", "in the previous step and finds the previous samples of", "self.min_dis = self.__build_distance() self.dc = self.__select_dc() self.rho = self.__local_density() self.delta,", "sklearn.preprocessing import LabelEncoder from sklearn.semi_supervised import SelfTrainingClassifier from sklearn.svm import", "#!/usr/bin/env python # -*- coding:utf-8 -*- # @Filename: DensityPeaks.py #", "< dc, else 0 \"\"\" return 1 if dij <", "a parameter-free noise filter. 
\"\"\" self.__discover_structure() nan, lambda_param = self.__nan_search()", "data and the unlabeled data, and uses the KNN classifier", "the results of the model and compares them to the", "labeled and the labels that are going to be assigned", "the previous step and finds the previous samples of those", "match.\") le = LabelEncoder() le.fit(y) y = le.transform(y) self.__init_values(l, u,", "Knowledge-Based Systems, 184, 104895. <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>.,", "KNN classifier. It then creates a new dataframe with the", "if len(prev_unlabeled) == 0: break unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled] lu =", "i and j :param dc: cutoff distance :return: 1 if", "None.\") sort_rho_idx = np.argsort(-self.rho) delta, nneigh = [float(self.max_dis)] * self.n_id,", "point - next: the index of the nearest neighbor -", "= old_j delta[sort_rho_idx[0]] = max(delta) return np.array(delta, np.float32), np.array(nneigh, np.float32)", "count): \"\"\" > The function takes the samples labeled in", "np.float32) def __structure(self): \"\"\" The function takes the data and", "that are within a distance of r+2 is :return: nan,", "self.__select_dc() self.rho = self.__local_density() self.delta, self.nneigh = self.__min_neighbor_and_distance() self.__structure() def", "that index is the point index \"\"\" def gauss_func(dij, dc):", "range(0, i): old_i, old_j = sort_rho_idx[i], sort_rho_idx[j] if self.distances[(old_i, old_j)]", "].index.to_list() unlabeled_indexes = [] for row in unlabeled_rows: if self.order[row]", "is a dataframe with the filtered data. \"\"\" if isinstance(self.filter,", "distance, max_dis, min_dis def __auto_select_dc(self): \"\"\" Auto select the local", "np.triu_indices(self.data.shape[0], 1) triangle_upper = distance_matrix[triangle_upper] distance = {} for i", "from scipy.spatial.distance import pdist, squareform distance_matrix = pdist(self.data, metric=self.distance_metric) distance_matrix", "peaks of data. 
Neurocomputing, 275, 180-191. \"\"\" def __init__( self,", "indices of the neighbors of a sample :param r: the", "of points that are within a distance of r, and", "dc self.distance_metric = distance_metric self.k = k self.gauss_cutoff = gauss_cutoff", "= distance_matrix[i, j] max_dis, min_dis = np.max(triangle_upper), np.min(triangle_upper) return distance,", "y: the labels of the data \"\"\" self.y = y", "np.count_nonzero((np.array(list(nb.values())) == 0)) if r > 2 and cnt[r] ==", ") self.classifier_stdpnf.fit(self.low, self.y) count = 1 while count <= max(self.order.values()):", "self.__enane( unlabeled_indexes, nan, lambda_param ) self.structure_stdnpf.at[filtered_indexes, \"label\"] = filtered_labels else:", "= self.__enane( unlabeled_indexes, nan, lambda_param ) self.structure_stdnpf.at[filtered_indexes, \"label\"] = filtered_labels", "np.max(triangle_upper), np.min(triangle_upper) return distance, max_dis, min_dis def __auto_select_dc(self): \"\"\" Auto", "to be labeled :param fx: the indexes of the unlabeled", "local noise filter for k nearest neighbor. Knowledge-Based Systems, 184,", "__step_a(self): \"\"\" > The function takes the labeled samples and", "y): \"\"\"Fit method.\"\"\" try: l, u, y = split(samples, y)", "of the neighbors of the unlabeled data, and the number", "** 2)) def cutoff_func(dij, dc): \"\"\" If the distance between", "dataframe with the filtered data. \"\"\" if isinstance(self.filter, ENN): original", "The number of labeled samples. \"\"\" while True: samples_labeled =", "\"\"\" For each point, find the set of points that", "is a superset of the set of points that are", "= defaultdict(int) while True: search = NearestNeighbors(n_neighbors=r + 1, algorithm=\"kd_tree\")", "the value of the gaussian function. \"\"\" return math.exp(-((dij /", "self.rho is None: raise ValueError(\"Encountered rho as None.\") sort_rho_idx =", "The samples that have been labeled. 
\"\"\" samples_labeled = self.structure.loc[self.structure[\"label\"]", "\"ENANE\": filtered_indexes, filtered_labels = self.__enane( unlabeled_indexes, nan, lambda_param ) self.structure_stdnpf.at[filtered_indexes,", "r+2. The set of points that are within a distance", "distance, min distance \"\"\" from scipy.spatial.distance import pdist, squareform distance_matrix", "are within a distance of r+2 is also a superset", "self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) count", ") labeled_data = local_structure.loc[local_structure[\"label\"] != -1] nan_unlabeled = local_structure.loc[fx] data", "data, and uses the KNN classifier to predict the labels", "For each point, find the set of points that are", "= local_structure.loc[local_structure[\"label\"] != -1] nan_unlabeled = local_structure.loc[fx] data = pd.concat([labeled_data,", "of the model and compares them to the complete data", "dc: The cutoff distance :return: the value of the gaussian", "samples. \"\"\" while True: samples_labeled = self.__step_a() next_rows = samples_labeled[\"next\"].to_numpy()", "predict(self, src): \"\"\" Predict based on a trained classifier. :param", "the number of neighbors to consider :return: The indexes of", "self.classifier is None: raise AssertionError(\"The model needs to be fitted", "= y self.low = low self.u = u self.data =", "None self.rho = None self.delta = None self.nneigh = None", "> The function takes in a distance value and a", "trained classifier. 
:param src: The source image :return: The classifier", "data points, the maximum distance, the minimum distance, the dc", "\"\"\" max_dis, min_dis = self.max_dis, self.min_dis dc = (max_dis +", "next_unlabeled): self.structure.at[pos, \"label\"] = new_label count += 1 return count", "self.filter = None self.y = None self.low = None self.u", "== \"ENANE\": self.filter = filter_method else: self.filter = None self.y", "[] local_structure = self.structure_stdnpf.copy(deep=True) base_estimator = KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric )", "that are within a distance of r+1 is a superset", "r += 1 for index in range(self.n_id): nan[index] = knn[index].intersection(rnn[index])", "defaultdict(set) nb = dict.fromkeys(range(self.n_id), 0) knn = defaultdict(set) rnn =", "-1] nan_unlabeled = local_structure.loc[fx] data = pd.concat([labeled_data, nan_unlabeled], join=\"inner\") enane_model", "DensityPeaks.py # @Author: <NAME> # @Time: 5/3/22 09:55 # @Version:", "image :return: The classifier is being returned. 
\"\"\" if self.classifier", ":param complete_y: the complete y values :return: The result is", "a cutoff value, and returns the value of the Gaussian", "range(self.n_id): nan[index] = knn[index].intersection(rnn[index]) return nan, r def __enane(self, fx,", "prev_unlabeled): self.structure.at[pos, \"label\"] = new_label count += 1 def _label_next_point(self,", "def __discover_structure(self): \"\"\"Discovers the under laying structure.\"\"\" self._fit_without() def __nan_search(self):", "are not labeled, it labels them and updates the order", "complete, complete_y ) else: result, _ = self.filter.filter(complete, complete_y) return", "es_pred = [] local_structure = self.structure_stdnpf.copy(deep=True) base_estimator = KNeighborsClassifier( n_neighbors=r,", "set of points that are within a distance of r+2", "self.max_dis, self.min_dis = self.__build_distance() self.dc = self.__select_dc() self.rho = self.__local_density()", "= None self.distances = None self.max_dis = None self.min_dis =", "__structure(self): \"\"\" The function takes the data and the nearest", "\"\"\" > The function takes the labeled samples and trains", "complete = labeled_data[\"sample\"] complete_y = labeled_data[\"label\"] result = self._if_filter(complete, complete_y)", "= filtered_labels else: labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ]", "Auto select the local density threshold that let average neighbor", "dict): self.classifier = classifier(**classifier_params) else: self.classifier = classifier() else: self.classifier", "lu = unlabeled_next_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos in", "self.structure_stdnpf[\"label\"] == -1 ].index.to_list() unlabeled_indexes = [] for row in", "number of the current iteration \"\"\" while True: samples_labeled =", "= None self.max_dis = None self.min_dis = None self.rho =", "1 labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1] 
self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() )", "of r+1 is a superset of the set of points", "[] for row in unlabeled_rows: if self.order[row] == count: unlabeled_indexes.append(row)", "in range(self.n_id): for j in range(i + 1, self.n_id): temp", "+= 1 for index in range(self.n_id): nan[index] = knn[index].intersection(rnn[index]) return", "all points' local density. :return: local density vector that index", "lower bound of the data :param u: upper bound of", "count: the number of the next point to be labeled", "the KNN classifier to predict the labels of the unlabeled", "value, the delta value, the number of neighbors, and the", "[sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for neigh in r_neighs: nb[neigh] += 1", "r in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\" ] = -1 def", "< dc else 0 func = gauss_func if self.gauss_cutoff else", "from sklearn.semi_supervised import SelfTrainingClassifier from sklearn.svm import SVC from instance_selection", "c in complete: if np.array_equal(r, c): is_in = True if", "\"\"\" Predict based on a trained classifier. :param src: The", "def _fit_without(self): \"\"\" The function takes in a classifier, and", ":param complete: the complete dataframe :param complete_y: the complete y", "\"\"\"Fit method.\"\"\" try: l, u, y = split(samples, y) except", "= None self.classifier_stdpnf = None self.order = None self.structure =", "samples_labeled.index.to_list() for next_row in next_rows: if next_row not in samples_labeled_index:", "LabelEncoder from sklearn.semi_supervised import SelfTrainingClassifier from sklearn.svm import SVC from", "classifier to predict the labels of the unlabeled data. It", "defaultdict import numpy as np import pandas as pd from", "It then labels those samples and repeats the process until", "of data. Neurocomputing, 275, 180-191. 
\"\"\" def __init__( self, dc=None,", "None self.classifier_stdpnf = None self.order = None self.structure = None", "from .utils import split class STDPNF: \"\"\" <NAME>., <NAME>., &", "np.float32), np.array(nneigh, np.float32) def __structure(self): \"\"\" The function takes the", "function takes the labeled samples and trains the classifier on", "def __nan_search(self): \"\"\" For each point, find the set of", "each point, find the set of points that are within", "list of indices of the data to be labeled :param", "is less than the cutoff distance, return 1, otherwise return", "in the dataframe, the list of indices of the unlabeled", "distance, the minimum distance, the dc value, the rho value,", "self.classifier.predict(lu) for new_label, pos in zip(y_pred, next_unlabeled): self.structure.at[pos, \"label\"] =", "to use in the KNN classifier. It then creates a", "KNN classifier to predict the labels of the unlabeled data.", "self.anormal = anormal self.filtering = filtering if classifier is not", "set of points that are within a distance of r+3.", "= [] samples_labeled_index = samples_labeled.index.to_list() for prev_row in prev_rows: if", "to the structure data set. :param complete: the complete dataset", "semi-supervised classification based on density peaks of data. 
Neurocomputing, 275,", "self.low = None self.u = None self.classifier_stdpnf = None self.order", ":param src: The source image :return: The classifier is being", "- label: the label of the data point The function", "< len(self.y) else -1, ] for index in range(self.n_id): if", "then labels the next point, and then labels the previous", "unlabeled_next_of_labeled = self.structure.loc[next_unlabeled] lu = unlabeled_next_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for", "= filter_method else: self.filter = None self.y = None self.low", "= defaultdict(set) nb = dict.fromkeys(range(self.n_id), 0) knn = defaultdict(set) rnn", "points that are within a distance of r+1 is also", "distance between two nodes :param dc: The cutoff distance :return:", "= self.structure.loc[prev_unlabeled] lu = unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label,", "for index in range(self.n_id): if self.structure[self.structure[index][1]][2] is None: self.structure[self.structure[index][1]][2] =", "are no more samples to label :param count: the number", "and upper bounds of the data, and the data itself,", "of the data :param y: the labels of the data", "and trains the classifier on them :return: The samples that", "<NAME>., & <NAME>. (2019). 
A self-training method based on density", "range(self.n_id): for j in range(i + 1, self.n_id): distance[(i, j)]", "-1] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) def _results_to_structure(self, complete, result): \"\"\"", "of r+2 is also a superset of the set of", "label: the label of the data point The function also", "pos in zip(y_pred, next_unlabeled): self.structure.at[pos, \"label\"] = new_label count +=", "self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] == -1 ].index.to_list() unlabeled_indexes = [] for row", "point, find the set of points that are within a", "are within a distance of r, and the set of", "/ self.n_id**2 ) if 0.01 <= nneighs <= 0.02: break", "<= nneighs <= 0.02: break # binary search if nneighs", "self.structure = dict.fromkeys(range(self.n_id)) for index, sample in enumerate(self.data): self.structure[index] =", "unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled] lu = unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for", "self.order = dict.fromkeys(range(self.n_id), 0) count = self._label_next_point(count) self._label_previous_points(count) def _label_previous_points(self,", "new_label, pos in zip(y_pred, prev_unlabeled): self.structure.at[pos, \"label\"] = new_label count", "distance_matrix = pdist(self.data, metric=self.distance_metric) distance_matrix = squareform(distance_matrix) triangle_upper = np.triu_indices(self.data.shape[0],", "nearest neighbor - label: the label of the data point", "return 1, otherwise return 0 :param dij: distance between atoms", "\"\"\"Semi Supervised Algorithm based on Density Peaks.\"\"\" self.dc = dc", "gauss_cutoff=True, percent=2.0, density_threshold=None, distance_threshold=None, anormal=True, filtering=False, classifier=None, classifier_params=None, filter_method=None, ):", "rho value, the delta value, the number of neighbors, and", ":return: local density vector that index is the point 
index", "\"\"\" Self Training based on Density Peaks and a parameter-free", "between two atoms is less than the cutoff distance, return", "the neighbors of the unlabeled data. If it is, then", "for r in result.to_numpy(): is_in = False for c in", "data set, it is added to the structure data set.", "= self.__nan_search() self.classifier_stdpnf = KNeighborsClassifier( n_neighbors=self.k, metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low, self.y)", "local density threshold, default is the method used in paper,", "= True if not is_in: results_to_unlabeled.append(r) for r in results_to_unlabeled:", "self.structure_stdnpf = None self.n_id = None self.distances = None self.max_dis", "set. :param complete: the complete dataset :param result: the result", "le.transform(y) self.__init_values(l, u, y) if self.filtering: self._fit_stdpnf() else: self._fit_without() def", "that are within a distance of r+1. The set of", "the complete data set. If the result is not in", "(self.n_id + 1) / 2 * self.percent / 100) dc", "position = int(self.n_id * (self.n_id + 1) / 2 *", "self.max_dis = None self.min_dis = None self.rho = None self.delta", "self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\" ] = -1 def _if_filter(self, complete, complete_y):", "dij < dc else 0 func = gauss_func if self.gauss_cutoff", "else: harmfulness += 1 if usefulness >= harmfulness: es.append(row_index) es_pred.append(pred)", "distance[(i, j)] = distance_matrix[i, j] distance[(j, i)] = distance_matrix[i, j]", "in zip(y_pred, next_unlabeled): self.structure.at[pos, \"label\"] = new_label count += 1", "laying structure.\"\"\" self._fit_without() def __nan_search(self): \"\"\" For each point, find", "of the unlabeled data to the list of indices of", "+ 1, algorithm=\"kd_tree\") search.fit(self.data) for index, sample in enumerate(self.data): r_neighs", "to them. 
\"\"\" es = [] es_pred = [] local_structure", "rho[j] += temp return np.array(rho, np.float32) def __min_neighbor_and_distance(self): \"\"\" Compute", "are within a distance of r+3. And so on. The", "cutoff_func rho = [0] * self.n_id for i in range(self.n_id):", "that are within a distance of r, and the set", "is_in = True if not is_in: results_to_unlabeled.append(r) for r in", "return np.array(delta, np.float32), np.array(nneigh, np.float32) def __structure(self): \"\"\" The function", "= classifier(**classifier_params) else: self.classifier = classifier() else: self.classifier = None", "max_dis - min_dis < 0.0001: break return dc def __select_dc(self):", "True if not is_in: results_to_unlabeled.append(r) for r in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"],", "_label_next_point(self, count): \"\"\" > The function takes the samples labeled", "self.dc == \"auto\": dc = self.__auto_select_dc() else: position = int(self.n_id", "return samples_labeled def __discover_structure(self): \"\"\"Discovers the under laying structure.\"\"\" self._fit_without()", "y_pred = self.classifier.predict(lu) for new_label, pos in zip(y_pred, prev_unlabeled): self.structure.at[pos,", "complete data :param complete: the complete dataframe :param complete_y: the", "index, sample in enumerate(self.data): self.structure[index] = [ sample, int(self.nneigh[index]), None,", "import math from collections import defaultdict import numpy as np", "the filtered data. 
\"\"\" if isinstance(self.filter, ENN): original = pd.DataFrame(self.low)", ").transpose() self.structure_stdnpf = self.structure.copy(deep=True) def __step_a(self): \"\"\" > The function", "[0] * self.n_id for i in range(self.n_id): for j in", "previous step and finds the previous samples of those samples.", "the data and the nearest neighbor indices and creates a", "self.classifier.fit(sam_lab, y_without) return samples_labeled def __discover_structure(self): \"\"\"Discovers the under laying", "calculates the distances between the data points, the maximum distance,", "point to be labeled :return: The number of labeled samples.", "data to be labeled :param fx: the indexes of the", "u, y) if self.filtering: self._fit_stdpnf() else: self._fit_without() def predict(self, src):", "= None self.n_id = None self.distances = None self.max_dis =", "the data point - next: the index of the nearest", "if usefulness >= harmfulness: es.append(row_index) es_pred.append(pred) return es, es_pred def", "= [] es_pred = [] local_structure = self.structure_stdnpf.copy(deep=True) base_estimator =", "complete dataset :param result: the result of the clustering \"\"\"", "= samples_labeled[\"previous\"].to_numpy() prev_unlabeled = [] samples_labeled_index = samples_labeled.index.to_list() for prev_row", "return np.array(rho, np.float32) def __min_neighbor_and_distance(self): \"\"\" Compute all points' min", "in range(self.n_id): for j in range(0, i): old_i, old_j =", "None self.n_id = None self.distances = None self.max_dis = None", "percent self.density_threshold = density_threshold self.distance_threshold = distance_threshold self.anormal = anormal", "search.kneighbors( [sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for neigh in r_neighs: nb[neigh] +=", "to predict the labels of the unlabeled data. 
It then", "them and updates the order of the samples :param count:", "= False for c in complete: if np.array_equal(r, c): is_in", "pred: usefulness += 1 else: harmfulness += 1 if usefulness", "2 if max_dis - min_dis < 0.0001: break return dc", "dc def __select_dc(self): \"\"\" Select the local density threshold, default", "<NAME>. (2018). Self-training semi-supervised classification based on density peaks of", "Algorithm based on Density Peaks.\"\"\" self.dc = dc self.distance_metric =", "* self.n_id delta[sort_rho_idx[0]] = -1.0 for i in range(self.n_id): for", "for k nearest neighbor. Knowledge-Based Systems, 184, 104895. <NAME>., <NAME>.,", "None self.data = None def __build_distance(self): \"\"\" Calculate distance dict.", "for j in range(i + 1, self.n_id): temp = func(self.distances[(i,", "be labeled :param fx: the indexes of the unlabeled data", "defaultdict(int) while True: search = NearestNeighbors(n_neighbors=r + 1, algorithm=\"kd_tree\") search.fit(self.data)", "metric=self.distance_metric ) self.classifier_stdpnf.fit(self.low, self.y) count = 1 while count <=", "else: self.filter = None self.y = None self.low = None", "a distance of r+3. And so on. 
The set of", "distance_metric self.k = k self.gauss_cutoff = gauss_cutoff self.percent = percent", "isinstance(self.filter, ENN): original = pd.DataFrame(self.low) original_y = pd.DataFrame(self.y) result, _", "and the unlabeled data, and uses the KNN classifier to", "metric=self.distance_metric) distance_matrix = squareform(distance_matrix) triangle_upper = np.triu_indices(self.data.shape[0], 1) triangle_upper =", "old_j)] < delta[old_i]: delta[old_i] = self.distances[(old_i, old_j)] nneigh[old_i] = old_j", "from instance_selection import ENN from .utils import split class STDPNF:", "self.__local_density() self.delta, self.nneigh = self.__min_neighbor_and_distance() self.__structure() def _fit_without(self): \"\"\" The", "labeled_data = local_structure.loc[local_structure[\"label\"] != -1] nan_unlabeled = local_structure.loc[fx] data =", "nb[neigh] += 1 rnn[neigh].add(index) cnt[r] = np.count_nonzero((np.array(list(nb.values())) == 0)) if", "squareform distance_matrix = pdist(self.data, metric=self.distance_metric) distance_matrix = squareform(distance_matrix) triangle_upper =", "cutoff distance, return 1, otherwise return 0 :param dij: distance", "1, algorithm=\"kd_tree\") search.fit(self.data) for index, sample in enumerate(self.data): r_neighs =", "= [float(self.max_dis)] * self.n_id, [0] * self.n_id delta[sort_rho_idx[0]] = -1.0", "distance between atoms i and j :param dc: cutoff distance", "on a trained classifier. 
:param src: The source image :return:", "!= \"ENANE\": self.filter = filter_method() elif isinstance(filter_method, str) and filter_method", "between two nodes :param dc: The cutoff distance :return: the", "pd.DataFrame( self.structure, index=[\"sample\", \"next\", \"previous\", \"label\"] ).transpose() self.structure_stdnpf = self.structure.copy(deep=True)", "threshold, default is the method used in paper, 'auto' is", "self.delta = None self.nneigh = None self.data = None def", "the value of the Gaussian function at that point :param", "else: position = int(self.n_id * (self.n_id + 1) / 2", "If the filter is an ENN, then filter the original", "<NAME> # @Time: 5/3/22 09:55 # @Version: 4.0 import math", "and the labels that are going to be assigned to", "the index of the unlabeled data to the list of", "not None: if isinstance(classifier_params, dict): self.classifier = classifier(**classifier_params) else: self.classifier", "self.dc = dc self.distance_metric = distance_metric self.k = k self.gauss_cutoff", "distance of r+2 is also a superset of the set", "itself, and then calculates the distances between the data points,", "-*- # @Filename: DensityPeaks.py # @Author: <NAME> # @Time: 5/3/22", "= search.kneighbors( [sample], return_distance=False)[0][1:] knn[index].update(list(r_neighs)) for neigh in r_neighs: nb[neigh]", "local_structure.loc[fx] data = pd.concat([labeled_data, nan_unlabeled], join=\"inner\") enane_model = SelfTrainingClassifier(base_estimator) enane_model.fit(data[\"sample\"].tolist(),", "/ 2 while True: nneighs = ( sum([1 for v", "else -1, ] for index in range(self.n_id): if self.structure[self.structure[index][1]][2] is", "iteration \"\"\" while True: samples_labeled = self.__step_a() prev_rows = samples_labeled[\"previous\"].to_numpy()", "and then calculates the distances between the data points, the", "self.delta, self.nneigh = self.__min_neighbor_and_distance() self.__structure() def _fit_without(self): \"\"\" The function", "and then labels 
the next point, and then labels the", "index in range(self.n_id): if self.structure[self.structure[index][1]][2] is None: self.structure[self.structure[index][1]][2] = index", "the labels of the data \"\"\" self.y = y self.low", "if self.distances[(old_i, old_j)] < delta[old_i]: delta[old_i] = self.distances[(old_i, old_j)] nneigh[old_i]", "self.classifier_stdpnf.fit(self.low, self.y) count = 1 while count <= max(self.order.values()): unlabeled_rows", "and a parameter-free noise filter. \"\"\" self.__discover_structure() nan, lambda_param =", "the maximum distance, the minimum distance, the dc value, the", "+ min_dis) / 2 if max_dis - min_dis < 0.0001:", "the unlabeled data, and uses the KNN classifier to predict", "if self.classifier is None: raise AssertionError(\"The model needs to be", "in paper, 'auto' is auto select. :return: dc that local", "2 * self.percent / 100) dc = np.sort(list(self.distances.values()))[ position *", "point index \"\"\" def gauss_func(dij, dc): \"\"\" > The function", "between the data points, the maximum distance, the minimum distance,", "None self.max_dis = None self.min_dis = None self.rho = None", "and finds the previous samples of those samples. It then", "def _label_previous_points(self, count): \"\"\" > The function takes the samples", "finds the previous samples of those samples. It then labels", "function takes in the dataframe, the list of indices of", "for c in complete: if np.array_equal(r, c): is_in = True", "gauss_cutoff self.percent = percent self.density_threshold = density_threshold self.distance_threshold = distance_threshold", "while True: samples_labeled = self.__step_a() prev_rows = samples_labeled[\"previous\"].to_numpy() prev_unlabeled =", "samples that are going to be labeled and the labels", "sample, int(self.nneigh[index]), None, self.y[index] if index < len(self.y) else -1,", "of points that are within a distance of r+1. 
The", "function takes the data and the nearest neighbor indices and", "samples in the structure. If the next samples are not", "\"\"\" Compute all points' local density. :return: local density vector", "points that are within a distance of r+1. The set", "previous: the index of the nearest neighbor of the nearest", "r. The set of points that are within a distance", "count: unlabeled_indexes.append(row) if isinstance(self.filter, str) and self.filter == \"ENANE\": filtered_indexes,", "SVC from instance_selection import ENN from .utils import split class", "= -1.0 for i in range(self.n_id): for j in range(0,", "self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) def _results_to_structure(self, complete,", "is None: self.structure[self.structure[index][1]][2] = index self.structure = pd.DataFrame( self.structure, index=[\"sample\",", "= [] for r in result.to_numpy(): is_in = False for", "== \"auto\": dc = self.__auto_select_dc() else: position = int(self.n_id *", ":param u: upper bound of the data :param y: the", "range(i + 1, self.n_id): temp = func(self.distances[(i, j)], self.dc) rho[i]", "min_dis) / 2 while True: nneighs = ( sum([1 for", "sum([1 for v in self.distances.values() if v < dc]) /", "the nearest neighbor - label: the label of the data", "zip(nan_unlabeled.iterrows(), enane_pred): usefulness = 0 harmfulness = 0 for neigh", "points that are within a distance of r+1 is a", "structure of the data :param low: lower bound of the", "the filter is an ENN, then filter the original data,", ":param r: the number of neighbors to consider :return: The", "sort_rho_idx[i], sort_rho_idx[j] if self.distances[(old_i, old_j)] < delta[old_i]: delta[old_i] = self.distances[(old_i,", "labels of the unlabeled data. 
It then checks if the", "not in samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row] = count if len(next_unlabeled) ==", "None self.y = None self.low = None self.u = None", "low, u, y): \"\"\" It takes in the lower and", "def __structure(self): \"\"\" The function takes the data and the", "self._results_to_structure(complete, result) labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] self.classifier_stdpnf.fit(", "self.structure_stdnpf.copy(deep=True) base_estimator = KNeighborsClassifier( n_neighbors=r, metric=self.distance_metric ) labeled_data = local_structure.loc[local_structure[\"label\"]", "complete: the complete dataframe :param complete_y: the complete y values", "__select_dc(self): \"\"\" Select the local density threshold, default is the", "nan = defaultdict(set) nb = dict.fromkeys(range(self.n_id), 0) knn = defaultdict(set)", "of r+1 is also a superset of the set of", "two nodes :param dc: The cutoff distance :return: the value", "if self.classifier is None: self.classifier = SVC() count = 1", "\"\"\" > The function takes in a distance value and", "= dc self.distance_metric = distance_metric self.k = k self.gauss_cutoff =", "] = -1 def _if_filter(self, complete, complete_y): \"\"\" If the", "None: self.structure[self.structure[index][1]][2] = index self.structure = pd.DataFrame( self.structure, index=[\"sample\", \"next\",", "them. \"\"\" es = [] es_pred = [] local_structure =", "of r+1. The set of points that are within a", "np import pandas as pd from sklearn.neighbors import KNeighborsClassifier, NearestNeighbors", "sort_rho_idx = np.argsort(-self.rho) delta, nneigh = [float(self.max_dis)] * self.n_id, [0]", "old_i, old_j = sort_rho_idx[i], sort_rho_idx[j] if self.distances[(old_i, old_j)] < delta[old_i]:", "in range(i + 1, self.n_id): distance[(i, j)] = distance_matrix[i, j]", "the nearest neighbor - previous: the index of the nearest", "distance dict. 
:return: distance dict, max distance, min distance \"\"\"", "Gaussian function at that point :param dij: distance between two", "data, and the data itself, and then calculates the distances", "# @Version: 4.0 import math from collections import defaultdict import", "the list of indices of the data to be labeled", "the dataframe called structure_stdnpf \"\"\" self.structure = dict.fromkeys(range(self.n_id)) for index,", "is None: raise ValueError(\"Encountered rho as None.\") sort_rho_idx = np.argsort(-self.rho)", "those samples. It then labels those samples and repeats the", "is being returned. \"\"\" if self.classifier is None: raise AssertionError(\"The", "coding:utf-8 -*- # @Filename: DensityPeaks.py # @Author: <NAME> # @Time:", "labels those samples and repeats the process until there are", "next_row in next_rows: if next_row not in samples_labeled_index: next_unlabeled.append(next_row) self.order[next_row]", "Training based on Density Peaks and a parameter-free noise filter.", "max_dis, min_dis = self.max_dis, self.min_dis dc = (max_dis + min_dis)", "complete_y: the complete y values :return: The result is a", "The function takes in the dataframe, the list of indices", "distance value and a cutoff value, and returns the value", "1 nan = defaultdict(set) nb = dict.fromkeys(range(self.n_id), 0) knn =", "== 0)) if r > 2 and cnt[r] == cnt[r", ":param count: the number of the next point to be", "percent=2.0, density_threshold=None, distance_threshold=None, anormal=True, filtering=False, classifier=None, classifier_params=None, filter_method=None, ): \"\"\"Semi", "= filter_method() elif isinstance(filter_method, str) and filter_method == \"ENANE\": self.filter", "filter the complete data :param complete: the complete dataframe :param", "density vector that index is the point index \"\"\" def", "used in paper, 'auto' is auto select. :return: dc that", "def _fit_stdpnf(self): \"\"\" Self Training based on Density Peaks and", "select. 
:return: dc that local density threshold \"\"\" if self.dc", "harmfulness += 1 if usefulness >= harmfulness: es.append(row_index) es_pred.append(pred) return", "the previous step and finds the next samples in the", "unlabeled data. If it is, then it adds the index", "dict.fromkeys(range(self.n_id), 0) count = self._label_next_point(count) self._label_previous_points(count) def _label_previous_points(self, count): \"\"\"", "== count: unlabeled_indexes.append(row) if isinstance(self.filter, str) and self.filter == \"ENANE\":", "int(self.n_id * (self.n_id + 1) / 2 * self.percent /", "[float(self.max_dis)] * self.n_id, [0] * self.n_id delta[sort_rho_idx[0]] = -1.0 for", "cnt[r - 1]: r -= 1 break r += 1", "for row in unlabeled_rows: if self.order[row] == count: unlabeled_indexes.append(row) if", "indices of the unlabeled data, the list of indices of", "2 while True: nneighs = ( sum([1 for v in", "if self.rho is None: raise ValueError(\"Encountered rho as None.\") sort_rho_idx", "the label of the data point The function also creates", "between atoms i and j :param dc: cutoff distance :return:", "isinstance(self.filter, str) and self.filter == \"ENANE\": filtered_indexes, filtered_labels = self.__enane(", "of the data \"\"\" self.y = y self.low = low", "for j in range(i + 1, self.n_id): distance[(i, j)] =", "self.density_threshold = density_threshold self.distance_threshold = distance_threshold self.anormal = anormal self.filtering", "next: the index of the nearest neighbor - previous: the", "len(next_unlabeled) == 0: break unlabeled_next_of_labeled = self.structure.loc[next_unlabeled] lu = unlabeled_next_of_labeled[\"sample\"].to_list()", "def _results_to_structure(self, complete, result): \"\"\" > This function takes the", "import pandas as pd from sklearn.neighbors import KNeighborsClassifier, NearestNeighbors from", "indices of the data to be labeled :param fx: the", "are going to be assigned to them. 
\"\"\" es =", "= enane_model.predict(nan_unlabeled[\"sample\"].tolist()) for (row_index, _), pred in zip(nan_unlabeled.iterrows(), enane_pred): usefulness", "r)][ \"label\" ] = -1 def _if_filter(self, complete, complete_y): \"\"\"", "< delta[old_i]: delta[old_i] = self.distances[(old_i, old_j)] nneigh[old_i] = old_j delta[sort_rho_idx[0]]", "self.structure_stdnpf = self.structure.copy(deep=True) def __step_a(self): \"\"\" > The function takes", "based on Density Peaks and a parameter-free noise filter. \"\"\"", "self.distance_metric = distance_metric self.k = k self.gauss_cutoff = gauss_cutoff self.percent", "[] es_pred = [] local_structure = self.structure_stdnpf.copy(deep=True) base_estimator = KNeighborsClassifier(", "(max_dis + min_dis) / 2 if max_dis - min_dis <", "not None: prev_unlabeled.append(prev_row) self.order[prev_row] = count if len(prev_unlabeled) == 0:", "takes in a distance value and a cutoff value, and", "np.array(delta, np.float32), np.array(nneigh, np.float32) def __structure(self): \"\"\" The function takes", "in r_neighs: nb[neigh] += 1 rnn[neigh].add(index) cnt[r] = np.count_nonzero((np.array(list(nb.values())) ==", "is auto select. :return: dc that local density threshold \"\"\"", "if the predicted label is the same as the label", "prev_row not in samples_labeled_index and prev_row is not None: prev_unlabeled.append(prev_row)", "of the unlabeled data. If it is, then it adds", "1, otherwise return 0 :param dij: distance between atoms i", "density. 
:return: local density vector that index is the point", "<= max(self.order.values()): unlabeled_rows = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] == -1 ].index.to_list() unlabeled_indexes", "the complete data :param complete: the complete dataframe :param complete_y:", "break unlabeled_prev_of_labeled = self.structure.loc[prev_unlabeled] lu = unlabeled_prev_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu)", "+ min_dis) / 2 while True: nneighs = ( sum([1", "of the neighbors of the unlabeled data. If it is,", "= -1 def _if_filter(self, complete, complete_y): \"\"\" If the filter", "sam_lab = samples_labeled[\"sample\"].to_list() y_without = samples_labeled[\"label\"].to_list() self.classifier.fit(sam_lab, y_without) return samples_labeled", "1 if dij < dc, else 0 \"\"\" return 1", "unlabeled_rows: if self.order[row] == count: unlabeled_indexes.append(row) if isinstance(self.filter, str) and", "delta[sort_rho_idx[0]] = max(delta) return np.array(delta, np.float32), np.array(nneigh, np.float32) def __structure(self):", "labeled_data = self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist()", "labels the next point, and then labels the previous points,", "= NearestNeighbors(n_neighbors=r + 1, algorithm=\"kd_tree\") search.fit(self.data) for index, sample in", "if isinstance(classifier_params, dict): self.classifier = classifier(**classifier_params) else: self.classifier = classifier()", "0) count = self._label_next_point(count) self._label_previous_points(count) def _label_previous_points(self, count): \"\"\" >", "\"\"\" Compute all points' min util to the higher local", "triangle_upper = distance_matrix[triangle_upper] distance = {} for i in range(self.n_id):", "= defaultdict(set) cnt = defaultdict(int) while True: search = NearestNeighbors(n_neighbors=r", "with the labeled data and the unlabeled data, and uses", 
"search.fit(self.data) for index, sample in enumerate(self.data): r_neighs = search.kneighbors( [sample],", "is None: raise AssertionError(\"The model needs to be fitted first.\")", "the KNN classifier. It then creates a new dataframe with", "The function takes the data and the nearest neighbor indices", "\"\"\" if isinstance(self.filter, ENN): original = pd.DataFrame(self.low) original_y = pd.DataFrame(self.y)", "unlabeled data, and the number of neighbors to use in", "dc else: max_dis = dc dc = (max_dis + min_dis)", "r def __enane(self, fx, nan, r): \"\"\" > The function", "the method used in paper, 'auto' is auto select. :return:", "= None self.data = None def __build_distance(self): \"\"\" Calculate distance", "(row_index, _), pred in zip(nan_unlabeled.iterrows(), enane_pred): usefulness = 0 harmfulness", "classifier. :param src: The source image :return: The classifier is", "order of the samples :param count: the number of the", "of the data to be labeled :param fx: the indexes", "def __select_dc(self): \"\"\" Select the local density threshold, default is", "\"ENANE\": self.filter = filter_method() elif isinstance(filter_method, str) and filter_method ==", "being returned. 
\"\"\" if self.classifier is None: raise AssertionError(\"The model", "range(self.n_id): if self.structure[self.structure[index][1]][2] is None: self.structure[self.structure[index][1]][2] = index self.structure =", "of a sample :param r: the number of neighbors to", "results of the model and compares them to the complete", "filter_method=None, ): \"\"\"Semi Supervised Algorithm based on Density Peaks.\"\"\" self.dc", "for prev_row in prev_rows: if prev_row not in samples_labeled_index and", "0: break unlabeled_next_of_labeled = self.structure.loc[next_unlabeled] lu = unlabeled_next_of_labeled[\"sample\"].to_list() y_pred =", "self.filter = filter_method() elif isinstance(filter_method, str) and filter_method == \"ENANE\":", "results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\" ] = -1 def _if_filter(self, complete,", "takes the data and the nearest neighbor indices and creates", "them to the complete data set. If the result is", "es_pred def __init_values(self, low, u, y): \"\"\" It takes in", "method used in paper, 'auto' is auto select. :return: dc", "the number of neighbors to use in the KNN classifier.", "= self.max_dis, self.min_dis dc = (max_dis + min_dis) / 2", "count += 1 labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(),", "None self.distances = None self.max_dis = None self.min_dis = None", "labeled_data = self.structure_stdnpf.loc[self.structure_stdnpf[\"label\"] != -1] self.classifier_stdpnf.fit( labeled_data[\"sample\"].tolist(), labeled_data[\"label\"].tolist() ) def", "pd from sklearn.neighbors import KNeighborsClassifier, NearestNeighbors from sklearn.preprocessing import LabelEncoder", "copy of the dataframe called structure_stdnpf \"\"\" self.structure = dict.fromkeys(range(self.n_id))", "points that are within a distance of r+2 is :return:", "so on. 
The set of points that are within a", "-1, ] for index in range(self.n_id): if self.structure[self.structure[index][1]][2] is None:", "Self-training semi-supervised classification based on density peaks of data. Neurocomputing,", "+ self.n_id] return dc def __local_density(self): \"\"\" Compute all points'", "distance of r+3. And so on. The set of points", "points' min util to the higher local density point(which is", "than the cutoff distance, return 1, otherwise return 0 :param", "= unlabeled_next_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label, pos in zip(y_pred,", "returned. \"\"\" if self.classifier is None: raise AssertionError(\"The model needs", "data point - next: the index of the nearest neighbor", "self._fit_without() def __nan_search(self): \"\"\" For each point, find the set", "\"\"\" samples_labeled = self.structure.loc[self.structure[\"label\"] != -1] sam_lab = samples_labeled[\"sample\"].to_list() y_without", "The cutoff distance :return: the value of the gaussian function.", "a copy of the dataframe called structure_stdnpf \"\"\" self.structure =", "dc = self.__auto_select_dc() else: position = int(self.n_id * (self.n_id +", "nan[row_index]: if local_structure.loc[neigh, \"label\"] == pred: usefulness += 1 else:", "vector \"\"\" if self.rho is None: raise ValueError(\"Encountered rho as", "the data points, the maximum distance, the minimum distance, the", "point, and then labels the previous points, without filtering. 
\"\"\"", "a distance of r+1 is also a superset of the", "> This function takes the results of the model and", "nan: a list of lists, where each list contains the", "= split(samples, y) except IndexError: raise ValueError(\"Dimensions do not match.\")", "!= -1] nan_unlabeled = local_structure.loc[fx] data = pd.concat([labeled_data, nan_unlabeled], join=\"inner\")", "min util to the higher local density point(which is the", "is_in = False for c in complete: if np.array_equal(r, c):", "that local density threshold \"\"\" if self.dc == \"auto\": dc", "value, the number of neighbors, and the structure of the", "labeled in the previous step and finds the previous samples", "= self.filter.filter_original_complete( original, original_y, complete, complete_y ) else: result, _", ":param dij: distance between atoms i and j :param dc:", "NearestNeighbors from sklearn.preprocessing import LabelEncoder from sklearn.semi_supervised import SelfTrainingClassifier from", "function takes in a classifier, and then labels the next", "labeled data and the unlabeled data, and uses the KNN", "None self.min_dis = None self.rho = None self.delta = None", "in nan[row_index]: if local_structure.loc[neigh, \"label\"] == pred: usefulness += 1", "Select the local density threshold, default is the method used", "in results_to_unlabeled: self.structure_stdnpf.at[np.array(self.structure_stdnpf[\"sample\"], r)][ \"label\" ] = -1 def _if_filter(self,", "result def fit(self, samples, y): \"\"\"Fit method.\"\"\" try: l, u,", "are within a distance of r+2. The set of points", "neighbors of the unlabeled data. If it is, then it", "be labeled and the labels that are going to be", "to consider :return: The indexes of the samples that are", "function takes the samples labeled in the previous step and", "previous points, without filtering. 
\"\"\" if self.classifier is None: self.classifier", "from sklearn.svm import SVC from instance_selection import ENN from .utils", "then creates a new dataframe with the labeled data and", "list contains the indices of the neighbors of a sample", "knn = defaultdict(set) rnn = defaultdict(set) cnt = defaultdict(int) while", "the labels of the unlabeled data. It then checks if", "self.distances = None self.max_dis = None self.min_dis = None self.rho", "the label of the majority of the neighbors of the", "self.low = low self.u = u self.data = np.concatenate((low, u),", "of the data, and the data itself, and then calculates", "\"\"\" The function takes in a classifier, and then labels", "The function takes in a classifier, and then labels the", "= pd.DataFrame(self.y) result, _ = self.filter.filter_original_complete( original, original_y, complete, complete_y", "cutoff distance :return: the value of the gaussian function. \"\"\"", "0 func = gauss_func if self.gauss_cutoff else cutoff_func rho =", "And so on. The set of points that are within", "that are going to be labeled and the labels that", "None: raise AssertionError(\"The model needs to be fitted first.\") return", "old_j = sort_rho_idx[i], sort_rho_idx[j] if self.distances[(old_i, old_j)] < delta[old_i]: delta[old_i]", "self.filter = filter_method else: self.filter = None self.y = None", "of the data :param low: lower bound of the data", "row in unlabeled_rows: if self.order[row] == count: unlabeled_indexes.append(row) if isinstance(self.filter,", "step and finds the next samples in the structure. If", "the structure. 
If the next samples are not labeled, it", "data :param u: upper bound of the data :param y:", "unlabeled data to the list of indices of the data", "- 1]: r -= 1 break r += 1 for", "self.distance_threshold = distance_threshold self.anormal = anormal self.filtering = filtering if", "\"\"\"Discovers the under laying structure.\"\"\" self._fit_without() def __nan_search(self): \"\"\" For", "to be assigned to them. \"\"\" es = [] es_pred", "= self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] != -1 ] complete = labeled_data[\"sample\"] complete_y", "# @Time: 5/3/22 09:55 # @Version: 4.0 import math from", "if local_structure.loc[neigh, \"label\"] == pred: usefulness += 1 else: harmfulness", "None, self.y[index] if index < len(self.y) else -1, ] for", "= self.structure_stdnpf.loc[ self.structure_stdnpf[\"label\"] == -1 ].index.to_list() unlabeled_indexes = [] for", "= self.structure.loc[next_unlabeled] lu = unlabeled_next_of_labeled[\"sample\"].to_list() y_pred = self.classifier.predict(lu) for new_label,", "self.dc = self.__select_dc() self.rho = self.__local_density() self.delta, self.nneigh = self.__min_neighbor_and_distance()", "dict.fromkeys(range(self.n_id)) for index, sample in enumerate(self.data): self.structure[index] = [ sample,", "triangle_upper = np.triu_indices(self.data.shape[0], 1) triangle_upper = distance_matrix[triangle_upper] distance = {}", "takes in the dataframe, the list of indices of the", "\"\"\" Auto select the local density threshold that let average", "labeled samples and trains the classifier on them :return: The" ]
[ "node.children: que.put(child) if k == 0: k = que.qsize() ans.append(list(tmp))", "que.put(root) ans, tmp, k = [], [], 1 while que.qsize()", "for a Node. \"\"\" class TreeNode(object): def __init__(self, val, children):", "root: Node :rtype: List[List[int]] \"\"\" if root is None: return", "0: node = que.get() tmp.append(node.val) k -= 1 for child", "root): \"\"\" :type root: Node :rtype: List[List[int]] \"\"\" if root", "ans.append(list(tmp)) tmp = [] return ans node2 = TreeNode(2, [])", "= [] return ans node2 = TreeNode(2, []) node3 =", "[node2, node3] node1 = TreeNode(1, children) solution = Solution() print(solution.levelOrder(node1))", "from Queue import Queue que = Queue() que.put(root) ans, tmp,", "= [], [], 1 while que.qsize() != 0: node =", "children): self.val = val self.children = children class Solution(object): def", "tmp, k = [], [], 1 while que.qsize() != 0:", "ans node2 = TreeNode(2, []) node3 = TreeNode(3, []) children", "\"\"\" :type root: Node :rtype: List[List[int]] \"\"\" if root is", "[] from Queue import Queue que = Queue() que.put(root) ans,", "k -= 1 for child in node.children: que.put(child) if k", "TreeNode(object): def __init__(self, val, children): self.val = val self.children =", "!= 0: node = que.get() tmp.append(node.val) k -= 1 for", "que.qsize() ans.append(list(tmp)) tmp = [] return ans node2 = TreeNode(2,", "TreeNode(2, []) node3 = TreeNode(3, []) children = [node2, node3]", "tmp = [] return ans node2 = TreeNode(2, []) node3", ":type root: Node :rtype: List[List[int]] \"\"\" if root is None:", "__init__(self, val, children): self.val = val self.children = children class", "TreeNode(3, []) children = [node2, node3] node1 = TreeNode(1, children)", "if root is None: return [] from Queue import Queue", "= children class Solution(object): def levelOrder(self, root): \"\"\" :type root:", "in node.children: que.put(child) if k == 0: k = que.qsize()", "class TreeNode(object): def __init__(self, val, children): self.val = val 
self.children", "while que.qsize() != 0: node = que.get() tmp.append(node.val) k -=", "\"\"\" class TreeNode(object): def __init__(self, val, children): self.val = val", "a Node. \"\"\" class TreeNode(object): def __init__(self, val, children): self.val", "None: return [] from Queue import Queue que = Queue()", "children = [node2, node3] node1 = TreeNode(1, children) solution =", "Queue import Queue que = Queue() que.put(root) ans, tmp, k", "Queue que = Queue() que.put(root) ans, tmp, k = [],", "[], 1 while que.qsize() != 0: node = que.get() tmp.append(node.val)", "children class Solution(object): def levelOrder(self, root): \"\"\" :type root: Node", "for child in node.children: que.put(child) if k == 0: k", "= [node2, node3] node1 = TreeNode(1, children) solution = Solution()", "[]) node3 = TreeNode(3, []) children = [node2, node3] node1", "[]) children = [node2, node3] node1 = TreeNode(1, children) solution", "return ans node2 = TreeNode(2, []) node3 = TreeNode(3, [])", "Solution(object): def levelOrder(self, root): \"\"\" :type root: Node :rtype: List[List[int]]", "root is None: return [] from Queue import Queue que", "[], [], 1 while que.qsize() != 0: node = que.get()", "return [] from Queue import Queue que = Queue() que.put(root)", "class Solution(object): def levelOrder(self, root): \"\"\" :type root: Node :rtype:", "val, children): self.val = val self.children = children class Solution(object):", "# Definition for a Node. \"\"\" class TreeNode(object): def __init__(self,", "node = que.get() tmp.append(node.val) k -= 1 for child in", "= que.get() tmp.append(node.val) k -= 1 for child in node.children:", ":rtype: List[List[int]] \"\"\" if root is None: return [] from", "self.val = val self.children = children class Solution(object): def levelOrder(self,", "k = que.qsize() ans.append(list(tmp)) tmp = [] return ans node2", "\"\"\" # Definition for a Node. 
\"\"\" class TreeNode(object): def", "if k == 0: k = que.qsize() ans.append(list(tmp)) tmp =", "= TreeNode(3, []) children = [node2, node3] node1 = TreeNode(1,", "is None: return [] from Queue import Queue que =", "1 for child in node.children: que.put(child) if k == 0:", "que = Queue() que.put(root) ans, tmp, k = [], [],", "Node :rtype: List[List[int]] \"\"\" if root is None: return []", "= Queue() que.put(root) ans, tmp, k = [], [], 1", "\"\"\" if root is None: return [] from Queue import", "que.get() tmp.append(node.val) k -= 1 for child in node.children: que.put(child)", "tmp.append(node.val) k -= 1 for child in node.children: que.put(child) if", "k == 0: k = que.qsize() ans.append(list(tmp)) tmp = []", "node3 = TreeNode(3, []) children = [node2, node3] node1 =", "val self.children = children class Solution(object): def levelOrder(self, root): \"\"\"", "Queue() que.put(root) ans, tmp, k = [], [], 1 while", "que.put(child) if k == 0: k = que.qsize() ans.append(list(tmp)) tmp", "node2 = TreeNode(2, []) node3 = TreeNode(3, []) children =", "[] return ans node2 = TreeNode(2, []) node3 = TreeNode(3,", "0: k = que.qsize() ans.append(list(tmp)) tmp = [] return ans", "levelOrder(self, root): \"\"\" :type root: Node :rtype: List[List[int]] \"\"\" if", "= TreeNode(2, []) node3 = TreeNode(3, []) children = [node2,", "def __init__(self, val, children): self.val = val self.children = children", "ans, tmp, k = [], [], 1 while que.qsize() !=", "Node. \"\"\" class TreeNode(object): def __init__(self, val, children): self.val =", "k = [], [], 1 while que.qsize() != 0: node", "self.children = children class Solution(object): def levelOrder(self, root): \"\"\" :type", "1 while que.qsize() != 0: node = que.get() tmp.append(node.val) k", "child in node.children: que.put(child) if k == 0: k =", "Definition for a Node. 
\"\"\" class TreeNode(object): def __init__(self, val,", "= que.qsize() ans.append(list(tmp)) tmp = [] return ans node2 =", "-= 1 for child in node.children: que.put(child) if k ==", "List[List[int]] \"\"\" if root is None: return [] from Queue", "def levelOrder(self, root): \"\"\" :type root: Node :rtype: List[List[int]] \"\"\"", "import Queue que = Queue() que.put(root) ans, tmp, k =", "= val self.children = children class Solution(object): def levelOrder(self, root):", "que.qsize() != 0: node = que.get() tmp.append(node.val) k -= 1", "== 0: k = que.qsize() ans.append(list(tmp)) tmp = [] return" ]
[ "False raise def create_connection(url, timeout=None, class_=WebSocket, **options): \"\"\" connect to", "server. return value: ABNF frame object. \"\"\" return self.frame_buffer.recv_frame() def", "from the server. return value: string(byte array) value. \"\"\" opcode,", "timeout): \"\"\" Set the timeout to the websocket. timeout: timeout", "array) value. \"\"\" opcode, frame = self.recv_data_frame(control_frame) return opcode, frame.data", "frame.data)[0] if recv_status != STATUS_NORMAL: error(\"close status: \" + repr(recv_status))", "_send(self, data): return send(self.sock, data) def _recv(self, bufsize): try: return", "- host names, which doesn't use proxy. \"http_proxy_auth\" - http", ">>> ws.send_frame(frame) \"\"\" if self.get_mask_key: frame.get_mask_key = self.get_mask_key data =", "long\") if control_frame: return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PONG:", "\"\"\" return self.sock_opt.timeout def settimeout(self, timeout): \"\"\" Set the timeout", "of available sub protocols. default is None. \"skip_utf8_validation\" - skip", "return self.handshake_response.headers else: return None headers = property(getheaders) def connect(self,", "protocols. default is None. \"skip_utf8_validation\" - skip utf8 validation. \"socket\"", "or dict. \"cookie\" -> cookie value. \"origin\" -> custom origin", "header string. \"http_proxy_host\" - http proxy host name. \"http_proxy_port\" -", "return value: tuple of operation code and string(byte array) value.", "self.sock = None self.connected = False self.get_mask_key = get_mask_key #", "= timeout if self.sock: self.sock.settimeout(timeout) timeout = property(gettimeout, settimeout) def", "values for socket.setsockopt. sockopt must be tuple and each element", "data as frame from server. 
return value: ABNF frame object.", "len(frame.data) < 126: self.pong(frame.data) else: raise WebSocketProtocolException(\"Ping message is too", "skip_utf8_validation = options.pop(\"skip_utf8_validation\", False) websock = class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread,", "== ABNF.OPCODE_PONG: if control_frame: return (frame.opcode, frame) def recv_frame(self): \"\"\"", "the connection. It has to implement settimeout and connect. It's", "url. \"host\" -> custom host header string. \"http_proxy_host\" - http", "you set \"header\" list object, you can set your own", "repr(recv_status)) except: pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except: pass self.shutdown() def abort(self):", "be string or bytes. \"\"\" if status < 0 or", "of username and password. default is None \"enable_multithread\" -> enable", "it means \"use default_timeout value\" options: \"header\" -> custom http", "free software; you can redistribute it and/or modify it under", ">>> ws = create_connection(\"ws://echo.websocket.org/\") >>> frame = ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>>", "client. ========================= This version support only hybi-13. Please see http://tools.ietf.org/html/rfc6455", "skip_utf8_validation) if enable_multithread: self.lock = threading.Lock() else: self.lock = NoLock()", "invalid range\") self.connected = False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)", "set function to create musk key. You can customize mask", "None for this value, it means \"use default_timeout value\" options:", "(at your option) any later version. This library is distributed", "* from ._http import * from ._handshake import * from", "sslopt=None, fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False, **options): \"\"\" Initialize WebSocket object. \"\"\"", "\"http_proxy_host\" - http proxy host name. 
\"http_proxy_port\" - http proxy", "General Public License for more details. You should have received", "timeout: timeout until receive a close frame. If None, it", "__iter__(self): \"\"\" Allow iteration over websocket, implying sequential `recv` executions.", "Please see http://tools.ietf.org/html/rfc6455 for protocol. \"\"\" class WebSocket(object): \"\"\" Low", "default is None. \"skip_utf8_validation\" - skip utf8 validation. \"socket\" -", "object, you can set your own custom header. >>> ws", "property(getsubprotocol) def getstatus(self): \"\"\" get handshake status \"\"\" if self.handshake_response:", "If not set, set to 80. \"http_no_proxy\" - host names,", "sockopt must be tuple and each element is argument of", "try: return recv(self.sock, bufsize) except WebSocketConnectionClosedException: if self.sock: self.sock.close() self.sock", "websocket, implying sequential `recv` executions. \"\"\" while True: yield self.recv()", "if self.connected: self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self): \"close socket, immediately.\" if self.sock:", "bufsize): try: return recv(self.sock, bufsize) except WebSocketConnectionClosedException: if self.sock: self.sock.close()", "option \"subprotocols\" - array of available sub protocols. default is", "WebSocket protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can connect to the websocket", "opcode, data = self.recv_data() if six.PY3 and opcode == ABNF.OPCODE_TEXT:", "property(getheaders) def connect(self, url, **options): \"\"\" Connect to url. url", "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335", "from ._utils import * from ._url import * from ._logging", "class_: class to instantiate when creating the connection. It has", "docstring for more details sockopt: values for socket.setsockopt. sockopt must", "support only hybi-13. Please see http://tools.ietf.org/html/rfc6455 for protocol. \"\"\" class", "websocket. 
timeout: timeout time(second). \"\"\" self.sock_opt.timeout = timeout if self.sock:", "will be useful, but WITHOUT ANY WARRANTY; without even the", "name. \"http_proxy_port\" - http proxy port. If not set, set", "data frame. frame: frame data created by ABNF.create_frame >>> ws", "return None headers = property(getheaders) def connect(self, url, **options): \"\"\"", "Mainly, this is for testing purpose. func: callable object. the", "code and string(byte array) value. \"\"\" while True: frame =", "default is None \"enable_multithread\" -> enable lock for multithread. \"sockopt\"", "None \"enable_multithread\" -> enable lock for multithread. \"sockopt\" -> socket", "enable_multithread: if set to True, lock send method. skip_utf8_validation: skip", "string or unicode, if the opcode is OPCODE_TEXT. Otherwise, it", "it means \"use default_timeout value\" class_: class to instantiate when", "get subprotocol \"\"\" if self.handshake_response: return self.handshake_response.subprotocol else: return None", "Passing optional timeout parameter will set the timeout on the", "WebSocketProtocolException(\"Ping message is too long\") if control_frame: return (frame.opcode, frame)", "elif frame.opcode == ABNF.OPCODE_CLOSE: self.send_close() return (frame.opcode, frame) elif frame.opcode", "range\") self.connected = False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) def", ">>> ws.send(\"Hello, Server\") >>> ws.recv() 'Hello, Server' >>> ws.close() get_mask_key:", "range\") try: self.connected = False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)", "the data as string. payload: Payload must be utf-8 string", "status >= ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") self.connected =", "later version. This library is distributed in the hope that", "default_timeout value\" options: \"header\" -> custom http header list or", "**options): \"\"\" Connect to url. 
url is websocket url scheme.", "frame = ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"My", "the websocket timeout(second). \"\"\" return self.sock_opt.timeout def settimeout(self, timeout): \"\"\"", "\"\"\" Low level WebSocket interface. This class is based on", "data else: return '' def recv_data(self, control_frame=False): \"\"\" Receive data", "-> custom origin url. \"host\" -> custom host header string.", "must return string(byte array), which length is argument specified. \"\"\"", "__future__ import print_function import six import socket if six.PY3: from", "ABNF.OPCODE_TEXT) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"My name is \",", "set to 80. \"http_no_proxy\" - host names, which doesn't use", "isEnabledForError(): recv_status = struct.unpack(\"!H\", frame.data)[0] if recv_status != STATUS_NORMAL: error(\"close", "self.frame_buffer.recv_frame() def send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\" send close data to", "except: if self.sock: self.sock.close() self.sock = None raise def send(self,", "data payload to send server. \"\"\" if isinstance(payload, six.text_type): payload", "ping(self, payload=\"\"): \"\"\" send ping data. payload: data payload to", "ABNF.OPCODE_BINARY) def ping(self, payload=\"\"): \"\"\" send ping data. payload: data", "Server' >>> ws.close() get_mask_key: a callable to produce new mask", "and password. default is None \"enable_multithread\" -> enable lock for", "string. payload: Payload must be utf-8 string or unicode, if", "\"\"\" Send the data frame. frame: frame data created by", "of operation code and string(byte array) value. \"\"\" opcode, frame", "to send. see STATUS_XXX. reason: the reason to close. 
This", "six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING) def pong(self, payload): \"\"\"", "this value, it means \"use default_timeout value\" options: \"header\" ->", "< 126: self.pong(frame.data) else: raise WebSocketProtocolException(\"Ping message is too long\")", "status >= ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") try: self.connected", "for socket.setsockopt. sockopt must be tuple and each element is", "self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame) elif frame.opcode == ABNF.OPCODE_CLOSE: self.send_close() return (frame.opcode,", "set None for this value, it means \"use default_timeout value\"", "to the Free Software Foundation, Inc., 51 Franklin Street, Fifth", "sockopt=None, sslopt=None, fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False, **options): \"\"\" Initialize WebSocket object.", "server. \"\"\" if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG)", "This library is distributed in the hope that it will", "self.lock: while data: l = self._send(data) data = data[l:] return", "send method. skip_utf8_validation: skip utf8 validation. \"\"\" def __init__(self, get_mask_key=None,", "abort, wakes up other threads that are waiting in recv_*", "\"http_no_proxy\" - host names, which doesn't use proxy. 
\"http_proxy_auth\" -", "subprotocol \"\"\" if self.handshake_response: return self.handshake_response.subprotocol else: return None subprotocol", "def send_binary(self, payload): return self.send(payload, ABNF.OPCODE_BINARY) def ping(self, payload=\"\"): \"\"\"", "data) def _recv(self, bufsize): try: return recv(self.sock, bufsize) except WebSocketConnectionClosedException:", "self.sock: self.sock.close() self.sock = None raise def send(self, payload, opcode=ABNF.OPCODE_TEXT):", "should have received a copy of the GNU Lesser General", "published by the Free Software Foundation; either version 2.1 of", "until receive a close frame. \"\"\" if self.connected: if status", "except WebSocketConnectionClosedException: if self.sock: self.sock.close() self.sock = None self.connected =", ">>> frame = ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>> ws.send_frame(frame) >>> cont_frame =", "executions. \"\"\" while True: yield self.recv() def __next__(self): return self.recv()", "raise WebSocketProtocolException(\"Not a valid frame %s\" % frame) elif frame.opcode", "= False def _send(self, data): return send(self.sock, data) def _recv(self,", "while data: l = self._send(data) data = data[l:] return length", "else: self.lock = NoLock() def __iter__(self): \"\"\" Allow iteration over", "multithread. \"sockopt\" -> socket options \"sslopt\" -> ssl option \"subprotocols\"", "Free Software Foundation; either version 2.1 of the License, or", "frame object. \"\"\" return self.frame_buffer.recv_frame() def send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\"", "\"\"\" while True: yield self.recv() def __next__(self): return self.recv() def", "string(byte array), which length is argument specified. 
\"\"\" self.get_mask_key =", "True: frame = self.recv_frame() if not frame: # handle error:", "opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: return data else:", ">>> ws = websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello, Server\") >>>", "Receive string data(byte array) from the server. return value: string(byte", "._exceptions import * from ._abnf import * from ._socket import", "timeout until receive a close frame. If None, it will", "ABNF.create_frame(payload, opcode) return self.send_frame(frame) def send_frame(self, frame): \"\"\" Send the", "ABNF.OPCODE_PING) def pong(self, payload): \"\"\" send pong data. payload: data", "return data.decode(\"utf-8\") elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY:", "of mask key. This func must return string(byte array), which", "self.recv_frame() if isEnabledForError(): recv_status = struct.unpack(\"!H\", frame.data)[0] if recv_status !=", "which length is argument specified. \"\"\" self.get_mask_key = func def", "ABNF frame object. \"\"\" return self.frame_buffer.recv_frame() def send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")):", "sslopt) self.handshake_response = None self.sock = None self.connected = False", "WebSocketConnectionClosedException: if self.sock: self.sock.close() self.sock = None self.connected = False", "ws = create_connection(\"ws://echo.websocket.org/\") >>> frame = ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>> ws.send_frame(frame)", "under the terms of the GNU Lesser General Public License", "========================= This version support only hybi-13. Please see http://tools.ietf.org/html/rfc6455 for", "data(byte array) from the server. 
return value: string(byte array) value.", "default_timeout value\" class_: class to instantiate when creating the connection.", "get handshake status \"\"\" if self.handshake_response: return self.handshake_response.status else: return", "'Hello, Server' >>> ws.close() get_mask_key: a callable to produce new", "from server. return value: ABNF frame object. \"\"\" return self.frame_buffer.recv_frame()", "iteration over websocket, implying sequential `recv` executions. \"\"\" while True:", "receive a close frame. \"\"\" if self.connected: if status <", "self.handshake_response.subprotocol else: return None subprotocol = property(getsubprotocol) def getstatus(self): \"\"\"", "single frame. self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation)", "None self.sock = None self.connected = False self.get_mask_key = get_mask_key", "\"\"\" Receive string data(byte array) from the server. return value:", "except: pass self.shutdown() def abort(self): \"\"\" Low-level asynchronous abort, wakes", "ABNF.OPCODE_TEXT: return data.decode(\"utf-8\") elif opcode == ABNF.OPCODE_TEXT or opcode ==", "frame) elif frame.opcode == ABNF.OPCODE_PONG: if control_frame: return (frame.opcode, frame)", "until receive a close frame. If None, it will wait", "musk key. You can customize mask key generator. Mainly, this", "Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA", "False) skip_utf8_validation = options.pop(\"skip_utf8_validation\", False) websock = class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame,", "Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,", "None)) try: self.handshake_response = handshake(self.sock, *addrs, **options) self.connected = True", "frame. \"\"\" if self.connected: if status < 0 or status", "frame) def recv_frame(self): \"\"\" receive data as frame from server.", "or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser", "import * \"\"\" websocket python client. ========================= This version support", "the func takes 1 argument as integer. The argument means", "information. tuple of username and password. default is None \"subprotocols\"", "length is argument specified. \"\"\" self.get_mask_key = func def gettimeout(self):", "recv_status != STATUS_NORMAL: error(\"close status: \" + repr(recv_status)) except: pass", "url and return the WebSocket object. Passing optional timeout parameter", "STATUS_XXX. reason: the reason to close. This must be string", "This library is free software; you can redistribute it and/or", "\"\"\" sockopt = options.pop(\"sockopt\", []) sslopt = options.pop(\"sslopt\", {}) fire_cont_frame", "__next__(self): return self.recv() def next(self): return self.__next__() def fileno(self): return", "._abnf import * from ._socket import * from ._utils import", "return self.cont_frame.extract(frame) elif frame.opcode == ABNF.OPCODE_CLOSE: self.send_close() return (frame.opcode, frame)", "General Public License along with this library; if not, write", "return self.send(payload, ABNF.OPCODE_BINARY) def ping(self, payload=\"\"): \"\"\" send ping data.", "None, it will wait forever until receive a close frame.", "= len(data) trace(\"send: \" + repr(data)) with self.lock: while data:", "._logging import * from ._http import * from ._handshake import", "**options) websock.settimeout(timeout if timeout is not None else getdefaulttimeout()) websock.connect(url,", "timeout setting returned by getdefauttimeout() is used. You can customize", "available sub protocols. default is None. 
\"skip_utf8_validation\" - skip utf8", "frame.opcode == ABNF.OPCODE_PONG: if control_frame: return (frame.opcode, frame) def recv_frame(self):", "def next(self): return self.__next__() def fileno(self): return self.sock.fileno() def set_mask_key(self,", "or status >= ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") self.connected", "import * from ._http import * from ._handshake import *", "Please see OPCODE_XXX. \"\"\" frame = ABNF.create_frame(payload, opcode) return self.send_frame(frame)", "See the GNU Lesser General Public License for more details.", "reason=six.b(\"\")): \"\"\" send close data to the server. status: status", "Allow iteration over websocket, implying sequential `recv` executions. \"\"\" while", "payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG) def recv(self): \"\"\" Receive string data(byte array)", "License as published by the Free Software Foundation; either version", "websocket timeout(second). \"\"\" return self.sock_opt.timeout def settimeout(self, timeout): \"\"\" Set", "= options.pop(\"skip_utf8_validation\", False) websock = class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation,", "threading # websocket modules from ._exceptions import * from ._abnf", "enable_multithread=False, skip_utf8_validation=False, **options): \"\"\" Initialize WebSocket object. \"\"\" self.sock_opt =", "= ABNF.create_frame(\"My name is \", ABNF.OPCODE_CONT, 0) >>> ws.send_frame(frame) >>>", "NoLock() def __iter__(self): \"\"\" Allow iteration over websocket, implying sequential", "def getsubprotocol(self): \"\"\" get subprotocol \"\"\" if self.handshake_response: return self.handshake_response.subprotocol", "recv_data(self, control_frame=False): \"\"\" Receive data with operation code. control_frame: a", "be useful, but WITHOUT ANY WARRANTY; without even the implied", "object. 
Passing optional timeout parameter will set the timeout on", "frame_buffer(self._recv, skip_utf8_validation) self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation) if enable_multithread: self.lock =", "options.pop(\"sslopt\", {}) fire_cont_frame = options.pop(\"fire_cont_frame\", False) enable_multithread = options.pop(\"enable_multithread\", False)", "def recv_data(self, control_frame=False): \"\"\" Receive data with operation code. control_frame:", "You should have received a copy of the GNU Lesser", "websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello, Server\") >>> ws.recv() 'Hello, Server'", "each cont frame. default is False enable_multithread: if set to", "data. payload: data payload to send server. \"\"\" if isinstance(payload,", "get_mask_key # These buffer over the build-up of a single", "payload): return self.send(payload, ABNF.OPCODE_BINARY) def ping(self, payload=\"\"): \"\"\" send ping", "\"\"\" frame = ABNF.create_frame(payload, opcode) return self.send_frame(frame) def send_frame(self, frame):", "__init__ should be compatible with WebSocket.__init__, i.e. accept all of", "self.sock_opt, proxy_info(**options), options.pop('socket', None)) try: self.handshake_response = handshake(self.sock, *addrs, **options)", "to url and return the WebSocket object. Passing optional timeout", "library is free software; you can redistribute it and/or modify", "is too long\") if control_frame: return (frame.opcode, frame) elif frame.opcode", "return self.send_frame(frame) def send_frame(self, frame): \"\"\" Send the data frame.", "(frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PONG: if control_frame: return (frame.opcode,", "sslopt: dict object for ssl socket option. fire_cont_frame: fire recv", "of the License, or (at your option) any later version.", "80. \"http_no_proxy\" - host names, which doesn't use proxy. 
\"http_proxy_auth\"", "_recv(self, bufsize): try: return recv(self.sock, bufsize) except WebSocketConnectionClosedException: if self.sock:", "= ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"My name", "valid frame %s\" % frame) elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY,", "purpose. func: callable object. the func takes 1 argument as", "send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\" send close data to the server.", "be utf-8 string or unicode, if the opcode is OPCODE_TEXT.", "client. >>> import websocket >>> ws = websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\")", "Connect to url. url is websocket url scheme. ie. ws://host:port/resource", "abort(self): \"\"\" Low-level asynchronous abort, wakes up other threads that", "-> cookie value. \"origin\" -> custom origin url. \"host\" ->", "control_frame=False): \"\"\" Receive data with operation code. control_frame: a boolean", "produce new mask keys, see the set_mask_key function's docstring for", "shutdown(self): \"close socket, immediately.\" if self.sock: self.sock.close() self.sock = None", "array of available sub protocols. default is None. \"skip_utf8_validation\" -", "skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout if timeout is not None else getdefaulttimeout())", "This version support only hybi-13. Please see http://tools.ietf.org/html/rfc6455 for protocol.", ">>> cont_frame = ABNF.create_frame(\"My name is \", ABNF.OPCODE_CONT, 0) >>>", "tuple of operation code and string(byte array) value. \"\"\" opcode,", "l = self._send(data) data = data[l:] return length def send_binary(self,", "control_frame: a boolean flag indicating whether to return control frame", "string. timeout: timeout until receive a close frame. If None,", "the terms of the GNU Lesser General Public License as", "return string(byte array), which length is argument specified. 
\"\"\" self.get_mask_key", "None self.connected = False raise def create_connection(url, timeout=None, class_=WebSocket, **options):", "status code to send. see STATUS_XXX. reason: the reason to", "data[l:] return length def send_binary(self, payload): return self.send(payload, ABNF.OPCODE_BINARY) def", "= True except: if self.sock: self.sock.close() self.sock = None raise", "**options): \"\"\" Initialize WebSocket object. \"\"\" self.sock_opt = sock_opt(sockopt, sslopt)", "pre-initialized stream socket. \"\"\" sockopt = options.pop(\"sockopt\", []) sslopt =", "Lesser General Public License as published by the Free Software", "Close Websocket object status: status code to send. see STATUS_XXX.", "self.cont_frame.add(frame) if self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame) elif frame.opcode == ABNF.OPCODE_CLOSE: self.send_close()", "frame data created by ABNF.create_frame >>> ws = create_connection(\"ws://echo.websocket.org/\") >>>", "= None raise def send(self, payload, opcode=ABNF.OPCODE_TEXT): \"\"\" Send the", "<NAME>(liris) This library is free software; you can redistribute it", "License, or (at your option) any later version. This library", "ping data. payload: data payload to send server. \"\"\" if", "payload, opcode=ABNF.OPCODE_TEXT): \"\"\" Send the data as string. payload: Payload", "{}) fire_cont_frame = options.pop(\"fire_cont_frame\", False) enable_multithread = options.pop(\"enable_multithread\", False) skip_utf8_validation", "function's docstring for more details sockopt: values for socket.setsockopt. sockopt", "\"\"\" if self.handshake_response: return self.handshake_response.status else: return None status =", "url. url is websocket url scheme. ie. ws://host:port/resource You can", "status: status code to send. see STATUS_XXX. reason: the reason", "host name. \"http_proxy_port\" - http proxy port. 
If not set,", "ABNF.OPCODE_CLOSE) def close(self, status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3): \"\"\" Close Websocket object", "library for Python Copyright (C) 2010 <NAME>(liris) This library is", "it under the terms of the GNU Lesser General Public", "lock for multithread. \"sockopt\" -> socket options \"sslopt\" -> ssl", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "username and password. default is None \"enable_multithread\" -> enable lock", "\"\"\" return self.frame_buffer.recv_frame() def send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\" send close", "threading.Lock() else: self.lock = NoLock() def __iter__(self): \"\"\" Allow iteration", "value, it means \"use default_timeout value\" options: \"header\" -> custom", "get_mask_key=None, sockopt=None, sslopt=None, fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False, **options): \"\"\" Initialize WebSocket", "timeout to the websocket. timeout: timeout time(second). \"\"\" self.sock_opt.timeout =", "data. The following example is an echo client. >>> import", "Software Foundation; either version 2.1 of the License, or (at", "* from ._utils import * from ._url import * from", "= struct.unpack(\"!H\", frame.data)[0] if recv_status != STATUS_NORMAL: error(\"close status: \"", "\"\"\" Send the data as string. payload: Payload must be", "set_mask_key function's docstring for more details sockopt: values for socket.setsockopt.", "if self.sock: self.sock.close() self.sock = None self.connected = False def", "ws.recv() 'Hello, Server' >>> ws.close() get_mask_key: a callable to produce", "timeout: socket timeout time. This value is integer. if you", "if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG) def recv(self):", "set, set to 80. \"http_no_proxy\" - host names, which doesn't", "A PARTICULAR PURPOSE. See the GNU Lesser General Public License", "object. 
\"\"\" self.sock_opt = sock_opt(sockopt, sslopt) self.handshake_response = None self.sock", "fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False, **options): \"\"\" Initialize WebSocket object. \"\"\" self.sock_opt", "ABNF.OPCODE_CONT, 1) >>> ws.send_frame(frame) \"\"\" if self.get_mask_key: frame.get_mask_key = self.get_mask_key", "USA \"\"\" from __future__ import print_function import six import socket", "frame = ABNF.create_frame(payload, opcode) return self.send_frame(frame) def send_frame(self, frame): \"\"\"", "of sock.setsockopt. sslopt: dict object for ssl socket option. fire_cont_frame:", "WebSocket object. \"\"\" self.sock_opt = sock_opt(sockopt, sslopt) self.handshake_response = None", "... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout: socket timeout time.", "skip_utf8_validation) self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation) if enable_multithread: self.lock = threading.Lock()", "array) opcode: operation code to send. Please see OPCODE_XXX. \"\"\"", "no timeout is supplied, the global default timeout setting returned", "from base64 import encodebytes as base64encode else: from base64 import", "ws.close() get_mask_key: a callable to produce new mask keys, see", "raise ValueError(\"code is invalid range\") try: self.connected = False self.send(struct.pack('!H',", "GNU Lesser General Public License along with this library; if", "can customize mask key generator. Mainly, this is for testing", "array) value. 
\"\"\" opcode, data = self.recv_data() if six.PY3 and", "= continuous_frame(fire_cont_frame, skip_utf8_validation) if enable_multithread: self.lock = threading.Lock() else: self.lock", "= frame.format() length = len(data) trace(\"send: \" + repr(data)) with", "error: # 'NoneType' object has no attribute 'opcode' raise WebSocketProtocolException(\"Not", "six.PY3: from base64 import encodebytes as base64encode else: from base64", "len(data) trace(\"send: \" + repr(data)) with self.lock: while data: l", "six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG) def recv(self): \"\"\" Receive", "proxy_info(**options), options.pop('socket', None)) try: self.handshake_response = handshake(self.sock, *addrs, **options) self.connected", "False self.get_mask_key = get_mask_key # These buffer over the build-up", "a valid frame %s\" % frame) elif frame.opcode in (ABNF.OPCODE_TEXT,", "= self.recv_data() if six.PY3 and opcode == ABNF.OPCODE_TEXT: return data.decode(\"utf-8\")", "version. This library is distributed in the hope that it", "return the WebSocket object. Passing optional timeout parameter will set", "of it's kwargs. options: \"header\" -> custom http header list", "recv_* \"\"\" if self.connected: self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self): \"close socket, immediately.\"", "get handshake response header \"\"\" if self.handshake_response: return self.handshake_response.headers else:", "indicating whether to return control frame data, defaults to False", "'options'. If you set \"header\" list object, you can set", "self.connected = False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) sock_timeout =", "function to create musk key. You can customize mask key", "code. control_frame: a boolean flag indicating whether to return control", "stream socket. \"\"\" sockopt = options.pop(\"sockopt\", []) sslopt = options.pop(\"sslopt\",", "the opcode is OPCODE_TEXT. 
Otherwise, it must be string(byte array)", "'' def recv_data(self, control_frame=False): \"\"\" Receive data with operation code.", "response header \"\"\" if self.handshake_response: return self.handshake_response.headers else: return None", "control frame data, defaults to False return value: tuple of", "is distributed in the hope that it will be useful,", "use proxy. \"http_proxy_auth\" - http proxy auth information. tuple of", "= class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout if timeout", "= None self.connected = False self.get_mask_key = get_mask_key # These", "option) any later version. This library is distributed in the", "= False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) def close(self, status=STATUS_NORMAL,", "= None self.connected = False raise def create_connection(url, timeout=None, class_=WebSocket,", "isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING) def pong(self, payload):", "It's __init__ should be compatible with WebSocket.__init__, i.e. accept all", "http header list or dict. \"cookie\" -> cookie value. \"origin\"", "- pre-initialized stream socket. \"\"\" sockopt = options.pop(\"sockopt\", []) sslopt", "means \"use default_timeout value\" class_: class to instantiate when creating", "error(\"close status: \" + repr(recv_status)) except: pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except:", "\"cookie\" -> cookie value. \"origin\" -> custom origin url. \"host\"", "a callable to produce new mask keys, see the set_mask_key", "recv_status = struct.unpack(\"!H\", frame.data)[0] if recv_status != STATUS_NORMAL: error(\"close status:", "PURPOSE. 
See the GNU Lesser General Public License for more", "If None, it will wait forever until receive a close", "GNU Lesser General Public License as published by the Free", "'opcode' raise WebSocketProtocolException(\"Not a valid frame %s\" % frame) elif", "raise def create_connection(url, timeout=None, class_=WebSocket, **options): \"\"\" connect to url", "to instantiate when creating the connection. It has to implement", "skip utf8 validation. \"socket\" - pre-initialized stream socket. \"\"\" sockopt", "means \"use default_timeout value\" options: \"header\" -> custom http header", "of operation code and string(byte array) value. \"\"\" while True:", "for ssl socket option. fire_cont_frame: fire recv event for each", "import * from ._handshake import * from ._ssl_compat import *", "frame.opcode == ABNF.OPCODE_PING: if len(frame.data) < 126: self.pong(frame.data) else: raise", "None status = property(getstatus) def getheaders(self): \"\"\" get handshake response", "conn = create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout:", "Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA", "self.connected: self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self): \"close socket, immediately.\" if self.sock: self.sock.close()", "for this value, it means \"use default_timeout value\" options: \"header\"", "url, **options): \"\"\" Connect to url. url is websocket url", "default is None. \"socket\" - pre-initialized stream socket. \"\"\" self.sock,", "can set your own custom header. >>> ws = WebSocket()", "be compatible with WebSocket.__init__, i.e. accept all of it's kwargs.", "import encodestring as base64encode import struct import threading # websocket", "= func def gettimeout(self): \"\"\" Get the websocket timeout(second). \"\"\"", "receive data as frame from server. return value: ABNF frame", "None. \"skip_utf8_validation\" - skip utf8 validation. 
\"socket\" - pre-initialized stream", "\"\"\" if status < 0 or status >= ABNF.LENGTH_16: raise", "\"x-custom: header\"]) timeout: socket timeout time. This value is integer.", "+ repr(recv_status)) except: pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except: pass self.shutdown() def", "you can set your own custom header. >>> conn =", "status = property(getstatus) def getheaders(self): \"\"\" get handshake response header", "\"\"\" if self.get_mask_key: frame.get_mask_key = self.get_mask_key data = frame.format() length", "self.sock.settimeout(timeout) timeout = property(gettimeout, settimeout) def getsubprotocol(self): \"\"\" get subprotocol", "func: callable object. the func takes 1 argument as integer.", "payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING) def pong(self, payload): \"\"\" send pong data.", "control_frame: return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PONG: if control_frame:", "Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA \"\"\" from", "proxy. \"http_proxy_auth\" - http proxy auth information. tuple of username", "frame: # handle error: # 'NoneType' object has no attribute", "== ABNF.OPCODE_PING: if len(frame.data) < 126: self.pong(frame.data) else: raise WebSocketProtocolException(\"Ping", "import * from ._ssl_compat import * \"\"\" websocket python client.", ">>> ws.close() get_mask_key: a callable to produce new mask keys,", "be tuple and each element is argument of sock.setsockopt. sslopt:", "that it will be useful, but WITHOUT ANY WARRANTY; without", "func): \"\"\" set function to create musk key. You can", "\"\"\" self.sock_opt.timeout = timeout if self.sock: self.sock.settimeout(timeout) timeout = property(gettimeout,", "global default timeout setting returned by getdefauttimeout() is used. You", "build-up of a single frame. 
self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) self.cont_frame", "import print_function import six import socket if six.PY3: from base64", "returned by getdefauttimeout() is used. You can customize using 'options'.", "= create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout: socket", "\"\"\" opcode, frame = self.recv_data_frame(control_frame) return opcode, frame.data def recv_data_frame(self,", "'NoneType' object has no attribute 'opcode' raise WebSocketProtocolException(\"Not a valid", "\"\"\" Low-level asynchronous abort, wakes up other threads that are", "This must be string or bytes. \"\"\" if status <", "headers = property(getheaders) def connect(self, url, **options): \"\"\" Connect to", "gettimeout(self): \"\"\" Get the websocket timeout(second). \"\"\" return self.sock_opt.timeout def", "has no attribute 'opcode' raise WebSocketProtocolException(\"Not a valid frame %s\"", "your own custom header. >>> ws = WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\",", "timeout=3): \"\"\" Close Websocket object status: status code to send.", "validation. \"socket\" - pre-initialized stream socket. \"\"\" sockopt = options.pop(\"sockopt\",", "connect to the websocket server and send/receive data. The following", "write to the Free Software Foundation, Inc., 51 Franklin Street,", "auth information. tuple of username and password. default is None", "event for each cont frame. default is False enable_multithread: if", "def ping(self, payload=\"\"): \"\"\" send ping data. payload: data payload", "own custom header. >>> conn = create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\",", "to True, lock send method. skip_utf8_validation: skip utf8 validation. \"\"\"", "property(getstatus) def getheaders(self): \"\"\" get handshake response header \"\"\" if", "socket. 
If no timeout is supplied, the global default timeout", "payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG) def recv(self): \"\"\" Receive string", "value, it means \"use default_timeout value\" class_: class to instantiate", "or bytes. \"\"\" if status < 0 or status >=", "= property(gettimeout, settimeout) def getsubprotocol(self): \"\"\" get subprotocol \"\"\" if", "interface. This class is based on The WebSocket protocol draft-hixie-thewebsocketprotocol-76", "header \"\"\" if self.handshake_response: return self.handshake_response.headers else: return None headers", "must be utf-8 string or unicode, if the opcode is", "class is based on The WebSocket protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We", "pong(self, payload): \"\"\" send pong data. payload: data payload to", "must be string. timeout: timeout until receive a close frame.", "can customize using 'options'. If you set \"header\" list object,", "__init__(self, get_mask_key=None, sockopt=None, sslopt=None, fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False, **options): \"\"\" Initialize", "def fileno(self): return self.sock.fileno() def set_mask_key(self, func): \"\"\" set function", "self.cont_frame.validate(frame) self.cont_frame.add(frame) if self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame) elif frame.opcode == ABNF.OPCODE_CLOSE:", "to 80. \"http_no_proxy\" - host names, which doesn't use proxy.", "Server\") >>> ws.recv() 'Hello, Server' >>> ws.close() get_mask_key: a callable", "http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can connect to the websocket server and send/receive", "over websocket, implying sequential `recv` executions. \"\"\" while True: yield", "timeout(second). \"\"\" return self.sock_opt.timeout def settimeout(self, timeout): \"\"\" Set the", "means length of mask key. This func must return string(byte", "is an echo client. 
>>> import websocket >>> ws =", "the server. status: status code to send. see STATUS_XXX. reason:", "http proxy auth information. tuple of username and password. default", "data as string. payload: Payload must be utf-8 string or", "will set the timeout on the socket. If no timeout", "._handshake import * from ._ssl_compat import * \"\"\" websocket python", "self.handshake_response: return self.handshake_response.subprotocol else: return None subprotocol = property(getsubprotocol) def", "MA 02110-1335 USA \"\"\" from __future__ import print_function import six", "- skip utf8 validation. \"socket\" - pre-initialized stream socket. \"\"\"", "data, defaults to False return value: tuple of operation code", "timeout: timeout time(second). \"\"\" self.sock_opt.timeout = timeout if self.sock: self.sock.settimeout(timeout)", "library; if not, write to the Free Software Foundation, Inc.,", "ValueError(\"code is invalid range\") try: self.connected = False self.send(struct.pack('!H', status)", "will wait forever until receive a close frame. \"\"\" if", "0) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT, 1)", "password. default is None \"subprotocols\" - array of available sub", "string(byte array) value. \"\"\" while True: frame = self.recv_frame() if", "modules from ._exceptions import * from ._abnf import * from", "the Free Software Foundation; either version 2.1 of the License,", "= NoLock() def __iter__(self): \"\"\" Allow iteration over websocket, implying", "options.pop('socket', None)) try: self.handshake_response = handshake(self.sock, *addrs, **options) self.connected =", "set the timeout on the socket. If no timeout is", "Set the timeout to the websocket. timeout: timeout time(second). \"\"\"", "class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout if timeout is", "None. 
\"socket\" - pre-initialized stream socket. \"\"\" self.sock, addrs =", "self.sock_opt = sock_opt(sockopt, sslopt) self.handshake_response = None self.sock = None", "This must be string. timeout: timeout until receive a close", "self.send(payload, ABNF.OPCODE_PONG) def recv(self): \"\"\" Receive string data(byte array) from", "is None. \"skip_utf8_validation\" - skip utf8 validation. \"socket\" - pre-initialized", "opcode) return self.send_frame(frame) def send_frame(self, frame): \"\"\" Send the data", "= False self.get_mask_key = get_mask_key # These buffer over the", "server and send/receive data. The following example is an echo", "._ssl_compat import * \"\"\" websocket python client. ========================= This version", "self.sock.fileno() def set_mask_key(self, func): \"\"\" set function to create musk", "def __iter__(self): \"\"\" Allow iteration over websocket, implying sequential `recv`", "GNU Lesser General Public License for more details. You should", "object, you can set your own custom header. >>> conn", "*addrs, **options) self.connected = True except: if self.sock: self.sock.close() self.sock", "\" + repr(data)) with self.lock: while data: l = self._send(data)", "and connect. It's __init__ should be compatible with WebSocket.__init__, i.e.", "operation code. control_frame: a boolean flag indicating whether to return", "the GNU Lesser General Public License as published by the", "- array of available sub protocols. default is None. \"skip_utf8_validation\"", "ABNF.OPCODE_CLOSE: self.send_close() return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PING: if", "frame): \"\"\" Send the data frame. frame: frame data created", "if self.connected: if status < 0 or status >= ABNF.LENGTH_16:", "Street, Fifth Floor, Boston, MA 02110-1335 USA \"\"\" from __future__", "the WebSocket object. Passing optional timeout parameter will set the", "url and return websocket object. 
Connect to url and return", "= sock_opt(sockopt, sslopt) self.handshake_response = None self.sock = None self.connected", "False) enable_multithread = options.pop(\"enable_multithread\", False) skip_utf8_validation = options.pop(\"skip_utf8_validation\", False) websock", "host header string. \"http_proxy_host\" - http proxy host name. \"http_proxy_port\"", "False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) sock_timeout = self.sock.gettimeout() self.sock.settimeout(timeout)", "opcode == ABNF.OPCODE_TEXT: return data.decode(\"utf-8\") elif opcode == ABNF.OPCODE_TEXT or", "def gettimeout(self): \"\"\" Get the websocket timeout(second). \"\"\" return self.sock_opt.timeout", "i.e. accept all of it's kwargs. options: \"header\" -> custom", "# websocket modules from ._exceptions import * from ._abnf import", "\"\"\" receive data as frame from server. return value: ABNF", "self.send(payload, ABNF.OPCODE_PING) def pong(self, payload): \"\"\" send pong data. payload:", "six.PY3 and opcode == ABNF.OPCODE_TEXT: return data.decode(\"utf-8\") elif opcode ==", "frame.opcode == ABNF.OPCODE_CLOSE: self.send_close() return (frame.opcode, frame) elif frame.opcode ==", "handshake status \"\"\" if self.handshake_response: return self.handshake_response.status else: return None", "can connect to the websocket server and send/receive data. The", "WebSocketProtocolException(\"Not a valid frame %s\" % frame) elif frame.opcode in", "= self.recv_frame() if isEnabledForError(): recv_status = struct.unpack(\"!H\", frame.data)[0] if recv_status", "and return websocket object. Connect to url and return the", "default is False enable_multithread: if set to True, lock send", "OPCODE_TEXT. Otherwise, it must be string(byte array) opcode: operation code", "def send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\" send close data to the", "+ reason, ABNF.OPCODE_CLOSE) def close(self, status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3): \"\"\" Close", "custom header. 
>>> conn = create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ...", "2.1 of the License, or (at your option) any later", "import * from ._utils import * from ._url import *", "def connect(self, url, **options): \"\"\" Connect to url. url is", "to implement settimeout and connect. It's __init__ should be compatible", "def close(self, status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3): \"\"\" Close Websocket object status:", "._utils import * from ._url import * from ._logging import", "02110-1335 USA \"\"\" from __future__ import print_function import six import", "return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PING: if len(frame.data) <", "the server. return value: string(byte array) value. \"\"\" opcode, data", "= self.recv_frame() if not frame: # handle error: # 'NoneType'", "the socket. If no timeout is supplied, the global default", "PARTICULAR PURPOSE. See the GNU Lesser General Public License for", "ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"My name is", "return self.frame_buffer.recv_frame() def send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\" send close data", "False) websock = class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout", "cont_frame = ABNF.create_frame(\"My name is \", ABNF.OPCODE_CONT, 0) >>> ws.send_frame(frame)", "if recv_status != STATUS_NORMAL: error(\"close status: \" + repr(recv_status)) except:", "value is integer. 
if you set None for this value,", "STATUS_NORMAL: error(\"close status: \" + repr(recv_status)) except: pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR)", "\"\"\" get handshake response header \"\"\" if self.handshake_response: return self.handshake_response.headers", "socket, immediately.\" if self.sock: self.sock.close() self.sock = None self.connected =", "reason, ABNF.OPCODE_CLOSE) def close(self, status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3): \"\"\" Close Websocket", "recv_data_frame(self, control_frame=False): \"\"\" Receive data with operation code. control_frame: a", "data = frame.format() length = len(data) trace(\"send: \" + repr(data))", "status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3): \"\"\" Close Websocket object status: status code", "frame.format() length = len(data) trace(\"send: \" + repr(data)) with self.lock:", "value. \"\"\" opcode, frame = self.recv_data_frame(control_frame) return opcode, frame.data def", "* from ._abnf import * from ._socket import * from", "\"\"\" if self.handshake_response: return self.handshake_response.headers else: return None headers =", "from ._abnf import * from ._socket import * from ._utils", "2010 <NAME>(liris) This library is free software; you can redistribute", "if six.PY3 and opcode == ABNF.OPCODE_TEXT: return data.decode(\"utf-8\") elif opcode", "fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout if timeout is not None", "is invalid range\") self.connected = False self.send(struct.pack('!H', status) + reason,", "self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) def close(self, status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3):", "self.handshake_response = None self.sock = None self.connected = False self.get_mask_key", "return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PONG: if control_frame: return", "self.connected = False raise def 
create_connection(url, timeout=None, class_=WebSocket, **options): \"\"\"", "see STATUS_XXX. reason: the reason to close. This must be", "which doesn't use proxy. \"http_proxy_auth\" - http proxy auth information.", "self.get_mask_key: frame.get_mask_key = self.get_mask_key data = frame.format() length = len(data)", "= False raise def create_connection(url, timeout=None, class_=WebSocket, **options): \"\"\" connect", "object for ssl socket option. fire_cont_frame: fire recv event for", "connect(self, url, **options): \"\"\" Connect to url. url is websocket", "= options.pop(\"fire_cont_frame\", False) enable_multithread = options.pop(\"enable_multithread\", False) skip_utf8_validation = options.pop(\"skip_utf8_validation\",", "ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") self.connected = False self.send(struct.pack('!H',", "implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "this library; if not, write to the Free Software Foundation,", "url is websocket url scheme. ie. ws://host:port/resource You can customize", "send(self, payload, opcode=ABNF.OPCODE_TEXT): \"\"\" Send the data as string. payload:", "is invalid range\") try: self.connected = False self.send(struct.pack('!H', status) +", "over the build-up of a single frame. self.frame_buffer = frame_buffer(self._recv,", "- array of available sub protocols. default is None. \"socket\"", "status) + reason, ABNF.OPCODE_CLOSE) def close(self, status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3): \"\"\"", "mask key generator. Mainly, this is for testing purpose. func:", "def shutdown(self): \"close socket, immediately.\" if self.sock: self.sock.close() self.sock =", "socket. 
\"\"\" self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), options.pop('socket', None))", "else: raise WebSocketProtocolException(\"Ping message is too long\") if control_frame: return", "data): return send(self.sock, data) def _recv(self, bufsize): try: return recv(self.sock,", "timeout time. This value is integer. if you set None", "the websocket server and send/receive data. The following example is", "import * from ._url import * from ._logging import *", "ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello, Server\") >>> ws.recv() 'Hello, Server' >>> ws.close()", "frame %s\" % frame) elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT):", "Python Copyright (C) 2010 <NAME>(liris) This library is free software;", "= payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG) def recv(self): \"\"\" Receive string data(byte", "sock_timeout = self.sock.gettimeout() self.sock.settimeout(timeout) try: frame = self.recv_frame() if isEnabledForError():", "scheme. ie. ws://host:port/resource You can customize using 'options'. If you", "self.handshake_response.status else: return None status = property(getstatus) def getheaders(self): \"\"\"", "timeout is not None else getdefaulttimeout()) websock.connect(url, **options) return websock", "code and string(byte array) value. \"\"\" opcode, frame = self.recv_data_frame(control_frame)", "length def send_binary(self, payload): return self.send(payload, ABNF.OPCODE_BINARY) def ping(self, payload=\"\"):", "argument means length of mask key. This func must return", "no attribute 'opcode' raise WebSocketProtocolException(\"Not a valid frame %s\" %", "if self.get_mask_key: frame.get_mask_key = self.get_mask_key data = frame.format() length =", "status \"\"\" if self.handshake_response: return self.handshake_response.status else: return None status", "each element is argument of sock.setsockopt. 
sslopt: dict object for", "if you set None for this value, it means \"use", "self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) sock_timeout = self.sock.gettimeout() self.sock.settimeout(timeout) try:", "the GNU Lesser General Public License along with this library;", "func must return string(byte array), which length is argument specified.", "wait forever until receive a close frame. \"\"\" if self.connected:", "opcode: operation code to send. Please see OPCODE_XXX. \"\"\" frame", "client library for Python Copyright (C) 2010 <NAME>(liris) This library", "Connect to url and return the WebSocket object. Passing optional", "connect(url, self.sock_opt, proxy_info(**options), options.pop('socket', None)) try: self.handshake_response = handshake(self.sock, *addrs,", "getsubprotocol(self): \"\"\" get subprotocol \"\"\" if self.handshake_response: return self.handshake_response.subprotocol else:", "names, which doesn't use proxy. \"http_proxy_auth\" - http proxy auth", "status < 0 or status >= ABNF.LENGTH_16: raise ValueError(\"code is", "you can redistribute it and/or modify it under the terms", "return None status = property(getstatus) def getheaders(self): \"\"\" get handshake", "- http proxy auth information. tuple of username and password.", "else: return None status = property(getstatus) def getheaders(self): \"\"\" get", "Lesser General Public License for more details. You should have", "\"\"\" connect to url and return websocket object. Connect to", "in the hope that it will be useful, but WITHOUT", "if len(frame.data) < 126: self.pong(frame.data) else: raise WebSocketProtocolException(\"Ping message is", "\"\"\" self.sock_opt = sock_opt(sockopt, sslopt) self.handshake_response = None self.sock =", "raise ValueError(\"code is invalid range\") self.connected = False self.send(struct.pack('!H', status)", "def create_connection(url, timeout=None, class_=WebSocket, **options): \"\"\" connect to url and", "by getdefauttimeout() is used. 
You can customize using 'options'. If", "is OPCODE_TEXT. Otherwise, it must be string(byte array) opcode: operation", "send. Please see OPCODE_XXX. \"\"\" frame = ABNF.create_frame(payload, opcode) return", "version 2.1 of the License, or (at your option) any", "time. This value is integer. if you set None for", "sequential `recv` executions. \"\"\" while True: yield self.recv() def __next__(self):", "port. If not set, set to 80. \"http_no_proxy\" - host", "try: self.handshake_response = handshake(self.sock, *addrs, **options) self.connected = True except:", "received a copy of the GNU Lesser General Public License", "keys, see the set_mask_key function's docstring for more details sockopt:", "protocols. default is None. \"socket\" - pre-initialized stream socket. \"\"\"", "opcode is OPCODE_TEXT. Otherwise, it must be string(byte array) opcode:", "# 'NoneType' object has no attribute 'opcode' raise WebSocketProtocolException(\"Not a", "settimeout(self, timeout): \"\"\" Set the timeout to the websocket. timeout:", "sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout if timeout is not", "list object, you can set your own custom header. >>>", "The following example is an echo client. >>> import websocket", "# These buffer over the build-up of a single frame.", "value: string(byte array) value. \"\"\" opcode, data = self.recv_data() if", "default is None \"subprotocols\" - array of available sub protocols.", "func def gettimeout(self): \"\"\" Get the websocket timeout(second). \"\"\" return", "return opcode, frame.data def recv_data_frame(self, control_frame=False): \"\"\" Receive data with", "an echo client. >>> import websocket >>> ws = websocket.WebSocket()", "\"\"\" from __future__ import print_function import six import socket if", "= get_mask_key # These buffer over the build-up of a", "be string. timeout: timeout until receive a close frame. 
If", "used. You can customize using 'options'. If you set \"header\"", "\"use default_timeout value\" options: \"header\" -> custom http header list", ">>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"My name is \", ABNF.OPCODE_CONT,", "operation code and string(byte array) value. \"\"\" while True: frame", "only hybi-13. Please see http://tools.ietf.org/html/rfc6455 for protocol. \"\"\" class WebSocket(object):", "the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR", "not, write to the Free Software Foundation, Inc., 51 Franklin", "encodebytes as base64encode else: from base64 import encodestring as base64encode", "\"\"\" send ping data. payload: data payload to send server.", "= create_connection(\"ws://echo.websocket.org/\") >>> frame = ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>> ws.send_frame(frame) >>>", "tuple of username and password. default is None \"enable_multithread\" ->", "MyProgram\", ... \"x-custom: header\"]) timeout: socket timeout time. This value", "following example is an echo client. >>> import websocket >>>", "timeout is supplied, the global default timeout setting returned by", "self.get_mask_key = get_mask_key # These buffer over the build-up of", "threads that are waiting in recv_* \"\"\" if self.connected: self.sock.shutdown(socket.SHUT_RDWR)", "setting returned by getdefauttimeout() is used. You can customize using", "self.recv_data() if six.PY3 and opcode == ABNF.OPCODE_TEXT: return data.decode(\"utf-8\") elif", "\"\"\" Set the timeout to the websocket. timeout: timeout time(second).", "timeout = property(gettimeout, settimeout) def getsubprotocol(self): \"\"\" get subprotocol \"\"\"", "frame. If None, it will wait forever until receive a", "The argument means length of mask key. This func must", "if isEnabledForError(): recv_status = struct.unpack(\"!H\", frame.data)[0] if recv_status != STATUS_NORMAL:", "ssl socket option. 
fire_cont_frame: fire recv event for each cont", "def send_frame(self, frame): \"\"\" Send the data frame. frame: frame", "the reason to close. This must be string. timeout: timeout", "buffer over the build-up of a single frame. self.frame_buffer =", "None \"subprotocols\" - array of available sub protocols. default is", "self.shutdown() def abort(self): \"\"\" Low-level asynchronous abort, wakes up other", "status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\" send close data to the server. status:", "and each element is argument of sock.setsockopt. sslopt: dict object", "socket. \"\"\" sockopt = options.pop(\"sockopt\", []) sslopt = options.pop(\"sslopt\", {})", "without even the implied warranty of MERCHANTABILITY or FITNESS FOR", "\"\"\" Get the websocket timeout(second). \"\"\" return self.sock_opt.timeout def settimeout(self,", "getstatus(self): \"\"\" get handshake status \"\"\" if self.handshake_response: return self.handshake_response.status", "hybi-13. Please see http://tools.ietf.org/html/rfc6455 for protocol. \"\"\" class WebSocket(object): \"\"\"", ">= ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") try: self.connected =", "\"\"\" self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), options.pop('socket', None)) try:", "method. skip_utf8_validation: skip utf8 validation. \"\"\" def __init__(self, get_mask_key=None, sockopt=None,", "self.send_close() return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PING: if len(frame.data)", "\"\"\" send close data to the server. status: status code", "frame. default is False enable_multithread: if set to True, lock", "data: l = self._send(data) data = data[l:] return length def", "element is argument of sock.setsockopt. sslopt: dict object for ssl", "+ reason, ABNF.OPCODE_CLOSE) sock_timeout = self.sock.gettimeout() self.sock.settimeout(timeout) try: frame =", "sock_opt(sockopt, sslopt) self.handshake_response = None self.sock = None self.connected =", "frame from server. 
return value: ABNF frame object. \"\"\" return", "tuple and each element is argument of sock.setsockopt. sslopt: dict", "return self.sock.fileno() def set_mask_key(self, func): \"\"\" set function to create", "if not, write to the Free Software Foundation, Inc., 51", "to the websocket server and send/receive data. The following example", "available sub protocols. default is None. \"socket\" - pre-initialized stream", "to close. This must be string or bytes. \"\"\" if", "websocket modules from ._exceptions import * from ._abnf import *", "settimeout) def getsubprotocol(self): \"\"\" get subprotocol \"\"\" if self.handshake_response: return", "if self.handshake_response: return self.handshake_response.subprotocol else: return None subprotocol = property(getsubprotocol)", "array), which length is argument specified. \"\"\" self.get_mask_key = func", "websocket server and send/receive data. The following example is an", "for more details sockopt: values for socket.setsockopt. sockopt must be", "sockopt = options.pop(\"sockopt\", []) sslopt = options.pop(\"sslopt\", {}) fire_cont_frame =", "51 Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA \"\"\"", "if self.handshake_response: return self.handshake_response.headers else: return None headers = property(getheaders)", "can redistribute it and/or modify it under the terms of", "except: pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except: pass self.shutdown() def abort(self): \"\"\"", "from ._ssl_compat import * \"\"\" websocket python client. ========================= This", "up other threads that are waiting in recv_* \"\"\" if", "for protocol. \"\"\" class WebSocket(object): \"\"\" Low level WebSocket interface.", "by the Free Software Foundation; either version 2.1 of the", "ValueError(\"code is invalid range\") self.connected = False self.send(struct.pack('!H', status) +", "+ repr(data)) with self.lock: while data: l = self._send(data) data", "\"\"\" Initialize WebSocket object. 
\"\"\" self.sock_opt = sock_opt(sockopt, sslopt) self.handshake_response", "it will be useful, but WITHOUT ANY WARRANTY; without even", "._http import * from ._handshake import * from ._ssl_compat import", "= property(getheaders) def connect(self, url, **options): \"\"\" Connect to url.", "payload): \"\"\" send pong data. payload: data payload to send", "self.send_frame(frame) def send_frame(self, frame): \"\"\" Send the data frame. frame:", "in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame) if self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame)", "send server. \"\"\" if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload,", "proxy port. If not set, set to 80. \"http_no_proxy\" -", "all of it's kwargs. options: \"header\" -> custom http header", "ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame) if self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame) elif frame.opcode ==", "elif frame.opcode == ABNF.OPCODE_PING: if len(frame.data) < 126: self.pong(frame.data) else:", "if control_frame: return (frame.opcode, frame) def recv_frame(self): \"\"\" receive data", "-> custom http header list or dict. \"cookie\" -> cookie", "or opcode == ABNF.OPCODE_BINARY: return data else: return '' def", "\"close socket, immediately.\" if self.sock: self.sock.close() self.sock = None self.connected", "argument of sock.setsockopt. sslopt: dict object for ssl socket option.", "self.sock = None self.connected = False def _send(self, data): return", "reason to close. This must be string. timeout: timeout until", "Lesser General Public License along with this library; if not,", "frame = self.recv_frame() if not frame: # handle error: #", "it and/or modify it under the terms of the GNU", "import threading # websocket modules from ._exceptions import * from", "Send the data as string. 
payload: Payload must be utf-8", "any later version. This library is distributed in the hope", "the timeout to the websocket. timeout: timeout time(second). \"\"\" self.sock_opt.timeout", "is None \"subprotocols\" - array of available sub protocols. default", "the timeout on the socket. If no timeout is supplied,", "tuple of operation code and string(byte array) value. \"\"\" while", "def abort(self): \"\"\" Low-level asynchronous abort, wakes up other threads", "should be compatible with WebSocket.__init__, i.e. accept all of it's", "a copy of the GNU Lesser General Public License along", "name is \", ABNF.OPCODE_CONT, 0) >>> ws.send_frame(frame) >>> cont_frame =", "getdefauttimeout() is used. You can customize using 'options'. If you", "\"skip_utf8_validation\" - skip utf8 validation. \"socket\" - pre-initialized stream socket.", "implement settimeout and connect. It's __init__ should be compatible with", "to produce new mask keys, see the set_mask_key function's docstring", "close. This must be string or bytes. \"\"\" if status", "1) >>> ws.send_frame(frame) \"\"\" if self.get_mask_key: frame.get_mask_key = self.get_mask_key data", "%s\" % frame) elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame)", "elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame) if self.cont_frame.is_fire(frame):", "False return value: tuple of operation code and string(byte array)", "return self.__next__() def fileno(self): return self.sock.fileno() def set_mask_key(self, func): \"\"\"", "- http proxy port. If not set, set to 80.", "parameter will set the timeout on the socket. If no", "This func must return string(byte array), which length is argument", "return websocket object. Connect to url and return the WebSocket", "have received a copy of the GNU Lesser General Public", "key. You can customize mask key generator. 
Mainly, this is", "mask keys, see the set_mask_key function's docstring for more details", "warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "sub protocols. default is None. \"skip_utf8_validation\" - skip utf8 validation.", "= None self.sock = None self.connected = False self.get_mask_key =", "return self.handshake_response.status else: return None status = property(getstatus) def getheaders(self):", "connect. It's __init__ should be compatible with WebSocket.__init__, i.e. accept", "key generator. Mainly, this is for testing purpose. func: callable", "base64encode else: from base64 import encodestring as base64encode import struct", "Public License for more details. You should have received a", "ABNF.OPCODE_CLOSE) sock_timeout = self.sock.gettimeout() self.sock.settimeout(timeout) try: frame = self.recv_frame() if", "is websocket url scheme. ie. ws://host:port/resource You can customize using", "is integer. if you set None for this value, it", "self.recv_frame() if not frame: # handle error: # 'NoneType' object", "modify it under the terms of the GNU Lesser General", "payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING) def pong(self, payload): \"\"\" send", ">= ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") self.connected = False", "opcode=ABNF.OPCODE_TEXT): \"\"\" Send the data as string. payload: Payload must", "continuous_frame(fire_cont_frame, skip_utf8_validation) if enable_multithread: self.lock = threading.Lock() else: self.lock =", "object. the func takes 1 argument as integer. The argument", "of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "to url. url is websocket url scheme. ie. 
ws://host:port/resource You", "by ABNF.create_frame >>> ws = create_connection(\"ws://echo.websocket.org/\") >>> frame = ABNF.create_frame(\"Hello\",", "\"\"\" while True: frame = self.recv_frame() if not frame: #", "to return control frame data, defaults to False return value:", "If no timeout is supplied, the global default timeout setting", "You can customize mask key generator. Mainly, this is for", "You can customize using 'options'. If you set \"header\" list", "ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT, 1) >>> ws.send_frame(frame) \"\"\" if self.get_mask_key: frame.get_mask_key", "takes 1 argument as integer. The argument means length of", "* from ._url import * from ._logging import * from", "the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,", "timeout on the socket. If no timeout is supplied, the", "def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False, **options): \"\"\"", "from ._http import * from ._handshake import * from ._ssl_compat", "self.get_mask_key = func def gettimeout(self): \"\"\" Get the websocket timeout(second).", "to close. This must be string. timeout: timeout until receive", "are waiting in recv_* \"\"\" if self.connected: self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self):", "argument specified. \"\"\" self.get_mask_key = func def gettimeout(self): \"\"\" Get", "self.connected = False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) def close(self,", "to the server. status: status code to send. see STATUS_XXX.", "= WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"])", "License along with this library; if not, write to the", "you set None for this value, it means \"use default_timeout", "._socket import * from ._utils import * from ._url import", "value: ABNF frame object. 
\"\"\" return self.frame_buffer.recv_frame() def send_close(self, status=STATUS_NORMAL,", "socket option. fire_cont_frame: fire recv event for each cont frame.", "for this value, it means \"use default_timeout value\" class_: class", "enable lock for multithread. \"sockopt\" -> socket options \"sslopt\" ->", "None for this value, it means \"use default_timeout value\" class_:", "== ABNF.OPCODE_TEXT: return data.decode(\"utf-8\") elif opcode == ABNF.OPCODE_TEXT or opcode", "class_=WebSocket, **options): \"\"\" connect to url and return websocket object.", "cont frame. default is False enable_multithread: if set to True,", "License for more details. You should have received a copy", ">>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT, 1) >>>", "header. >>> conn = create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom:", "set your own custom header. >>> ws = WebSocket() >>>", "server. status: status code to send. see STATUS_XXX. reason: the", "def _recv(self, bufsize): try: return recv(self.sock, bufsize) except WebSocketConnectionClosedException: if", "is free software; you can redistribute it and/or modify it", "else: return '' def recv_data(self, control_frame=False): \"\"\" Receive data with", "send_frame(self, frame): \"\"\" Send the data frame. frame: frame data", "supplied, the global default timeout setting returned by getdefauttimeout() is", "the data frame. frame: frame data created by ABNF.create_frame >>>", "value. \"\"\" while True: frame = self.recv_frame() if not frame:", "elif frame.opcode == ABNF.OPCODE_PONG: if control_frame: return (frame.opcode, frame) def", "callable to produce new mask keys, see the set_mask_key function's", "if self.sock: self.sock.close() self.sock = None raise def send(self, payload,", "redistribute it and/or modify it under the terms of the", "or (at your option) any later version. 
This library is", "socket if six.PY3: from base64 import encodebytes as base64encode else:", "a boolean flag indicating whether to return control frame data,", "send close data to the server. status: status code to", "reason: the reason to close. This must be string. timeout:", "% frame) elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame)", "code to send. Please see OPCODE_XXX. \"\"\" frame = ABNF.create_frame(payload,", "from ._handshake import * from ._ssl_compat import * \"\"\" websocket", "isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG) def recv(self): \"\"\"", "websocket url scheme. ie. ws://host:port/resource You can customize using 'options'.", "\"\"\" Close Websocket object status: status code to send. see", "your option) any later version. This library is distributed in", "return send(self.sock, data) def _recv(self, bufsize): try: return recv(self.sock, bufsize)", "version support only hybi-13. Please see http://tools.ietf.org/html/rfc6455 for protocol. \"\"\"", "get_mask_key: a callable to produce new mask keys, see the", "invalid range\") try: self.connected = False self.send(struct.pack('!H', status) + reason,", "self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self): \"close socket, immediately.\" if self.sock: self.sock.close() self.sock", "try: self.connected = False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) sock_timeout", "handshake(self.sock, *addrs, **options) self.connected = True except: if self.sock: self.sock.close()", "options.pop(\"sockopt\", []) sslopt = options.pop(\"sslopt\", {}) fire_cont_frame = options.pop(\"fire_cont_frame\", False)", "generator. Mainly, this is for testing purpose. func: callable object.", "Websocket object status: status code to send. see STATUS_XXX. reason:", "\"socket\" - pre-initialized stream socket. 
\"\"\" self.sock, addrs = connect(url,", "default timeout setting returned by getdefauttimeout() is used. You can", "else: from base64 import encodestring as base64encode import struct import", "customize using 'options'. If you set \"header\" list object, you", "with operation code. control_frame: a boolean flag indicating whether to", "time(second). \"\"\" self.sock_opt.timeout = timeout if self.sock: self.sock.settimeout(timeout) timeout =", "length = len(data) trace(\"send: \" + repr(data)) with self.lock: while", "from base64 import encodestring as base64encode import struct import threading", "this value, it means \"use default_timeout value\" class_: class to", "\"sockopt\" -> socket options \"sslopt\" -> ssl option \"subprotocols\" -", "and string(byte array) value. \"\"\" opcode, frame = self.recv_data_frame(control_frame) return", "ABNF.create_frame(\"My name is \", ABNF.OPCODE_CONT, 0) >>> ws.send_frame(frame) >>> cont_frame", "string(byte array) value. \"\"\" opcode, frame = self.recv_data_frame(control_frame) return opcode,", ">>> ws.recv() 'Hello, Server' >>> ws.close() get_mask_key: a callable to", "Copyright (C) 2010 <NAME>(liris) This library is free software; you", "return self.recv() def next(self): return self.__next__() def fileno(self): return self.sock.fileno()", "be string(byte array) opcode: operation code to send. Please see", "repr(data)) with self.lock: while data: l = self._send(data) data =", "return control frame data, defaults to False return value: tuple", "data.decode(\"utf-8\") elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: return", "return data else: return '' def recv_data(self, control_frame=False): \"\"\" Receive", "\"enable_multithread\" -> enable lock for multithread. 
\"sockopt\" -> socket options", "bufsize) except WebSocketConnectionClosedException: if self.sock: self.sock.close() self.sock = None self.connected", "frame) elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame) if", "if six.PY3: from base64 import encodebytes as base64encode else: from", ">>> ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello, Server\") >>> ws.recv() 'Hello, Server' >>>", "the build-up of a single frame. self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)", "kwargs. options: \"header\" -> custom http header list or dict.", "skip utf8 validation. \"\"\" def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, fire_cont_frame=False,", "None self.connected = False self.get_mask_key = get_mask_key # These buffer", "sock.setsockopt. sslopt: dict object for ssl socket option. fire_cont_frame: fire", "class WebSocket(object): \"\"\" Low level WebSocket interface. This class is", "options: \"header\" -> custom http header list or dict. \"cookie\"", "skip_utf8_validation: skip utf8 validation. \"\"\" def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,", "Low-level asynchronous abort, wakes up other threads that are waiting", "or unicode, if the opcode is OPCODE_TEXT. Otherwise, it must", "= False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) sock_timeout = self.sock.gettimeout()", "websock = class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout if", "sockopt: values for socket.setsockopt. sockopt must be tuple and each", "socket timeout time. This value is integer. if you set", "boolean flag indicating whether to return control frame data, defaults", "the set_mask_key function's docstring for more details sockopt: values for", "and return the WebSocket object. 
Passing optional timeout parameter will", "enable_multithread = options.pop(\"enable_multithread\", False) skip_utf8_validation = options.pop(\"skip_utf8_validation\", False) websock =", "created by ABNF.create_frame >>> ws = create_connection(\"ws://echo.websocket.org/\") >>> frame =", "draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can connect to the websocket server and", "ABNF.OPCODE_PONG: if control_frame: return (frame.opcode, frame) def recv_frame(self): \"\"\" receive", "opcode, frame.data def recv_data_frame(self, control_frame=False): \"\"\" Receive data with operation", "object has no attribute 'opcode' raise WebSocketProtocolException(\"Not a valid frame", "and string(byte array) value. \"\"\" while True: frame = self.recv_frame()", "value: tuple of operation code and string(byte array) value. \"\"\"", "custom origin url. \"host\" -> custom host header string. \"http_proxy_host\"", "= threading.Lock() else: self.lock = NoLock() def __iter__(self): \"\"\" Allow", "if the opcode is OPCODE_TEXT. Otherwise, it must be string(byte", "argument as integer. The argument means length of mask key.", "fileno(self): return self.sock.fileno() def set_mask_key(self, func): \"\"\" set function to", "import * from ._abnf import * from ._socket import *", "proxy host name. \"http_proxy_port\" - http proxy port. If not", "* \"\"\" websocket python client. ========================= This version support only", "def recv(self): \"\"\" Receive string data(byte array) from the server.", "ws.send(\"Hello, Server\") >>> ws.recv() 'Hello, Server' >>> ws.close() get_mask_key: a", "message is too long\") if control_frame: return (frame.opcode, frame) elif", "send/receive data. The following example is an echo client. >>>", "return value: ABNF frame object. \"\"\" return self.frame_buffer.recv_frame() def send_close(self,", "return (frame.opcode, frame) def recv_frame(self): \"\"\" receive data as frame", "python client. 
========================= This version support only hybi-13. Please see", "self.lock = threading.Lock() else: self.lock = NoLock() def __iter__(self): \"\"\"", "the websocket. timeout: timeout time(second). \"\"\" self.sock_opt.timeout = timeout if", "compatible with WebSocket.__init__, i.e. accept all of it's kwargs. options:", "WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS", "reason=six.b(\"\"), timeout=3): \"\"\" Close Websocket object status: status code to", "= payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING) def pong(self, payload): \"\"\" send pong", "self.handshake_response: return self.handshake_response.headers else: return None headers = property(getheaders) def", "def __next__(self): return self.recv() def next(self): return self.__next__() def fileno(self):", "reason: the reason to close. This must be string or", ">>> ws = WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ...", "from ._logging import * from ._http import * from ._handshake", "\"header\" list object, you can set your own custom header.", "ws://host:port/resource You can customize using 'options'. If you set \"header\"", "to send. Please see OPCODE_XXX. \"\"\" frame = ABNF.create_frame(payload, opcode)", "data created by ABNF.create_frame >>> ws = create_connection(\"ws://echo.websocket.org/\") >>> frame", "send. see STATUS_XXX. reason: the reason to close. This must", "not set, set to 80. \"http_no_proxy\" - host names, which", "pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except: pass self.shutdown() def abort(self): \"\"\" Low-level", "- WebSocket client library for Python Copyright (C) 2010 <NAME>(liris)", "base64 import encodebytes as base64encode else: from base64 import encodestring", "is for testing purpose. func: callable object. the func takes", "object. Connect to url and return the WebSocket object. Passing", "dict object for ssl socket option. 
fire_cont_frame: fire recv event", "protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can connect to the websocket server", "to False return value: tuple of operation code and string(byte", "as base64encode import struct import threading # websocket modules from", "def _send(self, data): return send(self.sock, data) def _recv(self, bufsize): try:", "self.recv_data_frame(control_frame) return opcode, frame.data def recv_data_frame(self, control_frame=False): \"\"\" Receive data", "information. tuple of username and password. default is None \"enable_multithread\"", "as published by the Free Software Foundation; either version 2.1", "self.sock: self.sock.close() self.sock = None self.connected = False def _send(self,", "This class is based on The WebSocket protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76", "ws = websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello, Server\") >>> ws.recv()", "False self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) def close(self, status=STATUS_NORMAL, reason=six.b(\"\"),", "receive a close frame. If None, it will wait forever", "either version 2.1 of the License, or (at your option)", "must be tuple and each element is argument of sock.setsockopt.", "yield self.recv() def __next__(self): return self.recv() def next(self): return self.__next__()", "bytes. \"\"\" if status < 0 or status >= ABNF.LENGTH_16:", "WebSocket interface. This class is based on The WebSocket protocol", "encodestring as base64encode import struct import threading # websocket modules", "= options.pop(\"enable_multithread\", False) skip_utf8_validation = options.pop(\"skip_utf8_validation\", False) websock = class_(sockopt=sockopt,", "operation code to send. Please see OPCODE_XXX. 
\"\"\" frame =", "self.handshake_response: return self.handshake_response.status else: return None status = property(getstatus) def", "the GNU Lesser General Public License for more details. You", "base64encode import struct import threading # websocket modules from ._exceptions", "of the GNU Lesser General Public License along with this", "= websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello, Server\") >>> ws.recv() 'Hello,", "even the implied warranty of MERCHANTABILITY or FITNESS FOR A", "!= STATUS_NORMAL: error(\"close status: \" + repr(recv_status)) except: pass self.sock.settimeout(sock_timeout)", "other threads that are waiting in recv_* \"\"\" if self.connected:", "-> enable lock for multithread. \"sockopt\" -> socket options \"sslopt\"", "Floor, Boston, MA 02110-1335 USA \"\"\" from __future__ import print_function", "self._send(data) data = data[l:] return length def send_binary(self, payload): return", "* from ._handshake import * from ._ssl_compat import * \"\"\"", "custom http header list or dict. \"cookie\" -> cookie value.", "= self.sock.gettimeout() self.sock.settimeout(timeout) try: frame = self.recv_frame() if isEnabledForError(): recv_status", "= connect(url, self.sock_opt, proxy_info(**options), options.pop('socket', None)) try: self.handshake_response = handshake(self.sock,", "if self.handshake_response: return self.handshake_response.status else: return None status = property(getstatus)", "= self.get_mask_key data = frame.format() length = len(data) trace(\"send: \"", "struct.unpack(\"!H\", frame.data)[0] if recv_status != STATUS_NORMAL: error(\"close status: \" +", "sub protocols. default is None. 
\"socket\" - pre-initialized stream socket.", "options.pop(\"fire_cont_frame\", False) enable_multithread = options.pop(\"enable_multithread\", False) skip_utf8_validation = options.pop(\"skip_utf8_validation\", False)", "self.lock = NoLock() def __iter__(self): \"\"\" Allow iteration over websocket,", "when creating the connection. It has to implement settimeout and", "handshake response header \"\"\" if self.handshake_response: return self.handshake_response.headers else: return", "from ._socket import * from ._utils import * from ._url", "is False enable_multithread: if set to True, lock send method.", "if not frame: # handle error: # 'NoneType' object has", "for more details. You should have received a copy of", "elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: return data", "self.sock: self.sock.settimeout(timeout) timeout = property(gettimeout, settimeout) def getsubprotocol(self): \"\"\" get", "`recv` executions. \"\"\" while True: yield self.recv() def __next__(self): return", "def settimeout(self, timeout): \"\"\" Set the timeout to the websocket.", "distributed in the hope that it will be useful, but", "for each cont frame. default is False enable_multithread: if set", "create musk key. You can customize mask key generator. Mainly,", "payload to send server. \"\"\" if isinstance(payload, six.text_type): payload =", "def pong(self, payload): \"\"\" send pong data. payload: data payload", "= self.recv_data_frame(control_frame) return opcode, frame.data def recv_data_frame(self, control_frame=False): \"\"\" Receive", "WebSocket.__init__, i.e. accept all of it's kwargs. options: \"header\" ->", "<reponame>akuala/REPO.KUALA \"\"\" websocket - WebSocket client library for Python Copyright", "details sockopt: values for socket.setsockopt. sockopt must be tuple and", "True, lock send method. skip_utf8_validation: skip utf8 validation. \"\"\" def", "- pre-initialized stream socket. 
\"\"\" self.sock, addrs = connect(url, self.sock_opt,", "\"\"\" if self.connected: self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self): \"close socket, immediately.\" if", "WebSocket client library for Python Copyright (C) 2010 <NAME>(liris) This", "frame. self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation) if", "= self._send(data) data = data[l:] return length def send_binary(self, payload):", "opcode == ABNF.OPCODE_BINARY: return data else: return '' def recv_data(self,", "subprotocol = property(getsubprotocol) def getstatus(self): \"\"\" get handshake status \"\"\"", "value\" options: \"header\" -> custom http header list or dict.", "utf-8 string or unicode, if the opcode is OPCODE_TEXT. Otherwise,", "frame. frame: frame data created by ABNF.create_frame >>> ws =", "see http://tools.ietf.org/html/rfc6455 for protocol. \"\"\" class WebSocket(object): \"\"\" Low level", "next(self): return self.__next__() def fileno(self): return self.sock.fileno() def set_mask_key(self, func):", "**options): \"\"\" connect to url and return websocket object. Connect", "import struct import threading # websocket modules from ._exceptions import", "is argument of sock.setsockopt. sslopt: dict object for ssl socket", "ie. ws://host:port/resource You can customize using 'options'. If you set", "connection. It has to implement settimeout and connect. It's __init__", "is used. You can customize using 'options'. If you set", "\"\"\" websocket python client. ========================= This version support only hybi-13.", "ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout: socket timeout", "-> custom host header string. \"http_proxy_host\" - http proxy host", "this is for testing purpose. func: callable object. 
the func", "pass self.shutdown() def abort(self): \"\"\" Low-level asynchronous abort, wakes up", "self.__next__() def fileno(self): return self.sock.fileno() def set_mask_key(self, func): \"\"\" set", "reason to close. This must be string or bytes. \"\"\"", "* from ._logging import * from ._http import * from", "Foundation; either version 2.1 of the License, or (at your", "= frame_buffer(self._recv, skip_utf8_validation) self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation) if enable_multithread: self.lock", "level WebSocket interface. This class is based on The WebSocket", "set your own custom header. >>> conn = create_connection(\"ws://echo.websocket.org/\", ...", "is \", ABNF.OPCODE_CONT, 0) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"Foo", "ABNF.OPCODE_CONT, 0) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT,", "payload: data payload to send server. \"\"\" if isinstance(payload, six.text_type):", "self.sock.close() self.sock = None self.connected = False raise def create_connection(url,", "self.sock.close() self.sock = None raise def send(self, payload, opcode=ABNF.OPCODE_TEXT): \"\"\"", "and send/receive data. The following example is an echo client.", "value. \"\"\" opcode, data = self.recv_data() if six.PY3 and opcode", "the reason to close. This must be string or bytes.", "close(self, status=STATUS_NORMAL, reason=six.b(\"\"), timeout=3): \"\"\" Close Websocket object status: status", "addrs = connect(url, self.sock_opt, proxy_info(**options), options.pop('socket', None)) try: self.handshake_response =", "(C) 2010 <NAME>(liris) This library is free software; you can", "* from ._socket import * from ._utils import * from", "from ._url import * from ._logging import * from ._http", "import six import socket if six.PY3: from base64 import encodebytes", "password. default is None \"enable_multithread\" -> enable lock for multithread.", "Receive data with operation code. 
control_frame: a boolean flag indicating", "object. \"\"\" return self.frame_buffer.recv_frame() def send_close(self, status=STATUS_NORMAL, reason=six.b(\"\")): \"\"\" send", "enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options) websock.settimeout(timeout if timeout is not None else", "attribute 'opcode' raise WebSocketProtocolException(\"Not a valid frame %s\" % frame)", "getheaders(self): \"\"\" get handshake response header \"\"\" if self.handshake_response: return", "recv(self): \"\"\" Receive string data(byte array) from the server. return", "if control_frame: return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PONG: if", "along with this library; if not, write to the Free", "based on The WebSocket protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can connect", "as frame from server. return value: ABNF frame object. \"\"\"", "frame.get_mask_key = self.get_mask_key data = frame.format() length = len(data) trace(\"send:", "== ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: return data else: return", "customize mask key generator. Mainly, this is for testing purpose.", "= data[l:] return length def send_binary(self, payload): return self.send(payload, ABNF.OPCODE_BINARY)", "self.sock.gettimeout() self.sock.settimeout(timeout) try: frame = self.recv_frame() if isEnabledForError(): recv_status =", "recv event for each cont frame. default is False enable_multithread:", "self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation) if enable_multithread: self.lock = threading.Lock() else:", "while True: frame = self.recv_frame() if not frame: # handle", "connect to url and return websocket object. 
Connect to url", "\"\"\" def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False, **options):", "asynchronous abort, wakes up other threads that are waiting in", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU", ">>> cont_frame = ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT, 1) >>> ws.send_frame(frame) \"\"\"", "\"\"\" Allow iteration over websocket, implying sequential `recv` executions. \"\"\"", "Public License as published by the Free Software Foundation; either", "self.sock_opt.timeout def settimeout(self, timeout): \"\"\" Set the timeout to the", "value. \"origin\" -> custom origin url. \"host\" -> custom host", "= property(getsubprotocol) def getstatus(self): \"\"\" get handshake status \"\"\" if", "websocket object. Connect to url and return the WebSocket object.", "ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"My name is \", ABNF.OPCODE_CONT, 0)", "server. return value: string(byte array) value. \"\"\" opcode, data =", "software; you can redistribute it and/or modify it under the", "FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public", "self.recv() def next(self): return self.__next__() def fileno(self): return self.sock.fileno() def", "integer. The argument means length of mask key. This func", "length of mask key. This func must return string(byte array),", "dict. \"cookie\" -> cookie value. \"origin\" -> custom origin url.", "if enable_multithread: self.lock = threading.Lock() else: self.lock = NoLock() def", "string(byte array) value. \"\"\" opcode, data = self.recv_data() if six.PY3", "self.sock.close() self.sock = None self.connected = False def _send(self, data):", "It has to implement settimeout and connect. It's __init__ should", "string or bytes. 
\"\"\" if status < 0 or status", "= None self.connected = False def _send(self, data): return send(self.sock,", "We can connect to the websocket server and send/receive data.", "ws.send_frame(frame) \"\"\" if self.get_mask_key: frame.get_mask_key = self.get_mask_key data = frame.format()", "None raise def send(self, payload, opcode=ABNF.OPCODE_TEXT): \"\"\" Send the data", "property(gettimeout, settimeout) def getsubprotocol(self): \"\"\" get subprotocol \"\"\" if self.handshake_response:", "options.pop(\"skip_utf8_validation\", False) websock = class_(sockopt=sockopt, sslopt=sslopt, fire_cont_frame=fire_cont_frame, enable_multithread=enable_multithread, skip_utf8_validation=skip_utf8_validation, **options)", "\"\"\" Connect to url. url is websocket url scheme. ie.", "waiting in recv_* \"\"\" if self.connected: self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self): \"close", "settimeout and connect. It's __init__ should be compatible with WebSocket.__init__,", "skip_utf8_validation=False, **options): \"\"\" Initialize WebSocket object. \"\"\" self.sock_opt = sock_opt(sockopt,", "ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT, 1) >>> ws.send_frame(frame)", "close frame. If None, it will wait forever until receive", "is None \"enable_multithread\" -> enable lock for multithread. \"sockopt\" ->", "return recv(self.sock, bufsize) except WebSocketConnectionClosedException: if self.sock: self.sock.close() self.sock =", "of username and password. default is None \"subprotocols\" - array", "return '' def recv_data(self, control_frame=False): \"\"\" Receive data with operation", "[]) sslopt = options.pop(\"sslopt\", {}) fire_cont_frame = options.pop(\"fire_cont_frame\", False) enable_multithread", "value\" class_: class to instantiate when creating the connection. 
It", "-> ssl option \"subprotocols\" - array of available sub protocols.", "-> socket options \"sslopt\" -> ssl option \"subprotocols\" - array", "sslopt = options.pop(\"sslopt\", {}) fire_cont_frame = options.pop(\"fire_cont_frame\", False) enable_multithread =", "\"sslopt\" -> ssl option \"subprotocols\" - array of available sub", "self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), options.pop('socket', None)) try: self.handshake_response", "own custom header. >>> ws = WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\", ...", "websocket - WebSocket client library for Python Copyright (C) 2010", "username and password. default is None \"subprotocols\" - array of", "Boston, MA 02110-1335 USA \"\"\" from __future__ import print_function import", "array of available sub protocols. default is None. \"socket\" -", ">>> ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout: socket", "OPCODE_XXX. \"\"\" frame = ABNF.create_frame(payload, opcode) return self.send_frame(frame) def send_frame(self,", "http proxy host name. \"http_proxy_port\" - http proxy port. If", "if self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame) elif frame.opcode == ABNF.OPCODE_CLOSE: self.send_close() return", "header list or dict. \"cookie\" -> cookie value. \"origin\" ->", "control_frame: return (frame.opcode, frame) def recv_frame(self): \"\"\" receive data as", "data with operation code. control_frame: a boolean flag indicating whether", "\"\"\" if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING) def", "126: self.pong(frame.data) else: raise WebSocketProtocolException(\"Ping message is too long\") if", "instantiate when creating the connection. It has to implement settimeout", "= ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT, 1) >>> ws.send_frame(frame) \"\"\" if self.get_mask_key:", "custom host header string. 
\"http_proxy_host\" - http proxy host name.", "self.connected = False self.get_mask_key = get_mask_key # These buffer over", "else: return None headers = property(getheaders) def connect(self, url, **options):", "validation. \"\"\" def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, fire_cont_frame=False, enable_multithread=False, skip_utf8_validation=False,", "import encodebytes as base64encode else: from base64 import encodestring as", "object status: status code to send. see STATUS_XXX. reason: the", "frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame) if self.cont_frame.is_fire(frame): return", "on The WebSocket protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can connect to", "send pong data. payload: data payload to send server. \"\"\"", "with this library; if not, write to the Free Software", "self.connected: if status < 0 or status >= ABNF.LENGTH_16: raise", "self.sock = None self.connected = False raise def create_connection(url, timeout=None,", "cont_frame = ABNF.create_frame(\"Foo Bar\", ABNF.OPCODE_CONT, 1) >>> ws.send_frame(frame) \"\"\" if", "None self.connected = False def _send(self, data): return send(self.sock, data)", "flag indicating whether to return control frame data, defaults to", "self.connected = False def _send(self, data): return send(self.sock, data) def", "url scheme. ie. ws://host:port/resource You can customize using 'options'. If", "ssl option \"subprotocols\" - array of available sub protocols. default", "def send(self, payload, opcode=ABNF.OPCODE_TEXT): \"\"\" Send the data as string.", "the global default timeout setting returned by getdefauttimeout() is used.", "Get the websocket timeout(second). 
\"\"\" return self.sock_opt.timeout def settimeout(self, timeout):", "try: frame = self.recv_frame() if isEnabledForError(): recv_status = struct.unpack(\"!H\", frame.data)[0]", "== ABNF.OPCODE_CLOSE: self.send_close() return (frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PING:", "the hope that it will be useful, but WITHOUT ANY", "\", ABNF.OPCODE_CONT, 0) >>> ws.send_frame(frame) >>> cont_frame = ABNF.create_frame(\"Foo Bar\",", "it will wait forever until receive a close frame. \"\"\"", "your own custom header. >>> conn = create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent:", "\" + repr(recv_status)) except: pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except: pass self.shutdown()", "immediately.\" if self.sock: self.sock.close() self.sock = None self.connected = False", "\"\"\" websocket - WebSocket client library for Python Copyright (C)", "Public License along with this library; if not, write to", "list or dict. \"cookie\" -> cookie value. \"origin\" -> custom", "self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except: pass self.shutdown() def abort(self): \"\"\" Low-level asynchronous", "echo client. >>> import websocket >>> ws = websocket.WebSocket() >>>", "0 or status >= ABNF.LENGTH_16: raise ValueError(\"code is invalid range\")", "self.sock_opt.timeout = timeout if self.sock: self.sock.settimeout(timeout) timeout = property(gettimeout, settimeout)", "self.get_mask_key data = frame.format() length = len(data) trace(\"send: \" +", "to send server. \"\"\" if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\")", "WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY", "terms of the GNU Lesser General Public License as published", "key. This func must return string(byte array), which length is", "pong data. payload: data payload to send server. 
\"\"\" if", "timeout=None, class_=WebSocket, **options): \"\"\" connect to url and return websocket", "for testing purpose. func: callable object. the func takes 1", "The WebSocket protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can connect to the", "create_connection(\"ws://echo.websocket.org/\") >>> frame = ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT) >>> ws.send_frame(frame) >>> cont_frame", "General Public License as published by the Free Software Foundation;", "array) value. \"\"\" while True: frame = self.recv_frame() if not", "copy of the GNU Lesser General Public License along with", "set to True, lock send method. skip_utf8_validation: skip utf8 validation.", "\"\"\" class WebSocket(object): \"\"\" Low level WebSocket interface. This class", "custom header. >>> ws = WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent:", "WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout:", "unicode, if the opcode is OPCODE_TEXT. Otherwise, it must be", "self.connected = True except: if self.sock: self.sock.close() self.sock = None", "stream socket. \"\"\" self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), options.pop('socket',", "STATUS_XXX. reason: the reason to close. This must be string.", "recv(self.sock, bufsize) except WebSocketConnectionClosedException: if self.sock: self.sock.close() self.sock = None", "string(byte array) opcode: operation code to send. 
Please see OPCODE_XXX.", "ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or", "return self.handshake_response.subprotocol else: return None subprotocol = property(getsubprotocol) def getstatus(self):", "None headers = property(getheaders) def connect(self, url, **options): \"\"\" Connect", "in recv_* \"\"\" if self.connected: self.sock.shutdown(socket.SHUT_RDWR) def shutdown(self): \"close socket,", "1 argument as integer. The argument means length of mask", "forever until receive a close frame. \"\"\" if self.connected: if", "If you set \"header\" list object, you can set your", "is None. \"socket\" - pre-initialized stream socket. \"\"\" self.sock, addrs", "\"\"\" get handshake status \"\"\" if self.handshake_response: return self.handshake_response.status else:", "options.pop(\"enable_multithread\", False) skip_utf8_validation = options.pop(\"skip_utf8_validation\", False) websock = class_(sockopt=sockopt, sslopt=sslopt,", "= options.pop(\"sslopt\", {}) fire_cont_frame = options.pop(\"fire_cont_frame\", False) enable_multithread = options.pop(\"enable_multithread\",", "\"\"\" Receive data with operation code. control_frame: a boolean flag", "\"\"\" if self.handshake_response: return self.handshake_response.subprotocol else: return None subprotocol =", "data to the server. status: status code to send. see", "def getheaders(self): \"\"\" get handshake response header \"\"\" if self.handshake_response:", "True except: if self.sock: self.sock.close() self.sock = None raise def", "\"host\" -> custom host header string. \"http_proxy_host\" - http proxy", "is argument specified. \"\"\" self.get_mask_key = func def gettimeout(self): \"\"\"", "host names, which doesn't use proxy. \"http_proxy_auth\" - http proxy", "import socket if six.PY3: from base64 import encodebytes as base64encode", "server. \"\"\" if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING)", "send ping data. 
payload: data payload to send server. \"\"\"", "import websocket >>> ws = websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello,", "Fifth Floor, Boston, MA 02110-1335 USA \"\"\" from __future__ import", "utf8 validation. \"\"\" def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, fire_cont_frame=False, enable_multithread=False,", "ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: return data else: return ''", "six import socket if six.PY3: from base64 import encodebytes as", "it's kwargs. options: \"header\" -> custom http header list or", "and opcode == ABNF.OPCODE_TEXT: return data.decode(\"utf-8\") elif opcode == ABNF.OPCODE_TEXT", "and password. default is None \"subprotocols\" - array of available", "you can set your own custom header. >>> ws =", "print_function import six import socket if six.PY3: from base64 import", "and/or modify it under the terms of the GNU Lesser", "close. This must be string. timeout: timeout until receive a", "as base64encode else: from base64 import encodestring as base64encode import", "send_binary(self, payload): return self.send(payload, ABNF.OPCODE_BINARY) def ping(self, payload=\"\"): \"\"\" send", "socket.setsockopt. sockopt must be tuple and each element is argument", "create_connection(url, timeout=None, class_=WebSocket, **options): \"\"\" connect to url and return", "data = self.recv_data() if six.PY3 and opcode == ABNF.OPCODE_TEXT: return", "\"subprotocols\" - array of available sub protocols. default is None.", "of available sub protocols. default is None. \"socket\" - pre-initialized", "close data to the server. status: status code to send.", "creating the connection. 
It has to implement settimeout and connect.", "data = data[l:] return length def send_binary(self, payload): return self.send(payload,", "if self.sock: self.sock.close() self.sock = None self.connected = False raise", "= options.pop(\"sockopt\", []) sslopt = options.pop(\"sslopt\", {}) fire_cont_frame = options.pop(\"fire_cont_frame\",", "... \"x-custom: header\"]) timeout: socket timeout time. This value is", "websocket python client. ========================= This version support only hybi-13. Please", "is supplied, the global default timeout setting returned by getdefauttimeout()", "for multithread. \"sockopt\" -> socket options \"sslopt\" -> ssl option", "handle error: # 'NoneType' object has no attribute 'opcode' raise", "whether to return control frame data, defaults to False return", "\"\"\" get subprotocol \"\"\" if self.handshake_response: return self.handshake_response.subprotocol else: return", "it must be string(byte array) opcode: operation code to send.", "is based on The WebSocket protocol draft-hixie-thewebsocketprotocol-76 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 We can", "def recv_frame(self): \"\"\" receive data as frame from server. return", "self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation) if enable_multithread:", "if self.sock: self.sock.settimeout(timeout) timeout = property(gettimeout, settimeout) def getsubprotocol(self): \"\"\"", "options \"sslopt\" -> ssl option \"subprotocols\" - array of available", "hope that it will be useful, but WITHOUT ANY WARRANTY;", ">>> conn = create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"])", "more details. 
You should have received a copy of the", "library is distributed in the hope that it will be", "(frame.opcode, frame) elif frame.opcode == ABNF.OPCODE_PING: if len(frame.data) < 126:", "must be string(byte array) opcode: operation code to send. Please", "the License, or (at your option) any later version. This", "# handle error: # 'NoneType' object has no attribute 'opcode'", "with WebSocket.__init__, i.e. accept all of it's kwargs. options: \"header\"", "self.handshake_response = handshake(self.sock, *addrs, **options) self.connected = True except: if", "socket options \"sslopt\" -> ssl option \"subprotocols\" - array of", "of a single frame. self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) self.cont_frame =", "frame: frame data created by ABNF.create_frame >>> ws = create_connection(\"ws://echo.websocket.org/\")", "opcode, frame = self.recv_data_frame(control_frame) return opcode, frame.data def recv_data_frame(self, control_frame=False):", "return length def send_binary(self, payload): return self.send(payload, ABNF.OPCODE_BINARY) def ping(self,", "base64 import encodestring as base64encode import struct import threading #", "websock.settimeout(timeout if timeout is not None else getdefaulttimeout()) websock.connect(url, **options)", "optional timeout parameter will set the timeout on the socket.", "= ABNF.create_frame(payload, opcode) return self.send_frame(frame) def send_frame(self, frame): \"\"\" Send", "ws = WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom:", "create_connection(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout: socket timeout", "* from ._ssl_compat import * \"\"\" websocket python client. =========================", "True: yield self.recv() def __next__(self): return self.recv() def next(self): return", "These buffer over the build-up of a single frame. self.frame_buffer", "see OPCODE_XXX. 
\"\"\" frame = ABNF.create_frame(payload, opcode) return self.send_frame(frame) def", "frame = self.recv_frame() if isEnabledForError(): recv_status = struct.unpack(\"!H\", frame.data)[0] if", "array) from the server. return value: string(byte array) value. \"\"\"", "as integer. The argument means length of mask key. This", "while True: yield self.recv() def __next__(self): return self.recv() def next(self):", "timeout parameter will set the timeout on the socket. If", "frame) elif frame.opcode == ABNF.OPCODE_PING: if len(frame.data) < 126: self.pong(frame.data)", "with self.lock: while data: l = self._send(data) data = data[l:]", "a close frame. If None, it will wait forever until", "proxy auth information. tuple of username and password. default is", "status) + reason, ABNF.OPCODE_CLOSE) sock_timeout = self.sock.gettimeout() self.sock.settimeout(timeout) try: frame", "send(self.sock, data) def _recv(self, bufsize): try: return recv(self.sock, bufsize) except", "def recv_data_frame(self, control_frame=False): \"\"\" Receive data with operation code. control_frame:", "or status >= ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") try:", "of the GNU Lesser General Public License as published by", "(frame.opcode, frame) def recv_frame(self): \"\"\" receive data as frame from", "lock send method. skip_utf8_validation: skip utf8 validation. \"\"\" def __init__(self,", "Send the data frame. frame: frame data created by ABNF.create_frame", "return None subprotocol = property(getsubprotocol) def getstatus(self): \"\"\" get handshake", "Initialize WebSocket object. \"\"\" self.sock_opt = sock_opt(sockopt, sslopt) self.handshake_response =", "using 'options'. If you set \"header\" list object, you can", "new mask keys, see the set_mask_key function's docstring for more", "class to instantiate when creating the connection. 
It has to", "(ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame) if self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame) elif", "ABNF.LENGTH_16: raise ValueError(\"code is invalid range\") try: self.connected = False", "as string. payload: Payload must be utf-8 string or unicode,", "False def _send(self, data): return send(self.sock, data) def _recv(self, bufsize):", "timeout time(second). \"\"\" self.sock_opt.timeout = timeout if self.sock: self.sock.settimeout(timeout) timeout", "frame data, defaults to False return value: tuple of operation", "utf8 validation. \"socket\" - pre-initialized stream socket. \"\"\" sockopt =", "set \"header\" list object, you can set your own custom", "websocket >>> ws = websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\") >>> ws.send(\"Hello, Server\")", "struct import threading # websocket modules from ._exceptions import *", "Low level WebSocket interface. This class is based on The", "not frame: # handle error: # 'NoneType' object has no", "a single frame. self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) self.cont_frame = continuous_frame(fire_cont_frame,", "**options) self.connected = True except: if self.sock: self.sock.close() self.sock =", "string. \"http_proxy_host\" - http proxy host name. \"http_proxy_port\" - http", "\"origin\" -> custom origin url. \"host\" -> custom host header", "import * from ._logging import * from ._http import *", "payload=\"\"): \"\"\" send ping data. payload: data payload to send", "if timeout is not None else getdefaulttimeout()) websock.connect(url, **options) return", "from ._exceptions import * from ._abnf import * from ._socket", "callable object. the func takes 1 argument as integer. 
The", "self.handshake_response.headers else: return None headers = property(getheaders) def connect(self, url,", "raise def send(self, payload, opcode=ABNF.OPCODE_TEXT): \"\"\" Send the data as", ">>> import websocket >>> ws = websocket.WebSocket() >>> ws.connect(\"ws://echo.websocket.org\") >>>", "def set_mask_key(self, func): \"\"\" set function to create musk key.", "self.cont_frame.extract(frame) elif frame.opcode == ABNF.OPCODE_CLOSE: self.send_close() return (frame.opcode, frame) elif", "see the set_mask_key function's docstring for more details sockopt: values", "string data(byte array) from the server. return value: string(byte array)", "from __future__ import print_function import six import socket if six.PY3:", "code to send. see STATUS_XXX. reason: the reason to close.", "origin url. \"host\" -> custom host header string. \"http_proxy_host\" -", "self.sock.settimeout(timeout) try: frame = self.recv_frame() if isEnabledForError(): recv_status = struct.unpack(\"!H\",", "self.pong(frame.data) else: raise WebSocketProtocolException(\"Ping message is too long\") if control_frame:", "http proxy port. If not set, set to 80. \"http_no_proxy\"", "mask key. This func must return string(byte array), which length", "import * from ._socket import * from ._utils import *", "ABNF.OPCODE_BINARY: return data else: return '' def recv_data(self, control_frame=False): \"\"\"", "None subprotocol = property(getsubprotocol) def getstatus(self): \"\"\" get handshake status", "if status < 0 or status >= ABNF.LENGTH_16: raise ValueError(\"code", "can set your own custom header. >>> conn = create_connection(\"ws://echo.websocket.org/\",", "if set to True, lock send method. skip_utf8_validation: skip utf8", "- http proxy host name. \"http_proxy_port\" - http proxy port.", "Otherwise, it must be string(byte array) opcode: operation code to", "recv_frame(self): \"\"\" receive data as frame from server. return value:", "option. fire_cont_frame: fire recv event for each cont frame. 
default", "enable_multithread: self.lock = threading.Lock() else: self.lock = NoLock() def __iter__(self):", "header\"]) timeout: socket timeout time. This value is integer. if", "testing purpose. func: callable object. the func takes 1 argument", "close frame. \"\"\" if self.connected: if status < 0 or", "too long\") if control_frame: return (frame.opcode, frame) elif frame.opcode ==", "self.sock = None raise def send(self, payload, opcode=ABNF.OPCODE_TEXT): \"\"\" Send", "example is an echo client. >>> import websocket >>> ws", "implying sequential `recv` executions. \"\"\" while True: yield self.recv() def", "header=[\"User-Agent: MyProgram\", ... \"x-custom: header\"]) timeout: socket timeout time. This", "wakes up other threads that are waiting in recv_* \"\"\"", "on the socket. If no timeout is supplied, the global", "= property(getstatus) def getheaders(self): \"\"\" get handshake response header \"\"\"", "\"http_proxy_auth\" - http proxy auth information. tuple of username and", "status: \" + repr(recv_status)) except: pass self.sock.settimeout(sock_timeout) self.sock.shutdown(socket.SHUT_RDWR) except: pass", "useful, but WITHOUT ANY WARRANTY; without even the implied warranty", "pre-initialized stream socket. \"\"\" self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options),", "\"\"\" opcode, data = self.recv_data() if six.PY3 and opcode ==", "= handshake(self.sock, *addrs, **options) self.connected = True except: if self.sock:", "self.recv() def __next__(self): return self.recv() def next(self): return self.__next__() def", "._url import * from ._logging import * from ._http import", "\"\"\" self.get_mask_key = func def gettimeout(self): \"\"\" Get the websocket", "trace(\"send: \" + repr(data)) with self.lock: while data: l =", "\"\"\" if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PONG) def", "integer. 
if you set None for this value, it means", "set_mask_key(self, func): \"\"\" set function to create musk key. You", "frame = self.recv_data_frame(control_frame) return opcode, frame.data def recv_data_frame(self, control_frame=False): \"\"\"", "frame.data def recv_data_frame(self, control_frame=False): \"\"\" Receive data with operation code.", "\"header\" -> custom http header list or dict. \"cookie\" ->", "\"socket\" - pre-initialized stream socket. \"\"\" sockopt = options.pop(\"sockopt\", [])", "ABNF.OPCODE_PING: if len(frame.data) < 126: self.pong(frame.data) else: raise WebSocketProtocolException(\"Ping message", "has to implement settimeout and connect. It's __init__ should be", "if isinstance(payload, six.text_type): payload = payload.encode(\"utf-8\") self.send(payload, ABNF.OPCODE_PING) def pong(self,", "tuple of username and password. default is None \"subprotocols\" -", "raise WebSocketProtocolException(\"Ping message is too long\") if control_frame: return (frame.opcode,", "return self.sock_opt.timeout def settimeout(self, timeout): \"\"\" Set the timeout to", "func takes 1 argument as integer. The argument means length", "self.sock.shutdown(socket.SHUT_RDWR) except: pass self.shutdown() def abort(self): \"\"\" Low-level asynchronous abort,", "to create musk key. You can customize mask key generator.", "that are waiting in recv_* \"\"\" if self.connected: self.sock.shutdown(socket.SHUT_RDWR) def", "return value: string(byte array) value. \"\"\" opcode, data = self.recv_data()", "Bar\", ABNF.OPCODE_CONT, 1) >>> ws.send_frame(frame) \"\"\" if self.get_mask_key: frame.get_mask_key =", "http://tools.ietf.org/html/rfc6455 for protocol. \"\"\" class WebSocket(object): \"\"\" Low level WebSocket", "for Python Copyright (C) 2010 <NAME>(liris) This library is free", "def getstatus(self): \"\"\" get handshake status \"\"\" if self.handshake_response: return", "protocol. \"\"\" class WebSocket(object): \"\"\" Low level WebSocket interface. 
This", "ABNF.OPCODE_PONG) def recv(self): \"\"\" Receive string data(byte array) from the", "\"\"\" if self.connected: if status < 0 or status >=", "reason, ABNF.OPCODE_CLOSE) sock_timeout = self.sock.gettimeout() self.sock.settimeout(timeout) try: frame = self.recv_frame()", "more details sockopt: values for socket.setsockopt. sockopt must be tuple", "fire_cont_frame = options.pop(\"fire_cont_frame\", False) enable_multithread = options.pop(\"enable_multithread\", False) skip_utf8_validation =", "fire_cont_frame: fire recv event for each cont frame. default is", "\"\"\" set function to create musk key. You can customize", "FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General", "payload: Payload must be utf-8 string or unicode, if the", "ABNF.create_frame >>> ws = create_connection(\"ws://echo.websocket.org/\") >>> frame = ABNF.create_frame(\"Hello\", ABNF.OPCODE_TEXT)", "This value is integer. if you set None for this", "a close frame. \"\"\" if self.connected: if status < 0", "cookie value. \"origin\" -> custom origin url. \"host\" -> custom", "\"\"\" send pong data. payload: data payload to send server.", "else: return None subprotocol = property(getsubprotocol) def getstatus(self): \"\"\" get", "header. >>> ws = WebSocket() >>> ws.connect(\"ws://echo.websocket.org/\", ... header=[\"User-Agent: MyProgram\",", "== ABNF.OPCODE_BINARY: return data else: return '' def recv_data(self, control_frame=False):", "specified. \"\"\" self.get_mask_key = func def gettimeout(self): \"\"\" Get the", "< 0 or status >= ABNF.LENGTH_16: raise ValueError(\"code is invalid", "ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): self.cont_frame.validate(frame) self.cont_frame.add(frame) if self.cont_frame.is_fire(frame): return self.cont_frame.extract(frame) elif frame.opcode", "must be string or bytes. \"\"\" if status < 0", "details. 
You should have received a copy of the GNU", "timeout if self.sock: self.sock.settimeout(timeout) timeout = property(gettimeout, settimeout) def getsubprotocol(self):", "\"use default_timeout value\" class_: class to instantiate when creating the", "WebSocket object. Passing optional timeout parameter will set the timeout", "\"http_proxy_port\" - http proxy port. If not set, set to", "Payload must be utf-8 string or unicode, if the opcode", "False enable_multithread: if set to True, lock send method. skip_utf8_validation:", "doesn't use proxy. \"http_proxy_auth\" - http proxy auth information. tuple", "operation code and string(byte array) value. \"\"\" opcode, frame =", "WebSocket(object): \"\"\" Low level WebSocket interface. This class is based", "self.sock: self.sock.close() self.sock = None self.connected = False raise def", "to url and return websocket object. Connect to url and", "accept all of it's kwargs. options: \"header\" -> custom http", "defaults to False return value: tuple of operation code and", "self.send(payload, ABNF.OPCODE_BINARY) def ping(self, payload=\"\"): \"\"\" send ping data. payload:", "to the websocket. timeout: timeout time(second). \"\"\" self.sock_opt.timeout = timeout", "fire recv event for each cont frame. default is False" ]
from django.db import models

from vaccine_card.vaccination.models import Vaccine


class State(models.Model):
    """Brazilian federative unit (UF)."""
    name = models.CharField(max_length=20, verbose_name='Nome')

    class Meta:
        verbose_name = 'Unidade Federativa'

    def __str__(self):
        return self.name


class City(models.Model):
    """Municipality; belongs to a State."""
    name = models.CharField(max_length=50, verbose_name='Nome')
    state = models.ForeignKey(State, on_delete=models.CASCADE, verbose_name=State._meta.verbose_name)

    class Meta:
        verbose_name = 'Município'

    def __str__(self):
        return self.name


class Address(models.Model):
    """Postal address (logradouro/número/bairro/CEP) tied to a City."""
    logradouro = models.CharField(max_length=150, verbose_name='Logradouro')
    numero = models.CharField(max_length=4, verbose_name='Número')
    # NOTE(review): null=True on a CharField gives two "empty" states (NULL and '');
    # Django convention is blank=True alone. Kept as-is to avoid a schema migration.
    complemento = models.CharField(max_length=50, null=True, blank=True, verbose_name='Complemento')
    bairro = models.CharField(max_length=150, verbose_name='Bairro')
    cep = models.CharField(max_length=8, verbose_name='CEP')
    # state = models.ForeignKey(State, on_delete=models.CASCADE, verbose_name=State._meta.verbose_name)
    city = models.ForeignKey(City, on_delete=models.CASCADE, verbose_name=City._meta.verbose_name)

    class Meta:
        verbose_name = 'Endereço'


class HealthCenter(models.Model):
    """Health establishment, identified by its CNES and CNPJ codes."""
    cnes = models.CharField(max_length=7, verbose_name='CNES')
    cnpj = models.CharField(max_length=14, verbose_name='CNPJ')
    name = models.CharField(max_length=255, verbose_name='Razão Social')
    # auto_now_add / auto_now default to False, so the explicit False kwargs
    # previously passed here were redundant and have been dropped (no behavior change).
    created_at = models.DateTimeField(auto_now_add=True, verbose_name='Criado em:')
    updated_at = models.DateTimeField(auto_now=True, verbose_name='Atualizado em:')
    address = models.ManyToManyField(Address, verbose_name=Address._meta.verbose_name)

    class Meta:
        verbose_name = 'Estabelecimento de Saúde'
        verbose_name_plural = 'Estabelecimentos de Saúde'

    def __str__(self):
        return self.name


class Stock(models.Model):
    """A received lot of vaccines held by a HealthCenter."""
    lot = models.PositiveSmallIntegerField(verbose_name='Lote')
    created_at = models.DateTimeField(auto_now_add=True, verbose_name='Criado em:')
    updated_at = models.DateTimeField(auto_now=True, verbose_name='Atualizado em:')
    health_center = models.ForeignKey(HealthCenter, on_delete=models.CASCADE,
                                      verbose_name=HealthCenter._meta.verbose_name)
    # Explicit through model records per-vaccine quantities for this stock.
    vaccines = models.ManyToManyField(Vaccine, through='VaccineStock', verbose_name=Vaccine._meta.verbose_name)

    class Meta:
        verbose_name = 'Estoque'


class VaccineStock(models.Model):
    """Through model for Stock<->Vaccine: received and remaining doses."""
    amount = models.PositiveSmallIntegerField(verbose_name='Quantidade recebida')
    remaining = models.PositiveSmallIntegerField(verbose_name='Quantidade restante')
    vaccine = models.ForeignKey(Vaccine, on_delete=models.DO_NOTHING, verbose_name=Vaccine._meta.verbose_name)
    stock = models.ForeignKey(Stock, on_delete=models.DO_NOTHING, verbose_name=Stock._meta.verbose_name)

    class Meta:
        verbose_name = 'Estoque de Vacina'

    def __str__(self):
        return self.vaccine.name
[ ", \"human_readable_message\" : \"Here are the distros we are offering", "url, body, headers): body = \"\"\" { \"post_new_vps_response\" : {", "def _r_orders_new_vps(self, method, url, body, headers): body = \"\"\" {", "\"cpu_model_name\" : \"Intel(R) Xeon(R) CPU E5506 @ 2.13GHz\" , \"host_num_cores\"", "un-cancel the server please contact our support team.\"] } }", "30 , \"data_transfer\" : \"30\"} , \"billing_info\" : { }", "class RimuHostingTest(unittest.TestCase, TestCaseMixin): def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes = (None, RimuHostingMockHttp) self.driver", ": 200 , \"error_info\" : null , \"response_type\" : \"OK\"", "support (LTS))\"} , { \"distro_code\" : \"ubuntu810\" , \"distro_description\" :", "\"primary_ip\" : \"172.16.17.32\", \"secondary_ips\" : []} , \"running_state\" : \"RUNNING\"}", "2.0 # (the \"License\"); you may not use this file", "image=image, size=size) class RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers): body = \"\"\" {", ", \"distro_description\" : \"Fedora 10\"}]}} \"\"\" return (httplib.OK, body, {},", "from 2009-04)\"} , { \"distro_code\" : \"ubuntu804\" , \"distro_description\" :", "(LTS))\"} , { \"distro_code\" : \"ubuntu810\" , \"distro_description\" : \"Ubuntu", ": \"Centos5\"} , { \"distro_code\" : \"ubuntu904\" , \"distro_description\" :", "pinging OK.\" , \"response_display_duration_type\" : \"REGULAR\" , \"is_restarted\" : true", "\"is_pinging\" : true , \"running_vps_info\" : { \"pings_ok\" : true", "46800000} , \"vps_uptime_s\" : 19 , \"vps_cpu_time_s\" : 5 ,", "\"3.4.1\" , \"hostload\" : [1.45 , 0.56 , 0.28] ,", "restarted. 
After the reboot api.ivan.net.nz is pinging OK.\" , \"response_display_duration_type\"", "RedRata Ltd from libcloud.drivers.rimuhosting import RimuHostingNodeDriver from test import MockHttp", "image = self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size) class RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers):", "true , \"running_vps_info\" : { \"pings_ok\" : true , \"current_kernel\"", ", \"distro_description\" : \"Ubuntu 8.04 (Hardy Heron, 5 yr long", "using default values.\"] , \"about_order\" : { \"order_oid\" : 52255865", "\"api.ivan.net.nz restarted. After the reboot api.ivan.net.nz is pinging OK.\" ,", ": 0} , \"host_server_oid\" : \"764\" , \"server_type\" : \"VPS\"", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "\"data_transfer_allowance\" : { \"data_transfer_gb\" : 30 , \"data_transfer\" : \"30\"}", "{ \"primary_ip\" : \"172.16.17.32\", \"secondary_ips\" : []} , \"running_state\" :", "pricing to suit.&nbsp; Pricing is in USD.&nbsp; If you are", "200 , \"error_info\" : null , \"response_type\" : \"OK\" ,", ": \"RUNNING\" , \"is_suspended\" : false}}} \"\"\" return (httplib.OK, body,", "this work for additional information regarding copyright ownership. 
# libcloud.org", "\"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465) self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def test_list_sizes(self): sizes = self.driver.list_sizes()", "\"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\" : \"RUNNING\"}]}}\"\"\" return (httplib.OK, body,", "body, headers): body = \"\"\" { \"delete_server_response\" : { \"status_message\"", ", \"disk_space_2_mb\" : 0} , \"pricing_plan_code\" : \"MIRO1B\" , \"instantiation_options\"", "\"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-52255865-api-ivan-net-nz\" , \"billing_oid\" :", "\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_distributions(self, method, url,", "exception on failure node = self.driver.list_nodes()[0] self.driver.reboot_node(node) def test_destroy_node(self): #", "\"offered_at_data_center\" : { \"data_center_location_code\" : \"DCDALLAS\" , \"data_center_location_name\" : \"Dallas\"}}", "Transfer\" , \"Selected user as the owner of the billing", ", \"disk_space_mb\" : 4096 , \"disk_space_2_mb\" : 0} , \"pricing_plan_code\"", "MockHttp, TestCaseMixin import unittest import httplib class RimuHostingTest(unittest.TestCase, TestCaseMixin): def", "Heron, 5 yr long term support (LTS))\"} , { \"distro_code\"", "\"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 19 , \"vps_cpu_time_s\" :", "0.56 , 0.28] , \"host_uptime_s\" : 3378276 , \"host_mem_mb_free\" :", "\"human_readable_message\" : \"Here some pricing plans we are offering on", "_r_pricing_plans(self,method,url,body,headers): body = \"\"\" {\"get_pricing_plans_response\" : { \"status_message\" : null", "\"is_on_customers_own_physical_server\" : false , \"vps_parameters\" : { \"memory_mb\" : 160", "\"30\"} , \"billing_info\" : { } , \"allocated_ips\" : {", "\"human_readable_message\" : \"Server removed\" , \"response_display_duration_type\" : \"REGULAR\" , \"cancel_messages\"", "in USD.&nbsp; If you are an NZ-based customer then 
we", "def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url, body, headers): body = \"\"\" {", "\"OK\" , \"human_readable_message\" : \"Server removed\" , \"response_display_duration_type\" : \"REGULAR\"", ", \"data_center_location_name\" : \"Dallas\"}} ]}} \"\"\" return (httplib.OK, body, {},", "{ \"put_running_state_response\" : { \"status_message\" : null , \"status_code\" :", "provided, using default values.\"] , \"about_order\" : { \"order_oid\" :", "would need to add GST.\" , \"response_display_duration_type\" : \"REGULAR\" ,", ": \"Fedora 10\"}]}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "Note we offer most disk and memory sizes.&nbsp; So if", "we would need to add GST.\" , \"response_display_duration_type\" : \"REGULAR\"", ", \"host_num_cores\" : 1 , \"host_xen_version\" : \"3.4.1\" , \"hostload\"", "recommended distro)\"} , { \"distro_code\" : \"centos5\" , \"distro_description\" :", "96122465 , \"user_oid\" : 0 , \"host_server_oid\" : null ,", "\"status_message\" : null , \"status_code\" : 200 , \"error_info\" :", "def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes = (None, RimuHostingMockHttp) self.driver = RimuHostingNodeDriver('foo') def", "self.assertEqual(image.id, \"lenny\") def test_reboot_node(self): # Raises exception on failure node", "data: Wire Transfer\" , \"Selected user as the owner of", ": false , \"is_backups_enabled\" : true , \"next_backup_time\" : {", "you need to un-cancel the server please contact our support", "under the License is distributed on an \"AS IS\" BASIS,", "= self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size) class RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers): body", "License for the specific language governing permissions and # limitations", "import MockHttp from test import 
MockHttp, TestCaseMixin import unittest import", "[{ \"order_oid\" : 88833465 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\"", "distributed with # this work for additional information regarding copyright", "\"human_readable_message\" : \"Found 15 orders\" , \"response_display_duration_type\" : \"REGULAR\", \"about_orders\"", "offering on new orders.&nbsp; Note we offer most disk and", "\"response_display_duration_type\" : \"REGULAR\" , \"cancel_messages\" : [\"api.ivan.net.nz is being shut", "on new orders.&nbsp; Note we offer most disk and memory", ", \"response_type\" : \"OK\" , \"human_readable_message\" : null , \"response_display_duration_type\"", "= sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def test_list_images(self): images =", "\"billing_oid\" : 96122465 , \"user_oid\" : 0 , \"host_server_oid\" :", "on failure node = self.driver.list_nodes()[0] self.driver.reboot_node(node) def test_destroy_node(self): # Raises", "suit.&nbsp; Pricing is in USD.&nbsp; If you are an NZ-based", "\"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\" : \"VPS\" , \"offered_at_data_center\" : { \"data_center_location_code\"", ": \"Ubuntu 8.04 (Hardy Heron, 5 yr long term support", "you setup a new server feel free to vary these", "import unittest import httplib class RimuHostingTest(unittest.TestCase, TestCaseMixin): def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes", "<NAME>\" , \"No VPS paramters provided, using default values.\"] ,", ", \"running_state\" : \"RUNNING\"} , \"new_order_request\" : { \"billing_oid\" :", "orders\" , \"response_display_duration_type\" : \"REGULAR\", \"about_orders\" : [{ \"order_oid\" :", "default values.\"] , \"about_order\" : { \"order_oid\" : 52255865 ,", ": \"30\"} , \"billing_info\" : { } , \"allocated_ips\" :", "limitations under the License. 
# Copyright 2009 RedRata Ltd from", ",\"amt_usd\" : 32.54} , \"minimum_memory_mb\" : 950 , \"minimum_disk_gb\" :", ", \"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\" : \"VPS\" , \"offered_at_data_center\"", ", \"last_backup_message\" : \"\" , \"is_console_login_enabled\" : false , \"console_public_authorized_keys\"", "\"distro_code\" : \"fedora10\" , \"distro_description\" : \"Fedora 10\"}]}} \"\"\" return", ": 34} , \"running_state_messages\" : null}} \"\"\" return (httplib.OK, body,", ": \"Here are the distros we are offering on new", "\"webmin\" , \"domain_name\" : \"api.ivan.net.nz\" , \"password\" : \"<PASSWORD>\" ,", ": 4096 , \"disk_space_2_mb\" : 0} , \"host_server_oid\" : \"764\"", ", \"about_order\" : { \"order_oid\" : 52255865 , \"domain_name\" :", "a new server feel free to vary these (e.g. different", "body, headers): body = \"\"\" { \"put_running_state_response\" : { \"status_message\"", ": 75 , \"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\" : \"VPS\"", "false , \"is_backups_enabled\" : true , \"next_backup_time\" : { \"ms_since_epoch\":", "new orders.\" , \"response_display_duration_type\" : \"REGULAR\" , \"distro_infos\" : [{", ": \"OK\" , \"human_readable_message\" : \"Found 15 orders\" , \"response_display_duration_type\"", "software # distributed under the License is distributed on an", "\"REGULAR\" , \"is_restarted\" : true , \"is_pinging\" : true ,", "{ \"primary_ip\" : \"192.168.3.11\" , \"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\"", "down.\" , \"A $7.98 credit has been added to your", "with # this work for additional information regarding copyright ownership.", ": { \"is_host64_bit_capable\" : true , \"default_kernel_i386\" : \"2.6.30.5-xenU.i386\" ,", "\"distro_description\" : \"Centos5\"} , { \"distro_code\" : \"ubuntu904\" , \"distro_description\"", ": { 
\"data_center_location_code\" : \"DCDALLAS\" , \"data_center_location_name\" : \"Dallas\"}} ]}}", "\"RUNNING\"}]}}\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers): body =", "failure node = self.driver.list_nodes()[0] self.driver.reboot_node(node) def test_destroy_node(self): # Raises exception", "} } \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self,", "method, url, body, headers): body = \"\"\" { \"post_new_vps_response\" :", "httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body, headers): body = \"\"\"", ": 51825 , \"host_mem_mb_total\" : 73719 , \"running_vpss\" : 34}", ": \"RUNNING\"}]}}\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers): body", "} , \"allocated_ips\" : { \"primary_ip\" : \"192.168.3.11\" , \"secondary_ips\"", "\"distro_description\" : \"Debian 5.0 (aka Lenny, RimuHosting recommended distro)\"} ,", "compliance with # the License. 
You may obtain a copy", "{ \"pings_ok\" : true , \"current_kernel\" : \"default\" , \"current_kernel_canonical\"", "\"If you need to un-cancel the server please contact our", "licenses this file to You under the Apache License, Version", "(httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url, body, headers):", "libcloud.drivers.rimuhosting import RimuHostingNodeDriver from test import MockHttp from test import", "0} , \"host_server_oid\" : \"764\" , \"server_type\" : \"VPS\" ,", ", 0.28] , \"host_uptime_s\" : 3378276 , \"host_mem_mb_free\" : 51825", ", \"hostload\" : [1.45 , 0.56 , 0.28] , \"host_uptime_s\"", ", \"allocated_ips\" : { \"primary_ip\" : \"172.16.17.32\", \"secondary_ips\" : []}", "self.driver = RimuHostingNodeDriver('foo') def test_list_nodes(self): nodes = self.driver.list_nodes() self.assertEqual(len(nodes),1) node", "\"host_server_oid\" : null , \"vps_order_oid_to_clone\" : 0 , \"ip_request\" :", ", \"host_mem_mb_total\" : 73719 , \"running_vpss\" : 34} , \"running_state_messages\"", "we offer most disk and memory sizes.&nbsp; So if you", "\"OK\" , \"human_readable_message\" : \"Found 15 orders\" , \"response_display_duration_type\" :", ", \"response_display_duration_type\" : \"REGULAR\", \"about_orders\" : [{ \"order_oid\" : 88833465", "headers): body = \"\"\" { \"get_distros_response\" : { \"status_message\" :", "5.0 (aka Lenny, RimuHosting recommended distro)\"} , { \"distro_code\" :", "88833465 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-88833465-api-ivan-net-nz\" ,", "\"vps_cpu_time_s\" : 5 , \"running_state\" : \"RUNNING\" , \"is_suspended\" :", "user-specified billing data: Wire Transfer\" , \"Selected user as the", "# Raises exception on failure node = self.driver.list_nodes()[0] self.driver.destroy_node(node) def", "(aka Lenny, RimuHosting recommended distro)\"} , { \"distro_code\" : \"centos5\"", "\"allocated_ips\" : { \"primary_ip\" : 
\"172.16.17.32\", \"secondary_ips\" : []} ,", ", \"response_type\" : \"OK\" , \"human_readable_message\" : \"Here are the", "memory sizes.&nbsp; So if you setup a new server feel", ": \"REGULAR\" , \"pricing_plan_infos\" : [{ \"pricing_plan_code\" : \"MiroVPSLowContention\" ,", "method, url, body, headers): body = \"\"\" { \"get_distros_response\" :", "add GST.\" , \"response_display_duration_type\" : \"REGULAR\" , \"pricing_plan_infos\" : [{", "from libcloud.drivers.rimuhosting import RimuHostingNodeDriver from test import MockHttp from test", "Semi-Dedicated Server (Dallas)\" , \"monthly_recurring_fee\" : 32.54 , \"monthly_recurring_amt\" :", "\"distro_code\" : \"ubuntu904\" , \"distro_description\" : \"Ubuntu 9.04 (Jaunty Jackalope,", "TestCaseMixin): def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes = (None, RimuHostingMockHttp) self.driver = RimuHostingNodeDriver('foo')", ", \"offered_at_data_center\" : { \"data_center_location_code\" : \"DCDALLAS\" , \"data_center_location_name\" :", ", \"response_type\" : \"OK\" , \"human_readable_message\" : \"Found 15 orders\"", "true , \"current_kernel\" : \"default\" , \"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\" ,", "CPU E5506 @ 2.13GHz\" , \"host_num_cores\" : 1 , \"host_xen_version\"", ", \"minimum_memory_mb\" : 950 , \"minimum_disk_gb\" : 20 , \"minimum_data_transfer_allowance_gb\"", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"post_new_vps_response\" : { \"status_message\" : null , \"status_code\" : 200", ": \"RUNNING\" , \"is_suspended\" : false} , \"host_server_info\" : {", "ANY KIND, either express or implied. # See the License", "(ASF) under one or more # contributor license agreements. 
See", "use this file except in compliance with # the License.", "See the License for the specific language governing permissions and", "RimuHostingMockHttp) self.driver = RimuHostingNodeDriver('foo') def test_list_nodes(self): nodes = self.driver.list_nodes() self.assertEqual(len(nodes),1)", "\"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 31 , \"vps_cpu_time_s\" :", "and # limitations under the License. # Copyright 2009 RedRata", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "\"\"\" { \"get_orders_response\" : { \"status_message\" : null , \"status_code\"", "\"Dallas\"}} ]}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_distributions(self,", "to in writing, software # distributed under the License is", "\"pings_ok\" : true , \"current_kernel\" : \"default\" , \"current_kernel_canonical\" :", "true , \"is_pinging\" : true , \"running_vps_info\" : { \"pings_ok\"", "{}, httplib.responses[httplib.OK]) def _r_distributions(self, method, url, body, headers): body =", "# See the License for the specific language governing permissions", "distros we are offering on new orders.\" , \"response_display_duration_type\" :", "{ \"memory_mb\" : 160 , \"disk_space_mb\" : 4096 , \"disk_space_2_mb\"", "\"distro_description\" : \"Fedora 10\"}]}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK])", ": \"RUNNING\"} , \"new_order_request\" : { \"billing_oid\" : 96122465 ,", "distro)\") self.assertEqual(image.id, \"lenny\") def test_reboot_node(self): # Raises exception on failure", ": null , \"is_backup_running\" : false , \"is_backups_enabled\" : true", "credit has been added to your account.\" , \"If you", "team.\"] } } \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def", "\"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465) 
self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def test_list_sizes(self): sizes", ", { \"distro_code\" : \"ubuntu810\" , \"distro_description\" : \"Ubuntu 8.10", "on failure size = self.driver.list_sizes()[0] image = self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image,", "language governing permissions and # limitations under the License. #", "null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"Server removed\"", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "96122465 , \"is_on_customers_own_physical_server\" : false , \"vps_parameters\" : { \"memory_mb\"", ", \"distro\" : \"lenny\"}} , \"running_vps_info\" : { \"pings_ok\" :", "\"default_kernel_i386\" : \"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\" :", "is being shut down.\" , \"A $7.98 credit has been", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url, body, headers): body", "node = nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465) self.assertEqual(node.id,", "null , \"response_type\" : \"OK\" , \"human_readable_message\" : null ,", "[1.45 , 0.56 , 0.28] , \"host_uptime_s\" : 3378276 ,", "\"lenny\") def test_reboot_node(self): # Raises exception on failure node =", "{ \"billing_oid\" : 96122465 , \"user_oid\" : 0 , \"host_server_oid\"", "\"order-88833465-api-ivan-net-nz\") def test_list_sizes(self): sizes = self.driver.list_sizes() self.assertEqual(len(sizes),1) size = sizes[0]", "httplib.responses[httplib.OK]) def _r_distributions(self, method, url, body, headers): body = \"\"\"", "\"REGULAR\", \"about_orders\" : [{ \"order_oid\" : 88833465 , \"domain_name\" :", "\"Ubuntu 
9.04 (Jaunty Jackalope, from 2009-04)\"} , { \"distro_code\" :", "} , \"allocated_ips\" : { \"primary_ip\" : \"172.16.17.32\", \"secondary_ips\" :", "[]} , \"running_state\" : \"RUNNING\"} , \"new_order_request\" : { \"billing_oid\"", ", \"slug\" : \"order-88833465-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\"", "{ \"data_transfer_gb\" : 30 , \"data_transfer\" : \"30\"} , \"billing_info\"", "\"response_display_duration_type\" : \"REGULAR\", \"about_orders\" : [{ \"order_oid\" : 88833465 ,", "removed\" , \"response_display_duration_type\" : \"REGULAR\" , \"cancel_messages\" : [\"api.ivan.net.nz is", ": 30 , \"data_transfer\" : \"30\"} , \"billing_info\" : {", "10\"}]}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_new_vps(self, method,", ": 6 , \"running_state\" : \"RUNNING\" , \"is_suspended\" : false}}}", "false , \"vps_parameters\" : { \"memory_mb\" : 160 , \"disk_space_mb\"", "So if you setup a new server feel free to", ": { \"control_panel\" : \"webmin\" , \"domain_name\" : \"api.ivan.net.nz\" ,", "\"RUNNING\" , \"is_suspended\" : false}}} \"\"\" return (httplib.OK, body, {},", "(httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body, headers):", "License, Version 2.0 # (the \"License\"); you may not use", ", \"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\" : \"RUNNING\"}]}}\"\"\" return (httplib.OK,", "may not use this file except in compliance with #", ": null , \"vps_order_oid_to_clone\" : 0 , \"ip_request\" : {", "self.driver.list_nodes() self.assertEqual(len(nodes),1) node = nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'],", "the pricing to suit.&nbsp; Pricing is in USD.&nbsp; If you", "NZ-based customer then we would need to add GST.\" ,", "body, {}, httplib.responses[httplib.OK]) 
def _r_orders_new_vps(self, method, url, body, headers): body", ", \"user_oid\" : 0 , \"host_server_oid\" : null , \"vps_order_oid_to_clone\"", "agreed to in writing, software # distributed under the License", ", \"vps_cpu_time_s\" : 6 , \"running_state\" : \"RUNNING\" , \"is_suspended\"", "\"pricing_plan_code\" : \"MIRO1B\" , \"instantiation_options\" : { \"control_panel\" : \"webmin\"", "to the Apache Software Foundation (ASF) under one or more", "\"data_transfer\" : \"30\"} , \"billing_info\" : { } , \"allocated_ips\"", "Lenny, RimuHosting recommended distro)\"} , { \"distro_code\" : \"centos5\" ,", ": \"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\" : \"Intel(R) Xeon(R) CPU E5506 @", "RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers): body = \"\"\" { \"get_orders_response\" : {", "distributed under the License is distributed on an \"AS IS\"", "if you setup a new server feel free to vary", "true , \"next_backup_time\" : { \"ms_since_epoch\": 1256446800000, \"iso_format\" : \"2009-10-25T05:00:00Z\",", "test import MockHttp, TestCaseMixin import unittest import httplib class RimuHostingTest(unittest.TestCase,", ": \"MIRO1B\" , \"instantiation_options\" : { \"control_panel\" : \"webmin\" ,", "has been added to your account.\" , \"If you need", ": \"REGULAR\" , \"cancel_messages\" : [\"api.ivan.net.nz is being shut down.\"", "[\"Using user-specified billing data: Wire Transfer\" , \"Selected user as", "sizes = self.driver.list_sizes() self.assertEqual(len(sizes),1) size = sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75)", ": \"Dallas\"}} ]}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def", "\"response_type\" : \"OK\" , \"human_readable_message\" : null , \"response_display_duration_type\" :", "\"MIRO1B\" , \"instantiation_options\" : { \"control_panel\" : \"webmin\" , \"domain_name\"", "from test import MockHttp from test import MockHttp, TestCaseMixin import", 
"under the Apache License, Version 2.0 # (the \"License\"); you", "# the License. You may obtain a copy of the", "ownership. # libcloud.org licenses this file to You under the", "express or implied. # See the License for the specific", "setup a new server feel free to vary these (e.g.", "free to vary these (e.g. different memory, disk, etc) and", "[\"api.ivan.net.nz is being shut down.\" , \"A $7.98 credit has", ", \"response_display_duration_type\" : \"REGULAR\" , \"is_restarted\" : true , \"is_pinging\"", "Licensed to the Apache Software Foundation (ASF) under one or", "node = self.driver.list_nodes()[0] self.driver.reboot_node(node) def test_destroy_node(self): # Raises exception on", "body = \"\"\" { \"get_distros_response\" : { \"status_message\" : null", "{ \"ms_since_epoch\": 1256446800000, \"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} ,", "images[0] self.assertEqual(image.name,\"Debian 5.0 (aka Lenny, RimuHosting\"\\ \" recommended distro)\") self.assertEqual(image.id,", "feel free to vary these (e.g. 
different memory, disk, etc)", "import RimuHostingNodeDriver from test import MockHttp from test import MockHttp,", "\"is_suspended\" : false}}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def", "= self.driver.list_nodes()[0] self.driver.reboot_node(node) def test_destroy_node(self): # Raises exception on failure", "\"running_state\" : \"RUNNING\"}]}}\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers):", "\"human_readable_message\" : \"Here are the distros we are offering on", "(httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_new_vps(self, method, url, body, headers):", "{ } , \"allocated_ips\" : { \"primary_ip\" : \"172.16.17.32\", \"secondary_ips\"", "2009 RedRata Ltd from libcloud.drivers.rimuhosting import RimuHostingNodeDriver from test import", "writing, software # distributed under the License is distributed on", ": true , \"default_kernel_i386\" : \"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\"", ", \"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\" : \"Intel(R) Xeon(R) CPU", "memory, disk, etc) and we will just adjust the pricing", "\"Ubuntu 8.10 (Intrepid Ibex, from 2008-10)\"} , { \"distro_code\" :", "self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465) self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def test_list_sizes(self):", "you may not use this file except in compliance with", "owner of the billing details: <NAME>\" , \"No VPS paramters", ", \"running_vps_info\" : { \"pings_ok\" : true , \"current_kernel\" :", "the License. 
You may obtain a copy of the License", ", \"monthly_recurring_fee\" : 32.54 , \"monthly_recurring_amt\" : { \"amt\" :", "8.10 (Intrepid Ibex, from 2008-10)\"} , { \"distro_code\" : \"fedora10\"", ", \"response_type\" : \"OK\" , \"human_readable_message\" : \"Server removed\" ,", "\"764\" , \"server_type\" : \"VPS\" , \"data_transfer_allowance\" : { \"data_transfer_gb\"", "body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body, headers): body", ", \"ip_request\" : { \"num_ips\" : 1, \"extra_ip_reason\" : \"\"}", ", \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" : false , \"vps_parameters\"", "9.04 (Jaunty Jackalope, from 2009-04)\"} , { \"distro_code\" : \"ubuntu804\"", "\"distro_code\" : \"centos5\" , \"distro_description\" : \"Centos5\"} , { \"distro_code\"", ": \"Server removed\" , \"response_display_duration_type\" : \"REGULAR\" , \"cancel_messages\" :", "def test_list_nodes(self): nodes = self.driver.list_nodes() self.assertEqual(len(nodes),1) node = nodes[0] self.assertEqual(node.public_ip[0],", "exception on failure node = self.driver.list_nodes()[0] self.driver.destroy_node(node) def test_create_node(self): #", "user as the owner of the billing details: <NAME>\" ,", "\"RUNNING\"} , \"new_order_request\" : { \"billing_oid\" : 96122465 , \"user_oid\"", "{}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body, headers): body =", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "Version 2.0 # (the \"License\"); you may not use this", "Copyright 2009 RedRata Ltd from libcloud.drivers.rimuhosting import RimuHostingNodeDriver from test", "test_reboot_node(self): # Raises exception on failure node = self.driver.list_nodes()[0] self.driver.reboot_node(node)", ", \"is_on_customers_own_physical_server\" : false , \"vps_parameters\" : { \"memory_mb\" :", ": 35.0 , \"currency\" : \"CUR_AUD\" ,\"amt_usd\" : 32.54} ,", "\"user_oid\" : 0 , \"host_server_oid\" : null , \"vps_order_oid_to_clone\" :", "# Raises exception on failure node = self.driver.list_nodes()[0] self.driver.reboot_node(node) def", "\"vps_order_oid_to_clone\" : 0 , \"ip_request\" : { \"num_ips\" : 1,", "{ \"num_ips\" : 1, \"extra_ip_reason\" : \"\"} , \"vps_parameters\" :", "\"console_public_authorized_keys\" : null , \"is_backup_running\" : false , \"is_backups_enabled\" :", "\"vps_cpu_time_s\" : 6 , \"running_state\" : \"RUNNING\" , \"is_suspended\" :", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "def test_list_images(self): images = self.driver.list_images() self.assertEqual(len(images),6) image = images[0] self.assertEqual(image.name,\"Debian", "\"response_type\" : \"OK\" , \"human_readable_message\" : \"Here some pricing plans", ", \"response_display_duration_type\" : \"REGULAR\" , \"pricing_plan_infos\" : [{ \"pricing_plan_code\" :", "\"response_type\" : \"OK\" , \"human_readable_message\" : \"Here are the distros", ": \"lenny\" , \"distro_description\" : \"Debian 5.0 (aka Lenny, RimuHosting", ", \"minimum_disk_gb\" : 20 , \"minimum_data_transfer_allowance_gb\" : 75 , \"see_also_url\"", "\"minimum_memory_mb\" : 950 , \"minimum_disk_gb\" : 20 , \"minimum_data_transfer_allowance_gb\" :", ", { \"distro_code\" : \"ubuntu804\" , \"distro_description\" : \"Ubuntu 8.04", ", \"is_suspended\" : false}}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK])", "billing details: <NAME>\" , \"No VPS paramters provided, using default", ": { 
\"data_transfer_gb\" : 30 , \"data_transfer\" : \"30\"} ,", "\"Found 15 orders\" , \"response_display_duration_type\" : \"REGULAR\", \"about_orders\" : [{", "and memory sizes.&nbsp; So if you setup a new server", "[{ \"pricing_plan_code\" : \"MiroVPSLowContention\" , \"pricing_plan_description\" : \"MiroVPS Semi-Dedicated Server", "different memory, disk, etc) and we will just adjust the", "RimuHosting recommended distro)\"} , { \"distro_code\" : \"centos5\" , \"distro_description\"", "for additional information regarding copyright ownership. # libcloud.org licenses this", ", \"running_state\" : \"RUNNING\" , \"is_suspended\" : false}}} \"\"\" return", "\"cancel_messages\" : [\"api.ivan.net.nz is being shut down.\" , \"A $7.98", ": 5 , \"running_state\" : \"RUNNING\" , \"is_suspended\" : false}", "method, url, body, headers): body = \"\"\" { \"put_running_state_response\" :", "long term support (LTS))\"} , { \"distro_code\" : \"ubuntu810\" ,", "\"2.6.30.5-xenU.i386\" , \"last_backup_message\" : \"\" , \"is_console_login_enabled\" : false ,", "is pinging OK.\" , \"response_display_duration_type\" : \"REGULAR\" , \"is_restarted\" :", "\"order_oid\" : 52255865 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" :", "Raises exception on failure size = self.driver.list_sizes()[0] image = self.driver.list_images()[0]", "\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url,", "an NZ-based customer then we would need to add GST.\"", ": \"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\" : \"Intel(R)", ", \"response_type\" : \"OK\" , \"human_readable_message\" : \"api.ivan.net.nz restarted. 
After", ": [1.45 , 0.56 , 0.28] , \"host_uptime_s\" : 3378276", "_r_orders(self,method,url,body,headers): body = \"\"\" { \"get_orders_response\" : { \"status_message\" :", "{ \"get_orders_response\" : { \"status_message\" : null , \"status_code\" :", "server feel free to vary these (e.g. different memory, disk,", ", \"is_pinging\" : true , \"running_vps_info\" : { \"pings_ok\" :", "35.0 , \"currency\" : \"CUR_AUD\" ,\"amt_usd\" : 32.54} , \"minimum_memory_mb\"", "\"host_uptime_s\" : 3378276 , \"host_mem_mb_free\" : 51825 , \"host_mem_mb_total\" :", "paramters provided, using default values.\"] , \"about_order\" : { \"order_oid\"", "\"distro_description\" : \"Ubuntu 8.04 (Hardy Heron, 5 yr long term", "\"data_center_location_name\" : \"Dallas\"}} ]}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK])", "test_destroy_node(self): # Raises exception on failure node = self.driver.list_nodes()[0] self.driver.destroy_node(node)", "Lenny, RimuHosting\"\\ \" recommended distro)\") self.assertEqual(image.id, \"lenny\") def test_reboot_node(self): #", "added to your account.\" , \"If you need to un-cancel", "(the \"License\"); you may not use this file except in", ", \"response_display_duration_type\" : \"REGULAR\" , \"cancel_messages\" : [\"api.ivan.net.nz is being", "the reboot api.ivan.net.nz is pinging OK.\" , \"response_display_duration_type\" : \"REGULAR\"", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", ", \"vps_uptime_s\" : 31 , \"vps_cpu_time_s\" : 6 , \"running_state\"", "\"Ubuntu 8.04 (Hardy Heron, 5 yr long term support (LTS))\"}", "term support (LTS))\"} , { \"distro_code\" : \"ubuntu810\" , \"distro_description\"", "the License is distributed on an \"AS IS\" BASIS, #", "\"OK\" , \"human_readable_message\" : null , \"response_display_duration_type\" : \"REGULAR\" ,", "contact our support team.\"] } } \"\"\" return (httplib.OK, body,", "\"status_code\" : 200 , \"error_info\" : null , \"response_type\" :", "\"response_type\" : \"OK\" , \"human_readable_message\" : \"api.ivan.net.nz restarted. After the", ": \"MiroVPS Semi-Dedicated Server (Dallas)\" , \"monthly_recurring_fee\" : 32.54 ,", "url, body, headers): body = \"\"\" { \"put_running_state_response\" : {", "\"Server removed\" , \"response_display_duration_type\" : \"REGULAR\" , \"cancel_messages\" : [\"api.ivan.net.nz", ", \"data_transfer_allowance\" : { \"data_transfer_gb\" : 30 , \"data_transfer\" :", ", \"distro_description\" : \"Ubuntu 9.04 (Jaunty Jackalope, from 2009-04)\"} ,", "additional information regarding copyright ownership. 
# libcloud.org licenses this file", ": [\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\" : \"RUNNING\"}]}}\"\"\" return (httplib.OK, body, {},", "\"allocated_ips\" : { \"primary_ip\" : \"192.168.3.11\" , \"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]}", "is in USD.&nbsp; If you are an NZ-based customer then", ", \"response_display_duration_type\" : \"REGULAR\" , \"setup_messages\" : [\"Using user-specified billing", ": \"MiroVPSLowContention\" , \"pricing_plan_description\" : \"MiroVPS Semi-Dedicated Server (Dallas)\" ,", "adjust the pricing to suit.&nbsp; Pricing is in USD.&nbsp; If", "\"MiroVPS Semi-Dedicated Server (Dallas)\" , \"monthly_recurring_fee\" : 32.54 , \"monthly_recurring_amt\"", "{ \"distro_code\" : \"fedora10\" , \"distro_description\" : \"Fedora 10\"}]}} \"\"\"", "88833465) self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def test_list_sizes(self): sizes = self.driver.list_sizes() self.assertEqual(len(sizes),1) size", "\"human_readable_message\" : \"api.ivan.net.nz restarted. After the reboot api.ivan.net.nz is pinging", "\"response_display_duration_type\" : \"REGULAR\" , \"distro_infos\" : [{ \"distro_code\" : \"lenny\"", ": null , \"response_type\" : \"OK\" , \"human_readable_message\" : null", "\"ms_since_epoch\": 1256446800000, \"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\"", "\"\"} , \"vps_parameters\" : { \"memory_mb\" : 160 , \"disk_space_mb\"", "test_list_images(self): images = self.driver.list_images() self.assertEqual(len(images),6) image = images[0] self.assertEqual(image.name,\"Debian 5.0", "\"data_center_location_code\" : \"DCDALLAS\" , \"data_center_location_name\" : \"Dallas\"}} ]}} \"\"\" return", "more # contributor license agreements. 
See the NOTICE file distributed", "\"running_vps_info\" : { \"pings_ok\" : true , \"current_kernel\" : \"default\"", "sizes.&nbsp; So if you setup a new server feel free", "will just adjust the pricing to suit.&nbsp; Pricing is in", "body = \"\"\" { \"put_running_state_response\" : { \"status_message\" : null", ": \"Intel(R) Xeon(R) CPU E5506 @ 2.13GHz\" , \"host_num_cores\" :", ", \"response_display_duration_type\" : \"REGULAR\" , \"distro_infos\" : [{ \"distro_code\" :", "\"slug\" : \"order-88833465-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" :", "{ \"is_host64_bit_capable\" : true , \"default_kernel_i386\" : \"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\"", "law or agreed to in writing, software # distributed under", ", \"pricing_plan_description\" : \"MiroVPS Semi-Dedicated Server (Dallas)\" , \"monthly_recurring_fee\" :", "self.driver.destroy_node(node) def test_create_node(self): # Raises exception on failure size =", "new orders.&nbsp; Note we offer most disk and memory sizes.&nbsp;", "then we would need to add GST.\" , \"response_display_duration_type\" :", "Apache Software Foundation (ASF) under one or more # contributor", "image = images[0] self.assertEqual(image.name,\"Debian 5.0 (aka Lenny, RimuHosting\"\\ \" recommended", "{}, httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers): body = \"\"\" {\"get_pricing_plans_response\" : {", "offer most disk and memory sizes.&nbsp; So if you setup", "# Copyright 2009 RedRata Ltd from libcloud.drivers.rimuhosting import RimuHostingNodeDriver from", "to add GST.\" , \"response_display_duration_type\" : \"REGULAR\" , \"pricing_plan_infos\" :", ", \"distro_infos\" : [{ \"distro_code\" : \"lenny\" , \"distro_description\" :", "\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_new_vps(self, method, url,", "\"REGULAR\" , \"distro_infos\" : [{ \"distro_code\" : \"lenny\" , \"distro_description\"", ": [\"Using 
user-specified billing data: Wire Transfer\" , \"Selected user", ": \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 19 ,", ": \"OK\" , \"human_readable_message\" : \"Here some pricing plans we", ", \"running_state\" : \"RUNNING\" , \"is_suspended\" : false} , \"host_server_info\"", "<reponame>shenoyn/libcloud # Licensed to the Apache Software Foundation (ASF) under", ": 46800000} , \"vps_uptime_s\" : 19 , \"vps_cpu_time_s\" : 5", ": \"OK\" , \"human_readable_message\" : null , \"response_display_duration_type\" : \"REGULAR\"", "\"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" : false , \"vps_parameters\" :", ": { } , \"allocated_ips\" : { \"primary_ip\" : \"192.168.3.11\"", "def test_reboot_node(self): # Raises exception on failure node = self.driver.list_nodes()[0]", ": { \"primary_ip\" : \"172.16.17.32\", \"secondary_ips\" : []} , \"running_state\"", "51825 , \"host_mem_mb_total\" : 73719 , \"running_vpss\" : 34} ,", "def _r_orders(self,method,url,body,headers): body = \"\"\" { \"get_orders_response\" : { \"status_message\"", "the NOTICE file distributed with # this work for additional", ": null , \"response_display_duration_type\" : \"REGULAR\" , \"setup_messages\" : [\"Using", "to your account.\" , \"If you need to un-cancel the", "\"is_restarted\" : true , \"is_pinging\" : true , \"running_vps_info\" :", "\"distro\" : \"lenny\"}} , \"running_vps_info\" : { \"pings_ok\" : true", "]}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_distributions(self, method,", "null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"Here are", ", \"distro_description\" : \"Ubuntu 8.10 (Intrepid Ibex, from 2008-10)\"} ,", "may obtain a copy of the License at # #", "body = \"\"\" { \"delete_server_response\" : { \"status_message\" : null", "the Apache License, Version 2.0 # (the \"License\"); you may", "\"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\" , \"last_backup_message\" : \"\" 
, \"is_console_login_enabled\" :", "to un-cancel the server please contact our support team.\"] }", "\"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 31 , \"vps_cpu_time_s\"", "are offering on new orders.&nbsp; Note we offer most disk", "\"disk_space_2_mb\" : 0} , \"pricing_plan_code\" : \"MIRO1B\" , \"instantiation_options\" :", "under the License. # Copyright 2009 RedRata Ltd from libcloud.drivers.rimuhosting", "# (the \"License\"); you may not use this file except", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", ", \"vps_parameters\" : { \"memory_mb\" : 160 , \"disk_space_mb\" :", "(Jaunty Jackalope, from 2009-04)\"} , { \"distro_code\" : \"ubuntu804\" ,", "false , \"console_public_authorized_keys\" : null , \"is_backup_running\" : false ,", "unittest import httplib class RimuHostingTest(unittest.TestCase, TestCaseMixin): def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes =", "def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body, headers): body = \"\"\" {", "75 , \"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\" : \"VPS\" ,", "31 , \"vps_cpu_time_s\" : 6 , \"running_state\" : \"RUNNING\" ,", "# Licensed to the Apache Software Foundation (ASF) under one", ": { \"primary_ip\" : \"192.168.3.11\" , \"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]} ,", "19 , \"vps_cpu_time_s\" : 5 , \"running_state\" : \"RUNNING\" ,", ": 88833465 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-88833465-api-ivan-net-nz\"", "\"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\" : \"Intel(R) Xeon(R) CPU E5506 @ 2.13GHz\"", "\"REGULAR\" , \"setup_messages\" : [\"Using user-specified billing data: Wire Transfer\"", "= self.driver.list_nodes() self.assertEqual(len(nodes),1) node = nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\")", "\"172.16.17.32\", \"secondary_ips\" : []} , 
\"running_state\" : \"RUNNING\"} , \"new_order_request\"", "in compliance with # the License. You may obtain a", "pricing plans we are offering on new orders.&nbsp; Note we", ", { \"distro_code\" : \"centos5\" , \"distro_description\" : \"Centos5\"} ,", "(Intrepid Ibex, from 2008-10)\"} , { \"distro_code\" : \"fedora10\" ,", "libcloud.org licenses this file to You under the Apache License,", ": \"\" , \"is_console_login_enabled\" : false , \"console_public_authorized_keys\" : null", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", ": true , \"is_pinging\" : true , \"running_vps_info\" : {", "73719 , \"running_vpss\" : 34} , \"running_state_messages\" : null}} \"\"\"", ": \"order-52255865-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" : false", ": 4096 , \"disk_space_2_mb\" : 0} , \"pricing_plan_code\" : \"MIRO1B\"", "\"monthly_recurring_amt\" : { \"amt\" : 35.0 , \"currency\" : \"CUR_AUD\"", "self.assertEqual(image.name,\"Debian 5.0 (aka Lenny, RimuHosting\"\\ \" recommended distro)\") self.assertEqual(image.id, \"lenny\")", "being shut down.\" , \"A $7.98 credit has been added", "\"billing_info\" : { } , \"allocated_ips\" : { \"primary_ip\" :", "as the owner of the billing details: <NAME>\" , \"No", "test import MockHttp from test import MockHttp, TestCaseMixin import unittest", "{ \"order_oid\" : 52255865 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\"", "license agreements. 
See the NOTICE file distributed with # this", "null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"api.ivan.net.nz restarted.", ": \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 31 ,", "are an NZ-based customer then we would need to add", "\"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\" : \"Intel(R) Xeon(R)", "null , \"status_code\" : 200 , \"error_info\" : null ,", "\"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\" : \"VPS\" , \"offered_at_data_center\" :", "\"\"\" { \"post_new_vps_response\" : { \"status_message\" : null , \"status_code\"", ", \"pricing_plan_code\" : \"MIRO1B\" , \"instantiation_options\" : { \"control_panel\" :", ": []} , \"running_state\" : \"RUNNING\"} , \"new_order_request\" : {", ", \"default_kernel_i386\" : \"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\"", "\"data_transfer_gb\" : 30 , \"data_transfer\" : \"30\"} , \"billing_info\" :", "\"minimum_data_transfer_allowance_gb\" : 75 , \"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\" :", ": \"Found 15 orders\" , \"response_display_duration_type\" : \"REGULAR\", \"about_orders\" :", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "the billing details: <NAME>\" , \"No VPS paramters provided, using", ": { \"pings_ok\" : true , \"current_kernel\" : \"default\" ,", "self.driver.list_nodes()[0] self.driver.reboot_node(node) def test_destroy_node(self): # Raises exception on failure node", ": 32.54 , \"monthly_recurring_amt\" : { \"amt\" : 35.0 ,", "new server feel free to vary these (e.g. different memory,", "regarding copyright ownership. 
# libcloud.org licenses this file to You", "the owner of the billing details: <NAME>\" , \"No VPS", "52255865 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-52255865-api-ivan-net-nz\" ,", ": 31 , \"vps_cpu_time_s\" : 6 , \"running_state\" : \"RUNNING\"", "OK.\" , \"response_display_duration_type\" : \"REGULAR\" , \"is_restarted\" : true ,", ", \"host_mem_mb_free\" : 51825 , \"host_mem_mb_total\" : 73719 , \"running_vpss\"", "method, url, body, headers): body = \"\"\" { \"delete_server_response\" :", "contributor license agreements. See the NOTICE file distributed with #", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "License. # Copyright 2009 RedRata Ltd from libcloud.drivers.rimuhosting import RimuHostingNodeDriver", "TestCaseMixin import unittest import httplib class RimuHostingTest(unittest.TestCase, TestCaseMixin): def setUp(self):", "\"centos5\" , \"distro_description\" : \"Centos5\"} , { \"distro_code\" : \"ubuntu904\"", "self.driver.list_images() self.assertEqual(len(images),6) image = images[0] self.assertEqual(image.name,\"Debian 5.0 (aka Lenny, RimuHosting\"\\", ": \"REGULAR\", \"about_orders\" : [{ \"order_oid\" : 88833465 , \"domain_name\"", "2009-04)\"} , { \"distro_code\" : \"ubuntu804\" , \"distro_description\" : \"Ubuntu", ", \"If you need to un-cancel the server please contact", "httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url, body, headers): body = \"\"\"", "recommended distro)\") self.assertEqual(image.id, \"lenny\") def test_reboot_node(self): # Raises exception on", "return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers): body = \"\"\"", "\"response_display_duration_type\" : \"REGULAR\" , \"pricing_plan_infos\" : [{ \"pricing_plan_code\" : \"MiroVPSLowContention\"", "\"password\" : \"<PASSWORD>\" , \"distro\" : \"lenny\"}} , \"running_vps_info\" :", "(aka Lenny, RimuHosting\"\\ \" recommended 
distro)\") self.assertEqual(image.id, \"lenny\") def test_reboot_node(self):", "\"distro_code\" : \"ubuntu810\" , \"distro_description\" : \"Ubuntu 8.10 (Intrepid Ibex,", "0.28] , \"host_uptime_s\" : 3378276 , \"host_mem_mb_free\" : 51825 ,", "null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"Found 15", "null , \"vps_order_oid_to_clone\" : 0 , \"ip_request\" : { \"num_ips\"", "self.assertEqual(len(nodes),1) node = nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465)", ", \"server_type\" : \"VPS\" , \"data_transfer_allowance\" : { \"data_transfer_gb\" :", "GST.\" , \"response_display_duration_type\" : \"REGULAR\" , \"pricing_plan_infos\" : [{ \"pricing_plan_code\"", "are offering on new orders.\" , \"response_display_duration_type\" : \"REGULAR\" ,", "billing data: Wire Transfer\" , \"Selected user as the owner", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "self.assertEqual(node.extra['order_oid'], 88833465) self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def test_list_sizes(self): sizes = self.driver.list_sizes() self.assertEqual(len(sizes),1)", "Raises exception on failure node = self.driver.list_nodes()[0] self.driver.reboot_node(node) def test_destroy_node(self):", "\"ip_request\" : { \"num_ips\" : 1, \"extra_ip_reason\" : \"\"} ,", ", \"host_server_oid\" : \"764\" , \"server_type\" : \"VPS\" , \"data_transfer_allowance\"", "on new orders.\" , \"response_display_duration_type\" : \"REGULAR\" , \"distro_infos\" :", ": \"api.ivan.net.nz\" , \"password\" : \"<PASSWORD>\" , \"distro\" : \"lenny\"}}", "\"running_state\" : \"RUNNING\" , \"is_suspended\" : false} , \"host_server_info\" :", "VPS paramters provided, using default values.\"] , \"about_order\" : {", "= images[0] self.assertEqual(image.name,\"Debian 5.0 (aka Lenny, RimuHosting\"\\ \" recommended distro)\")", ": { \"status_message\" : null , 
\"status_code\" : 200 ,", ", \"disk_space_2_mb\" : 0} , \"host_server_oid\" : \"764\" , \"server_type\"", "\"vps_uptime_s\" : 19 , \"vps_cpu_time_s\" : 5 , \"running_state\" :", "file except in compliance with # the License. You may", "\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url,", "{ \"status_message\" : null , \"status_code\" : 200 , \"error_info\"", ": \"api.ivan.net.nz\" , \"slug\" : \"order-88833465-api-ivan-net-nz\" , \"billing_oid\" : 96122465", "true , \"default_kernel_i386\" : \"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\" ,", ", \"error_info\" : null , \"response_type\" : \"OK\" , \"human_readable_message\"", "Raises exception on failure node = self.driver.list_nodes()[0] self.driver.destroy_node(node) def test_create_node(self):", "this file except in compliance with # the License. You", "customer then we would need to add GST.\" , \"response_display_duration_type\"", "\"order_oid\" : 88833465 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" :", ", \"billing_info\" : { } , \"allocated_ips\" : { \"primary_ip\"", "(Hardy Heron, 5 yr long term support (LTS))\"} , {", "body = \"\"\" {\"get_pricing_plans_response\" : { \"status_message\" : null ,", "Wire Transfer\" , \"Selected user as the owner of the", ", \"distro_description\" : \"Centos5\"} , { \"distro_code\" : \"ubuntu904\" ,", ", \"currency\" : \"CUR_AUD\" ,\"amt_usd\" : 32.54} , \"minimum_memory_mb\" :", ": \"api.ivan.net.nz restarted. After the reboot api.ivan.net.nz is pinging OK.\"", "\"primary_ip\" : \"192.168.3.11\" , \"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\" :", "_r_distributions(self, method, url, body, headers): body = \"\"\" { \"get_distros_response\"", "your account.\" , \"If you need to un-cancel the server", "\"control_panel\" : \"webmin\" , \"domain_name\" : \"api.ivan.net.nz\" , \"password\" :", "copyright ownership. 
# libcloud.org licenses this file to You under", ": \"REGULAR\" , \"distro_infos\" : [{ \"distro_code\" : \"lenny\" ,", "\"api.ivan.net.nz\" , \"slug\" : \"order-88833465-api-ivan-net-nz\" , \"billing_oid\" : 96122465 ,", "\"default_kernel_x86_64\" : \"2.6.30.5-xenU.x86_64\" , \"cpu_model_name\" : \"Intel(R) Xeon(R) CPU E5506", "Apache License, Version 2.0 # (the \"License\"); you may not", "def test_list_sizes(self): sizes = self.driver.list_sizes() self.assertEqual(len(sizes),1) size = sizes[0] self.assertEqual(size.ram,950)", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "\"Debian 5.0 (aka Lenny, RimuHosting recommended distro)\"} , { \"distro_code\"", "1256446800000, \"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" :", "\"slug\" : \"order-52255865-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" :", "{}, httplib.responses[httplib.OK]) def _r_orders_new_vps(self, method, url, body, headers): body =", ", \"Selected user as the owner of the billing details:", "need to add GST.\" , \"response_display_duration_type\" : \"REGULAR\" , \"pricing_plan_infos\"", "{\"get_pricing_plans_response\" : { \"status_message\" : null , \"status_code\" : 200", "node = self.driver.list_nodes()[0] self.driver.destroy_node(node) def test_create_node(self): # Raises exception on", ", \"slug\" : \"order-52255865-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\"", "permissions and # limitations under the License. 
# Copyright 2009", "\"disk_space_mb\" : 4096 , \"disk_space_2_mb\" : 0} , \"pricing_plan_code\" :", "= \"\"\" { \"delete_server_response\" : { \"status_message\" : null ,", ": { } , \"allocated_ips\" : { \"primary_ip\" : \"172.16.17.32\",", "\"minimum_disk_gb\" : 20 , \"minimum_data_transfer_allowance_gb\" : 75 , \"see_also_url\" :", ", \"monthly_recurring_amt\" : { \"amt\" : 35.0 , \"currency\" :", ": \"centos5\" , \"distro_description\" : \"Centos5\"} , { \"distro_code\" :", ", \"next_backup_time\" : { \"ms_since_epoch\": 1256446800000, \"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\"", "or implied. # See the License for the specific language", ": \"DCDALLAS\" , \"data_center_location_name\" : \"Dallas\"}} ]}} \"\"\" return (httplib.OK,", "the server please contact our support team.\"] } } \"\"\"", ", \"vps_cpu_time_s\" : 5 , \"running_state\" : \"RUNNING\" , \"is_suspended\"", ", \"is_console_login_enabled\" : false , \"console_public_authorized_keys\" : null , \"is_backup_running\"", "Jackalope, from 2009-04)\"} , { \"distro_code\" : \"ubuntu804\" , \"distro_description\"", ": 73719 , \"running_vpss\" : 34} , \"running_state_messages\" : null}}", "[\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\" : \"RUNNING\"}]}}\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK])", ", \"domain_name\" : \"api.ivan.net.nz\" , \"password\" : \"<PASSWORD>\" , \"distro\"", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "{ } , \"allocated_ips\" : { \"primary_ip\" : \"192.168.3.11\" ,", "{ \"get_distros_response\" : { \"status_message\" : null , \"status_code\" :", "offering on new orders.\" , \"response_display_duration_type\" : \"REGULAR\" , \"distro_infos\"", "RimuHosting\"\\ \" recommended distro)\") self.assertEqual(image.id, \"lenny\") def test_reboot_node(self): # Raises", ": \"\"} , \"vps_parameters\" : { \"memory_mb\" : 160 ,", "_r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body, headers): body = \"\"\" { \"delete_server_response\"", "self.driver.list_sizes()[0] image = self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size) class RimuHostingMockHttp(MockHttp): def", "\"instantiation_options\" : { \"control_panel\" : \"webmin\" , \"domain_name\" : \"api.ivan.net.nz\"", "\"running_vpss\" : 34} , \"running_state_messages\" : null}} \"\"\" return (httplib.OK,", "\"License\"); you may not use this file except in compliance", "except in compliance with # the License. 
You may obtain", "\"get_orders_response\" : { \"status_message\" : null , \"status_code\" : 200", ": false , \"vps_parameters\" : { \"memory_mb\" : 160 ,", "{ \"post_new_vps_response\" : { \"status_message\" : null , \"status_code\" :", ": true , \"next_backup_time\" : { \"ms_since_epoch\": 1256446800000, \"iso_format\" :", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "6 , \"running_state\" : \"RUNNING\" , \"is_suspended\" : false}}} \"\"\"", "{ \"data_center_location_code\" : \"DCDALLAS\" , \"data_center_location_name\" : \"Dallas\"}} ]}} \"\"\"", "file to You under the Apache License, Version 2.0 #", ": \"<PASSWORD>\" , \"distro\" : \"lenny\"}} , \"running_vps_info\" : {", "= nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465) self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\")", "15 orders\" , \"response_display_duration_type\" : \"REGULAR\", \"about_orders\" : [{ \"order_oid\"", ", \"is_restarted\" : true , \"is_pinging\" : true , \"running_vps_info\"", ": 0 , \"ip_request\" : { \"num_ips\" : 1, \"extra_ip_reason\"", "need to un-cancel the server please contact our support team.\"]", "\"api.ivan.net.nz\" , \"slug\" : \"order-52255865-api-ivan-net-nz\" , \"billing_oid\" : 96122465 ,", ": \"ubuntu804\" , \"distro_description\" : \"Ubuntu 8.04 (Hardy Heron, 5", "\"is_suspended\" : false} , \"host_server_info\" : { \"is_host64_bit_capable\" : true", "\"server_type\" : \"VPS\" , \"data_transfer_allowance\" : { \"data_transfer_gb\" : 30", "details: <NAME>\" , \"No VPS paramters provided, using default values.\"]", ", { \"distro_code\" : \"fedora10\" , \"distro_description\" : \"Fedora 10\"}]}}", ", \"No VPS paramters provided, using default values.\"] , \"about_order\"", "self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def 
test_list_images(self): images = self.driver.list_images() self.assertEqual(len(images),6)", ", \"cancel_messages\" : [\"api.ivan.net.nz is being shut down.\" , \"A", ": 32.54} , \"minimum_memory_mb\" : 950 , \"minimum_disk_gb\" : 20", ", \"host_server_info\" : { \"is_host64_bit_capable\" : true , \"default_kernel_i386\" :", "\"lenny\"}} , \"running_vps_info\" : { \"pings_ok\" : true , \"current_kernel\"", ": true , \"running_vps_info\" : { \"pings_ok\" : true ,", "under one or more # contributor license agreements. See the", ", \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-88833465-api-ivan-net-nz\" , \"billing_oid\"", "and we will just adjust the pricing to suit.&nbsp; Pricing", "{ \"amt\" : 35.0 , \"currency\" : \"CUR_AUD\" ,\"amt_usd\" :", "the distros we are offering on new orders.\" , \"response_display_duration_type\"", "url, body, headers): body = \"\"\" { \"delete_server_response\" : {", "\"memory_mb\" : 160 , \"disk_space_mb\" : 4096 , \"disk_space_2_mb\" :", ": \"OK\" , \"human_readable_message\" : \"Here are the distros we", ": 0 , \"host_server_oid\" : null , \"vps_order_oid_to_clone\" : 0", "size = sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def test_list_images(self): images", "\"is_backups_enabled\" : true , \"next_backup_time\" : { \"ms_since_epoch\": 1256446800000, \"iso_format\"", "self.assertEqual(len(images),6) image = images[0] self.assertEqual(image.name,\"Debian 5.0 (aka Lenny, RimuHosting\"\\ \"", "NOTICE file distributed with # this work for additional information", "= self.driver.list_sizes()[0] image = self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size) class RimuHostingMockHttp(MockHttp):", "\"host_server_info\" : { \"is_host64_bit_capable\" : true , \"default_kernel_i386\" : \"2.6.30.5-xenU.i386\"", "\"Fedora 10\"}]}} \"\"\" return (httplib.OK, body, {}, 
httplib.responses[httplib.OK]) def _r_orders_new_vps(self,", "\"ubuntu904\" , \"distro_description\" : \"Ubuntu 9.04 (Jaunty Jackalope, from 2009-04)\"}", ": \"default\" , \"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\" , \"last_backup_message\" : \"\"", "46800000} , \"vps_uptime_s\" : 31 , \"vps_cpu_time_s\" : 6 ,", "\"host_mem_mb_free\" : 51825 , \"host_mem_mb_total\" : 73719 , \"running_vpss\" :", "_r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url, body, headers): body = \"\"\" { \"put_running_state_response\"", ", \"running_state_messages\" : null}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK])", "\"ubuntu804\" , \"distro_description\" : \"Ubuntu 8.04 (Hardy Heron, 5 yr", "0 , \"host_server_oid\" : null , \"vps_order_oid_to_clone\" : 0 ,", ": { \"memory_mb\" : 160 , \"disk_space_mb\" : 4096 ,", ": 950 , \"minimum_disk_gb\" : 20 , \"minimum_data_transfer_allowance_gb\" : 75", "# # Unless required by applicable law or agreed to", "been added to your account.\" , \"If you need to", ", \"server_type\" : \"VPS\" , \"offered_at_data_center\" : { \"data_center_location_code\" :", "Server (Dallas)\" , \"monthly_recurring_fee\" : 32.54 , \"monthly_recurring_amt\" : {", "setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes = (None, RimuHostingMockHttp) self.driver = RimuHostingNodeDriver('foo') def test_list_nodes(self):", "\"about_orders\" : [{ \"order_oid\" : 88833465 , \"domain_name\" : \"api.ivan.net.nz\"", "\"human_readable_message\" : null , \"response_display_duration_type\" : \"REGULAR\" , \"setup_messages\" :", "\" recommended distro)\") self.assertEqual(image.id, \"lenny\") def test_reboot_node(self): # Raises exception", "\"host_server_oid\" : \"764\" , \"server_type\" : \"VPS\" , \"data_transfer_allowance\" :", ": { \"order_oid\" : 52255865 , \"domain_name\" : \"api.ivan.net.nz\" ,", "\"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 19", "file 
distributed with # this work for additional information regarding", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "self.assertEqual(size.price,32.54) def test_list_images(self): images = self.driver.list_images() self.assertEqual(len(images),6) image = images[0]", "\"Here are the distros we are offering on new orders.\"", "self.driver.list_nodes()[0] self.driver.destroy_node(node) def test_create_node(self): # Raises exception on failure size", ": \"lenny\"}} , \"running_vps_info\" : { \"pings_ok\" : true ,", "\"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 19 , \"vps_cpu_time_s\"", ", \"response_type\" : \"OK\" , \"human_readable_message\" : \"Here some pricing", ", \"allocated_ips\" : { \"primary_ip\" : \"192.168.3.11\" , \"secondary_ips\" :", ": 52255865 , \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-52255865-api-ivan-net-nz\"", ", \"running_state\" : \"RUNNING\"}]}}\"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def", ": { \"billing_oid\" : 96122465 , \"user_oid\" : 0 ,", "headers): body = \"\"\" { \"put_running_state_response\" : { \"status_message\" :", "RimuHostingNodeDriver.connectionCls.conn_classes = (None, RimuHostingMockHttp) self.driver = RimuHostingNodeDriver('foo') def test_list_nodes(self): nodes", "self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def test_list_images(self): images = self.driver.list_images() self.assertEqual(len(images),6) image =", "Software Foundation (ASF) under one or more # contributor license", "reboot api.ivan.net.nz is pinging OK.\" , \"response_display_duration_type\" : \"REGULAR\" ,", ": [{ \"distro_code\" : \"lenny\" , \"distro_description\" : \"Debian 5.0", "(None, RimuHostingMockHttp) self.driver = RimuHostingNodeDriver('foo') def test_list_nodes(self): nodes = self.driver.list_nodes()", "headers): body = \"\"\" { \"post_new_vps_response\" : { \"status_message\" :", ": { \"ms_since_epoch\": 
1256446800000, \"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000}", "2008-10)\"} , { \"distro_code\" : \"fedora10\" , \"distro_description\" : \"Fedora", "exception on failure size = self.driver.list_sizes()[0] image = self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\",", "implied. # See the License for the specific language governing", "(e.g. different memory, disk, etc) and we will just adjust", "32.54} , \"minimum_memory_mb\" : 950 , \"minimum_disk_gb\" : 20 ,", "1 , \"host_xen_version\" : \"3.4.1\" , \"hostload\" : [1.45 ,", "\"ubuntu810\" , \"distro_description\" : \"Ubuntu 8.10 (Intrepid Ibex, from 2008-10)\"}", "\"pricing_plan_infos\" : [{ \"pricing_plan_code\" : \"MiroVPSLowContention\" , \"pricing_plan_description\" : \"MiroVPS", "\"domain_name\" : \"api.ivan.net.nz\" , \"password\" : \"<PASSWORD>\" , \"distro\" :", "etc) and we will just adjust the pricing to suit.&nbsp;", "are the distros we are offering on new orders.\" ,", "20 , \"minimum_data_transfer_allowance_gb\" : 75 , \"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" ,", ", \"human_readable_message\" : \"Server removed\" , \"response_display_duration_type\" : \"REGULAR\" ,", "\"OK\" , \"human_readable_message\" : \"Here are the distros we are", "return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_distributions(self, method, url, body,", "our support team.\"] } } \"\"\" return (httplib.OK, body, {},", "\"fedora10\" , \"distro_description\" : \"Fedora 10\"}]}} \"\"\" return (httplib.OK, body,", ": 1 , \"host_xen_version\" : \"3.4.1\" , \"hostload\" : [1.45", ", \"vps_uptime_s\" : 19 , \"vps_cpu_time_s\" : 5 , \"running_state\"", "we are offering on new orders.\" , \"response_display_duration_type\" : \"REGULAR\"", "\"Here some pricing plans we are offering on new orders.&nbsp;", "by applicable law or agreed to in writing, software #", "\"REGULAR\" , \"pricing_plan_infos\" : [{ 
\"pricing_plan_code\" : \"MiroVPSLowContention\" , \"pricing_plan_description\"", "test_list_sizes(self): sizes = self.driver.list_sizes() self.assertEqual(len(sizes),1) size = sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20)", "not use this file except in compliance with # the", ", \"human_readable_message\" : \"Found 15 orders\" , \"response_display_duration_type\" : \"REGULAR\",", ": \"764\" , \"server_type\" : \"VPS\" , \"data_transfer_allowance\" : {", "USD.&nbsp; If you are an NZ-based customer then we would", ": { \"num_ips\" : 1, \"extra_ip_reason\" : \"\"} , \"vps_parameters\"", "\"setup_messages\" : [\"Using user-specified billing data: Wire Transfer\" , \"Selected", "# Raises exception on failure size = self.driver.list_sizes()[0] image =", "4096 , \"disk_space_2_mb\" : 0} , \"pricing_plan_code\" : \"MIRO1B\" ,", ": \"webmin\" , \"domain_name\" : \"api.ivan.net.nz\" , \"password\" : \"<PASSWORD>\"", "url, body, headers): body = \"\"\" { \"get_distros_response\" : {", ": 96122465 , \"is_on_customers_own_physical_server\" : false , \"vps_parameters\" : {", "1, \"extra_ip_reason\" : \"\"} , \"vps_parameters\" : { \"memory_mb\" :", "these (e.g. 
different memory, disk, etc) and we will just", "\"response_display_duration_type\" : \"REGULAR\" , \"is_restarted\" : true , \"is_pinging\" :", "the Apache Software Foundation (ASF) under one or more #", "null , \"response_display_duration_type\" : \"REGULAR\" , \"setup_messages\" : [\"Using user-specified", "body = \"\"\" { \"post_new_vps_response\" : { \"status_message\" : null", "MockHttp from test import MockHttp, TestCaseMixin import unittest import httplib", ": \"order-88833465-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" : false", ", \"instantiation_options\" : { \"control_panel\" : \"webmin\" , \"domain_name\" :", "(httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers): body = \"\"\" {\"get_pricing_plans_response\"", "\"num_ips\" : 1, \"extra_ip_reason\" : \"\"} , \"vps_parameters\" : {", "self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465) self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def test_list_sizes(self): sizes =", "sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def test_list_images(self): images = self.driver.list_images()", ", \"disk_space_mb\" : 4096 , \"disk_space_2_mb\" : 0} , \"host_server_oid\"", "\"pricing_plan_code\" : \"MiroVPSLowContention\" , \"pricing_plan_description\" : \"MiroVPS Semi-Dedicated Server (Dallas)\"", "Xeon(R) CPU E5506 @ 2.13GHz\" , \"host_num_cores\" : 1 ,", "\"distro_code\" : \"ubuntu804\" , \"distro_description\" : \"Ubuntu 8.04 (Hardy Heron,", ": false} , \"host_server_info\" : { \"is_host64_bit_capable\" : true ,", ", \"data_transfer\" : \"30\"} , \"billing_info\" : { } ,", "{ \"control_panel\" : \"webmin\" , \"domain_name\" : \"api.ivan.net.nz\" , \"password\"", "\"<PASSWORD>\" , \"distro\" : \"lenny\"}} , \"running_vps_info\" : { \"pings_ok\"", "@ 2.13GHz\" , 
\"host_num_cores\" : 1 , \"host_xen_version\" : \"3.4.1\"", ", \"host_uptime_s\" : 3378276 , \"host_mem_mb_free\" : 51825 , \"host_mem_mb_total\"", ": \"ubuntu904\" , \"distro_description\" : \"Ubuntu 9.04 (Jaunty Jackalope, from", ", \"running_vpss\" : 34} , \"running_state_messages\" : null}} \"\"\" return", ": \"192.168.3.11\" , \"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\" : \"RUNNING\"}]}}\"\"\"", "Ltd from libcloud.drivers.rimuhosting import RimuHostingNodeDriver from test import MockHttp from", "\"lenny\" , \"distro_description\" : \"Debian 5.0 (aka Lenny, RimuHosting recommended", ", 0.56 , 0.28] , \"host_uptime_s\" : 3378276 , \"host_mem_mb_free\"", "\"host_mem_mb_total\" : 73719 , \"running_vpss\" : 34} , \"running_state_messages\" :", "\"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-88833465-api-ivan-net-nz\" , \"billing_oid\" :", "disk, etc) and we will just adjust the pricing to", "self.driver.list_sizes() self.assertEqual(len(sizes),1) size = sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def", ", \"human_readable_message\" : \"Here some pricing plans we are offering", "yr long term support (LTS))\"} , { \"distro_code\" : \"ubuntu810\"", ": \"REGULAR\" , \"is_restarted\" : true , \"is_pinging\" : true", "\"extra_ip_reason\" : \"\"} , \"vps_parameters\" : { \"memory_mb\" : 160", "2.13GHz\" , \"host_num_cores\" : 1 , \"host_xen_version\" : \"3.4.1\" ,", ", \"is_backups_enabled\" : true , \"next_backup_time\" : { \"ms_since_epoch\": 1256446800000,", "\"put_running_state_response\" : { \"status_message\" : null , \"status_code\" : 200", ": \"fedora10\" , \"distro_description\" : \"Fedora 10\"}]}} \"\"\" return (httplib.OK,", "(httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_distributions(self, method, url, body, headers):", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "of the billing details: 
<NAME>\" , \"No VPS paramters provided,", ", \"domain_name\" : \"api.ivan.net.nz\" , \"slug\" : \"order-52255865-api-ivan-net-nz\" , \"billing_oid\"", "Unless required by applicable law or agreed to in writing,", "body = \"\"\" { \"get_orders_response\" : { \"status_message\" : null", "import MockHttp, TestCaseMixin import unittest import httplib class RimuHostingTest(unittest.TestCase, TestCaseMixin):", ": \"VPS\" , \"offered_at_data_center\" : { \"data_center_location_code\" : \"DCDALLAS\" ,", "\"get_distros_response\" : { \"status_message\" : null , \"status_code\" : 200", ", \"A $7.98 credit has been added to your account.\"", "\"Intel(R) Xeon(R) CPU E5506 @ 2.13GHz\" , \"host_num_cores\" : 1", "= RimuHostingNodeDriver('foo') def test_list_nodes(self): nodes = self.driver.list_nodes() self.assertEqual(len(nodes),1) node =", "E5506 @ 2.13GHz\" , \"host_num_cores\" : 1 , \"host_xen_version\" :", "\"No VPS paramters provided, using default values.\"] , \"about_order\" :", "support team.\"] } } \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK])", "vary these (e.g. different memory, disk, etc) and we will", "from 2008-10)\"} , { \"distro_code\" : \"fedora10\" , \"distro_description\" :", "160 , \"disk_space_mb\" : 4096 , \"disk_space_2_mb\" : 0} ,", "\"\"\" {\"get_pricing_plans_response\" : { \"status_message\" : null , \"status_code\" :", "\"order-52255865-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" : false ,", "the specific language governing permissions and # limitations under the", "\"vps_parameters\" : { \"memory_mb\" : 160 , \"disk_space_mb\" : 4096", ": { \"amt\" : 35.0 , \"currency\" : \"CUR_AUD\" ,\"amt_usd\"", "Ibex, from 2008-10)\"} , { \"distro_code\" : \"fedora10\" , \"distro_description\"", "nodes = self.driver.list_nodes() self.assertEqual(len(nodes),1) node = nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1],", "# limitations under the License. 
# Copyright 2009 RedRata Ltd", "failure node = self.driver.list_nodes()[0] self.driver.destroy_node(node) def test_create_node(self): # Raises exception", "body, {}, httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers): body = \"\"\" {\"get_pricing_plans_response\" :", "If you are an NZ-based customer then we would need", "null , \"is_backup_running\" : false , \"is_backups_enabled\" : true ,", ": [\"api.ivan.net.nz is being shut down.\" , \"A $7.98 credit", "account.\" , \"If you need to un-cancel the server please", "\"currency\" : \"CUR_AUD\" ,\"amt_usd\" : 32.54} , \"minimum_memory_mb\" : 950", ", \"is_suspended\" : false} , \"host_server_info\" : { \"is_host64_bit_capable\" :", "applicable law or agreed to in writing, software # distributed", "we will just adjust the pricing to suit.&nbsp; Pricing is", "test_create_node(self): # Raises exception on failure size = self.driver.list_sizes()[0] image", "most disk and memory sizes.&nbsp; So if you setup a", "\"192.168.3.11\" , \"secondary_ips\" : [\"172.16.17.32\",\"172.16.31.10\"]} , \"running_state\" : \"RUNNING\"}]}}\"\"\" return", "[{ \"distro_code\" : \"lenny\" , \"distro_description\" : \"Debian 5.0 (aka", "{ \"distro_code\" : \"centos5\" , \"distro_description\" : \"Centos5\"} , {", "\"hostload\" : [1.45 , 0.56 , 0.28] , \"host_uptime_s\" :", "= \"\"\" { \"post_new_vps_response\" : { \"status_message\" : null ,", "self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size) class RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers): body =", "shut down.\" , \"A $7.98 credit has been added to", ": false}}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self,", "\"\"\" { \"put_running_state_response\" : { \"status_message\" : null , \"status_code\"", "images = self.driver.list_images() self.assertEqual(len(images),6) image = images[0] 
self.assertEqual(image.name,\"Debian 5.0 (aka", "# contributor license agreements. See the NOTICE file distributed with", "def test_destroy_node(self): # Raises exception on failure node = self.driver.list_nodes()[0]", "# this work for additional information regarding copyright ownership. #", "in writing, software # distributed under the License is distributed", "plans we are offering on new orders.&nbsp; Note we offer", "def _r_distributions(self, method, url, body, headers): body = \"\"\" {", "\"distro_description\" : \"Ubuntu 8.10 (Intrepid Ibex, from 2008-10)\"} , {", "\"RUNNING\" , \"is_suspended\" : false} , \"host_server_info\" : { \"is_host64_bit_capable\"", "with # the License. You may obtain a copy of", ": null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"Found", "\"REGULAR\" , \"cancel_messages\" : [\"api.ivan.net.nz is being shut down.\" ,", "size=size) class RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers): body = \"\"\" { \"get_orders_response\"", ", \"setup_messages\" : [\"Using user-specified billing data: Wire Transfer\" ,", ": null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"Here", "4096 , \"disk_space_2_mb\" : 0} , \"host_server_oid\" : \"764\" ,", "\"response_type\" : \"OK\" , \"human_readable_message\" : \"Server removed\" , \"response_display_duration_type\"", "\"response_type\" : \"OK\" , \"human_readable_message\" : \"Found 15 orders\" ,", "\"host_num_cores\" : 1 , \"host_xen_version\" : \"3.4.1\" , \"hostload\" :", "\"host_xen_version\" : \"3.4.1\" , \"hostload\" : [1.45 , 0.56 ,", "this file to You under the Apache License, Version 2.0", "\"\"\" { \"get_distros_response\" : { \"status_message\" : null , \"status_code\"", "\"secondary_ips\" : []} , \"running_state\" : \"RUNNING\"} , \"new_order_request\" :", "body, {}, httplib.responses[httplib.OK]) def _r_distributions(self, method, url, body, headers): body", "to vary these (e.g. 
different memory, disk, etc) and we", "RimuHostingTest(unittest.TestCase, TestCaseMixin): def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes = (None, RimuHostingMockHttp) self.driver =", ", \"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\" , \"last_backup_message\" : \"\" , \"is_console_login_enabled\"", "\"amt\" : 35.0 , \"currency\" : \"CUR_AUD\" ,\"amt_usd\" : 32.54}", "disk and memory sizes.&nbsp; So if you setup a new", ", \"console_public_authorized_keys\" : null , \"is_backup_running\" : false , \"is_backups_enabled\"", "def _r_pricing_plans(self,method,url,body,headers): body = \"\"\" {\"get_pricing_plans_response\" : { \"status_message\" :", ": \"CUR_AUD\" ,\"amt_usd\" : 32.54} , \"minimum_memory_mb\" : 950 ,", "httplib class RimuHostingTest(unittest.TestCase, TestCaseMixin): def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes = (None, RimuHostingMockHttp)", "\"default\" , \"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\" , \"last_backup_message\" : \"\" ,", "governing permissions and # limitations under the License. 
# Copyright", "0} , \"pricing_plan_code\" : \"MIRO1B\" , \"instantiation_options\" : { \"control_panel\"", "950 , \"minimum_disk_gb\" : 20 , \"minimum_data_transfer_allowance_gb\" : 75 ,", ", \"vps_order_oid_to_clone\" : 0 , \"ip_request\" : { \"num_ips\" :", "values.\"] , \"about_order\" : { \"order_oid\" : 52255865 , \"domain_name\"", "distro)\"} , { \"distro_code\" : \"centos5\" , \"distro_description\" : \"Centos5\"}", "\"distro_description\" : \"Ubuntu 9.04 (Jaunty Jackalope, from 2009-04)\"} , {", "return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_new_vps(self, method, url, body,", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "\"running_state\" : \"RUNNING\"} , \"new_order_request\" : { \"billing_oid\" : 96122465", "\"last_backup_message\" : \"\" , \"is_console_login_enabled\" : false , \"console_public_authorized_keys\" :", "headers): body = \"\"\" { \"delete_server_response\" : { \"status_message\" :", ", \"minimum_data_transfer_allowance_gb\" : 75 , \"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\"", "\"delete_server_response\" : { \"status_message\" : null , \"status_code\" : 200", "agreements. 
See the NOTICE file distributed with # this work", "Foundation (ASF) under one or more # contributor license agreements.", "size = self.driver.list_sizes()[0] image = self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size) class", "httplib.responses[httplib.OK]) def _r_pricing_plans(self,method,url,body,headers): body = \"\"\" {\"get_pricing_plans_response\" : { \"status_message\"", ", \"current_kernel\" : \"default\" , \"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\" , \"last_backup_message\"", "failure size = self.driver.list_sizes()[0] image = self.driver.list_images()[0] self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size)", "After the reboot api.ivan.net.nz is pinging OK.\" , \"response_display_duration_type\" :", ", \"new_order_request\" : { \"billing_oid\" : 96122465 , \"user_oid\" :", "\"is_console_login_enabled\" : false , \"console_public_authorized_keys\" : null , \"is_backup_running\" :", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "= \"\"\" { \"put_running_state_response\" : { \"status_message\" : null ,", ": [{ \"pricing_plan_code\" : \"MiroVPSLowContention\" , \"pricing_plan_description\" : \"MiroVPS Semi-Dedicated", "\"api.ivan.net.nz\" , \"password\" : \"<PASSWORD>\" , \"distro\" : \"lenny\"}} ,", "\"disk_space_2_mb\" : 0} , \"host_server_oid\" : \"764\" , \"server_type\" :", ": null , \"status_code\" : 200 , \"error_info\" : null", ": \"172.16.17.32\", \"secondary_ips\" : []} , \"running_state\" : \"RUNNING\"} ,", ": 0} , \"pricing_plan_code\" : \"MIRO1B\" , \"instantiation_options\" : {", ": \"OK\" , \"human_readable_message\" : \"Server removed\" , \"response_display_duration_type\" :", "please contact our support team.\"] } } \"\"\" return (httplib.OK,", ", \"cpu_model_name\" : \"Intel(R) Xeon(R) CPU E5506 @ 2.13GHz\" ,", ", \"pricing_plan_infos\" : [{ \"pricing_plan_code\" : \"MiroVPSLowContention\" , \"pricing_plan_description\" :", ": 
\"Debian 5.0 (aka Lenny, RimuHosting recommended distro)\"} , {", ": 3378276 , \"host_mem_mb_free\" : 51825 , \"host_mem_mb_total\" : 73719", "\"vps_uptime_s\" : 31 , \"vps_cpu_time_s\" : 6 , \"running_state\" :", "false}}} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method,", ": 46800000} , \"vps_uptime_s\" : 31 , \"vps_cpu_time_s\" : 6", ": \"ubuntu810\" , \"distro_description\" : \"Ubuntu 8.10 (Intrepid Ibex, from", "RimuHostingNodeDriver from test import MockHttp from test import MockHttp, TestCaseMixin", "the License for the specific language governing permissions and #", "some pricing plans we are offering on new orders.&nbsp; Note", "\"MiroVPSLowContention\" , \"pricing_plan_description\" : \"MiroVPS Semi-Dedicated Server (Dallas)\" , \"monthly_recurring_fee\"", ", { \"distro_code\" : \"ubuntu904\" , \"distro_description\" : \"Ubuntu 9.04", "self.assertEqual(len(sizes),1) size = sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def test_list_images(self):", "\"response_display_duration_type\" : \"REGULAR\" , \"setup_messages\" : [\"Using user-specified billing data:", "self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def test_list_sizes(self): sizes = self.driver.list_sizes() self.assertEqual(len(sizes),1) size =", ": \"api.ivan.net.nz\" , \"slug\" : \"order-52255865-api-ivan-net-nz\" , \"billing_oid\" : 96122465", "See the NOTICE file distributed with # this work for", "0 , \"ip_request\" : { \"num_ips\" : 1, \"extra_ip_reason\" :", "} \"\"\" return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method,", "= \"\"\" {\"get_pricing_plans_response\" : { \"status_message\" : null , \"status_code\"", "either express or implied. 
# See the License for the", "just adjust the pricing to suit.&nbsp; Pricing is in USD.&nbsp;", ", \"password\" : \"<PASSWORD>\" , \"distro\" : \"lenny\"}} , \"running_vps_info\"", "\"new_order_request\" : { \"billing_oid\" : 96122465 , \"user_oid\" : 0", "to You under the Apache License, Version 2.0 # (the", "\"\"\" { \"delete_server_response\" : { \"status_message\" : null , \"status_code\"", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "httplib.responses[httplib.OK]) def _r_orders_new_vps(self, method, url, body, headers): body = \"\"\"", "{ \"distro_code\" : \"ubuntu810\" , \"distro_description\" : \"Ubuntu 8.10 (Intrepid", "or more # contributor license agreements. See the NOTICE file", "you are an NZ-based customer then we would need to", "self.driver.create_node(name=\"api.ivan.net.nz\", image=image, size=size) class RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers): body = \"\"\"", ", \"status_code\" : 200 , \"error_info\" : null , \"response_type\"", "= self.driver.list_sizes() self.assertEqual(len(sizes),1) size = sizes[0] self.assertEqual(size.ram,950) self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54)", ", \"distro_description\" : \"Debian 5.0 (aka Lenny, RimuHosting recommended distro)\"}", "8.04 (Hardy Heron, 5 yr long term support (LTS))\"} ,", ": \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\" , \"server_type\" : \"VPS\" , \"offered_at_data_center\" : {", "You under the Apache License, Version 2.0 # (the \"License\");", "5.0 (aka Lenny, RimuHosting\"\\ \" recommended distro)\") self.assertEqual(image.id, \"lenny\") def", "\"\" , \"is_console_login_enabled\" : false , \"console_public_authorized_keys\" : null ,", "# libcloud.org licenses this file to You under the Apache", "(Dallas)\" , \"monthly_recurring_fee\" : 32.54 , \"monthly_recurring_amt\" : { \"amt\"", "\"server_type\" : \"VPS\" , 
\"offered_at_data_center\" : { \"data_center_location_code\" : \"DCDALLAS\"", "\"next_backup_time\" : { \"ms_since_epoch\": 1256446800000, \"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" :", "= \"\"\" { \"get_orders_response\" : { \"status_message\" : null ,", "\"iso_format\" : \"2009-10-25T05:00:00Z\", \"users_tz_offset_ms\" : 46800000} , \"vps_uptime_s\" : 31", ", \"is_backup_running\" : false , \"is_backups_enabled\" : true , \"next_backup_time\"", "= (None, RimuHostingMockHttp) self.driver = RimuHostingNodeDriver('foo') def test_list_nodes(self): nodes =", "\"VPS\" , \"data_transfer_allowance\" : { \"data_transfer_gb\" : 30 , \"data_transfer\"", "self.driver.reboot_node(node) def test_destroy_node(self): # Raises exception on failure node =", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= self.driver.list_images() self.assertEqual(len(images),6) image = images[0] self.assertEqual(image.name,\"Debian 5.0 (aka Lenny,", ": \"REGULAR\" , \"setup_messages\" : [\"Using user-specified billing data: Wire", "def test_create_node(self): # Raises exception on failure size = self.driver.list_sizes()[0]", "\"CUR_AUD\" ,\"amt_usd\" : 32.54} , \"minimum_memory_mb\" : 950 , \"minimum_disk_gb\"", ": \"Ubuntu 8.10 (Intrepid Ibex, from 2008-10)\"} , { \"distro_code\"", ", \"human_readable_message\" : null , \"response_display_duration_type\" : \"REGULAR\" , \"setup_messages\"", ", \"host_xen_version\" : \"3.4.1\" , \"hostload\" : [1.45 , 0.56", ": 20 , \"minimum_data_transfer_allowance_gb\" : 75 , \"see_also_url\" : \"http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention\"", ": 160 , \"disk_space_mb\" : 4096 , \"disk_space_2_mb\" : 0}", "\"is_host64_bit_capable\" : true , \"default_kernel_i386\" : \"2.6.30.5-xenU.i386\" , \"default_kernel_x86_64\" :", "5 , \"running_state\" : \"RUNNING\" , \"is_suspended\" : false} ,", "body, headers): body = \"\"\" { \"get_distros_response\" : { \"status_message\"", 
"\"running_state\" : \"RUNNING\" , \"is_suspended\" : false}}} \"\"\" return (httplib.OK,", ": false , \"console_public_authorized_keys\" : null , \"is_backup_running\" : false", ": 96122465 , \"user_oid\" : 0 , \"host_server_oid\" : null", "\"monthly_recurring_fee\" : 32.54 , \"monthly_recurring_amt\" : { \"amt\" : 35.0", "$7.98 credit has been added to your account.\" , \"If", "work for additional information regarding copyright ownership. # libcloud.org licenses", ": 19 , \"vps_cpu_time_s\" : 5 , \"running_state\" : \"RUNNING\"", ": true , \"current_kernel\" : \"default\" , \"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\"", "{}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url, body, headers): body =", ": null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"api.ivan.net.nz", "{ \"distro_code\" : \"ubuntu904\" , \"distro_description\" : \"Ubuntu 9.04 (Jaunty", "\"A $7.98 credit has been added to your account.\" ,", "{ \"distro_code\" : \"ubuntu804\" , \"distro_description\" : \"Ubuntu 8.04 (Hardy", ": \"3.4.1\" , \"hostload\" : [1.45 , 0.56 , 0.28]", "{ \"delete_server_response\" : { \"status_message\" : null , \"status_code\" :", "the License. 
# Copyright 2009 RedRata Ltd from libcloud.drivers.rimuhosting import", "Pricing is in USD.&nbsp; If you are an NZ-based customer", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "RimuHostingNodeDriver('foo') def test_list_nodes(self): nodes = self.driver.list_nodes() self.assertEqual(len(nodes),1) node = nodes[0]", "34} , \"running_state_messages\" : null}} \"\"\" return (httplib.OK, body, {},", "test_list_nodes(self): nodes = self.driver.list_nodes() self.assertEqual(len(nodes),1) node = nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\")", "_r_orders_new_vps(self, method, url, body, headers): body = \"\"\" { \"post_new_vps_response\"", ": \"VPS\" , \"data_transfer_allowance\" : { \"data_transfer_gb\" : 30 ,", ": \"OK\" , \"human_readable_message\" : \"api.ivan.net.nz restarted. After the reboot", ": \"Ubuntu 9.04 (Jaunty Jackalope, from 2009-04)\"} , { \"distro_code\"", "\"error_info\" : null , \"response_type\" : \"OK\" , \"human_readable_message\" :", "# distributed under the License is distributed on an \"AS", "\"about_order\" : { \"order_oid\" : 52255865 , \"domain_name\" : \"api.ivan.net.nz\"", ": 1, \"extra_ip_reason\" : \"\"} , \"vps_parameters\" : { \"memory_mb\"", "\"is_backup_running\" : false , \"is_backups_enabled\" : true , \"next_backup_time\" :", "# Unless required by applicable law or agreed to in", "null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"Here some", ": null , \"response_type\" : \"OK\" , \"human_readable_message\" : \"Server", "false} , \"host_server_info\" : { \"is_host64_bit_capable\" : true , \"default_kernel_i386\"", "\"OK\" , \"human_readable_message\" : \"Here some pricing plans we are", "body, headers): body = \"\"\" { \"post_new_vps_response\" : { \"status_message\"", "3378276 , \"host_mem_mb_free\" : 51825 , \"host_mem_mb_total\" : 73719 ,", ", \"host_server_oid\" : null , \"vps_order_oid_to_clone\" : 0 , \"ip_request\"", ": [{ \"order_oid\" : 88833465 , \"domain_name\" : 
\"api.ivan.net.nz\" ,", "on failure node = self.driver.list_nodes()[0] self.driver.destroy_node(node) def test_create_node(self): # Raises", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", ": \"Here some pricing plans we are offering on new", "\"pricing_plan_description\" : \"MiroVPS Semi-Dedicated Server (Dallas)\" , \"monthly_recurring_fee\" : 32.54", "api.ivan.net.nz is pinging OK.\" , \"response_display_duration_type\" : \"REGULAR\" , \"is_restarted\"", "import httplib class RimuHostingTest(unittest.TestCase, TestCaseMixin): def setUp(self): RimuHostingNodeDriver.connectionCls.conn_classes = (None,", "information regarding copyright ownership. # libcloud.org licenses this file to", "self.assertEqual(size.disk,20) self.assertEqual(size.bandwidth,75) self.assertEqual(size.price,32.54) def test_list_images(self): images = self.driver.list_images() self.assertEqual(len(images),6) image", "to suit.&nbsp; Pricing is in USD.&nbsp; If you are an", "\"order-88833465-api-ivan-net-nz\" , \"billing_oid\" : 96122465 , \"is_on_customers_own_physical_server\" : false ,", "orders.\" , \"response_display_duration_type\" : \"REGULAR\" , \"distro_infos\" : [{ \"distro_code\"", "License. 
You may obtain a copy of the License at", "5 yr long term support (LTS))\"} , { \"distro_code\" :", "\"current_kernel\" : \"default\" , \"current_kernel_canonical\" : \"2.6.30.5-xenU.i386\" , \"last_backup_message\" :", "from test import MockHttp, TestCaseMixin import unittest import httplib class", "You may obtain a copy of the License at #", "= self.driver.list_nodes()[0] self.driver.destroy_node(node) def test_create_node(self): # Raises exception on failure", "\"DCDALLAS\" , \"data_center_location_name\" : \"Dallas\"}} ]}} \"\"\" return (httplib.OK, body,", "\"distro_infos\" : [{ \"distro_code\" : \"lenny\" , \"distro_description\" : \"Debian", ": \"2.6.30.5-xenU.i386\" , \"last_backup_message\" : \"\" , \"is_console_login_enabled\" : false", "we are offering on new orders.&nbsp; Note we offer most", ", \"human_readable_message\" : \"api.ivan.net.nz restarted. After the reboot api.ivan.net.nz is", "nodes[0] self.assertEqual(node.public_ip[0], \"1.2.3.4\") self.assertEqual(node.public_ip[1], \"1.2.3.5\") self.assertEqual(node.extra['order_oid'], 88833465) self.assertEqual(node.id, \"order-88833465-api-ivan-net-nz\") def", "one or more # contributor license agreements. See the NOTICE", "class RimuHostingMockHttp(MockHttp): def _r_orders(self,method,url,body,headers): body = \"\"\" { \"get_orders_response\" :", "return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method, url, body,", "server please contact our support team.\"] } } \"\"\" return", "\"OK\" , \"human_readable_message\" : \"api.ivan.net.nz restarted. 
After the reboot api.ivan.net.nz", "\"disk_space_mb\" : 4096 , \"disk_space_2_mb\" : 0} , \"host_server_oid\" :", "\"distro_code\" : \"lenny\" , \"distro_description\" : \"Debian 5.0 (aka Lenny,", "32.54 , \"monthly_recurring_amt\" : { \"amt\" : 35.0 , \"currency\"", "= \"\"\" { \"get_distros_response\" : { \"status_message\" : null ,", "\"Centos5\"} , { \"distro_code\" : \"ubuntu904\" , \"distro_description\" : \"Ubuntu", "\"Selected user as the owner of the billing details: <NAME>\"", "\"VPS\" , \"offered_at_data_center\" : { \"data_center_location_code\" : \"DCDALLAS\" , \"data_center_location_name\"", "return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body,", "orders.&nbsp; Note we offer most disk and memory sizes.&nbsp; So" ]
[ "-> None: polygon, point = polygon_with_point before_indexing = point in", "= polygon_with_point assert implication(point in polygon, point in polygon.convex_hull) @given(strategies.polygons_with_points)", "for vertex in hole.vertices) @given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point: Tuple[Polygon, Point]) ->", "implication(point in polygon, point in polygon.convex_hull) @given(strategies.polygons_with_points) def test_indexing(polygon_with_point: Tuple[Polygon,", "all(vertex in polygon for hole in polygon.holes for vertex in", "in polygon.border.vertices) assert all(vertex in polygon for hole in polygon.holes", "@given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point =", "from gon.base import (Point, Polygon) from tests.utils import (equivalence, implication)", "polygon_with_point assert implication(point in polygon, point in polygon.convex_hull) @given(strategies.polygons_with_points) def", "in polygon for vertex in polygon.border.vertices) assert all(vertex in polygon", "None: assert all(vertex in polygon for vertex in polygon.border.vertices) assert", "in polygon polygon.index() after_indexing = point in polygon assert equivalence(before_indexing,", "def test_convex_hull(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point = polygon_with_point", "<gh_stars>1-10 from typing import Tuple from hypothesis import given from", "(Point, Polygon) from tests.utils import (equivalence, implication) from . 
import", "in hole.vertices) @given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon,", "point = polygon_with_point assert implication(point in polygon, point in polygon.convex_hull)", "before_indexing = point in polygon polygon.index() after_indexing = point in", "None: polygon, point = polygon_with_point assert implication(point in polygon, point", "gon.base import (Point, Polygon) from tests.utils import (equivalence, implication) from", "point = polygon_with_point before_indexing = point in polygon polygon.index() after_indexing", "polygon.holes for vertex in hole.vertices) @given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point: Tuple[Polygon, Point])", "def test_vertices(polygon: Polygon) -> None: assert all(vertex in polygon for", "assert all(vertex in polygon for vertex in polygon.border.vertices) assert all(vertex", "@given(strategies.polygons) def test_vertices(polygon: Polygon) -> None: assert all(vertex in polygon", "import (Point, Polygon) from tests.utils import (equivalence, implication) from .", "hole in polygon.holes for vertex in hole.vertices) @given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point:", "from . import strategies @given(strategies.polygons) def test_vertices(polygon: Polygon) -> None:", "point in polygon polygon.index() after_indexing = point in polygon assert", "polygon, point = polygon_with_point assert implication(point in polygon, point in", "import (equivalence, implication) from . 
import strategies @given(strategies.polygons) def test_vertices(polygon:", "Point]) -> None: polygon, point = polygon_with_point assert implication(point in", "all(vertex in polygon for vertex in polygon.border.vertices) assert all(vertex in", "polygon polygon.index() after_indexing = point in polygon assert equivalence(before_indexing, after_indexing)", "in polygon.convex_hull) @given(strategies.polygons_with_points) def test_indexing(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon,", "given from gon.base import (Point, Polygon) from tests.utils import (equivalence,", "vertex in polygon.border.vertices) assert all(vertex in polygon for hole in", "import Tuple from hypothesis import given from gon.base import (Point,", "= polygon_with_point before_indexing = point in polygon polygon.index() after_indexing =", "import strategies @given(strategies.polygons) def test_vertices(polygon: Polygon) -> None: assert all(vertex", "hypothesis import given from gon.base import (Point, Polygon) from tests.utils", "in polygon for hole in polygon.holes for vertex in hole.vertices)", "@given(strategies.polygons_with_points) def test_indexing(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point =", "tests.utils import (equivalence, implication) from . import strategies @given(strategies.polygons) def", "(equivalence, implication) from . 
import strategies @given(strategies.polygons) def test_vertices(polygon: Polygon)", "= point in polygon polygon.index() after_indexing = point in polygon", "polygon, point = polygon_with_point before_indexing = point in polygon polygon.index()", "in polygon, point in polygon.convex_hull) @given(strategies.polygons_with_points) def test_indexing(polygon_with_point: Tuple[Polygon, Point])", "polygon, point in polygon.convex_hull) @given(strategies.polygons_with_points) def test_indexing(polygon_with_point: Tuple[Polygon, Point]) ->", "from hypothesis import given from gon.base import (Point, Polygon) from", "Tuple from hypothesis import given from gon.base import (Point, Polygon)", "-> None: polygon, point = polygon_with_point assert implication(point in polygon,", "strategies @given(strategies.polygons) def test_vertices(polygon: Polygon) -> None: assert all(vertex in", "def test_indexing(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point = polygon_with_point", "polygon for vertex in polygon.border.vertices) assert all(vertex in polygon for", "Point]) -> None: polygon, point = polygon_with_point before_indexing = point", "-> None: assert all(vertex in polygon for vertex in polygon.border.vertices)", "in polygon.holes for vertex in hole.vertices) @given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point: Tuple[Polygon,", "for vertex in polygon.border.vertices) assert all(vertex in polygon for hole", "from tests.utils import (equivalence, implication) from . import strategies @given(strategies.polygons)", "polygon.border.vertices) assert all(vertex in polygon for hole in polygon.holes for", "test_indexing(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point = polygon_with_point before_indexing", "test_vertices(polygon: Polygon) -> None: assert all(vertex in polygon for vertex", "from typing import Tuple from hypothesis import given from gon.base", "implication) from . 
import strategies @given(strategies.polygons) def test_vertices(polygon: Polygon) ->", "None: polygon, point = polygon_with_point before_indexing = point in polygon", "vertex in hole.vertices) @given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point: Tuple[Polygon, Point]) -> None:", "assert all(vertex in polygon for hole in polygon.holes for vertex", "Polygon) -> None: assert all(vertex in polygon for vertex in", "hole.vertices) @given(strategies.polygons_with_points) def test_convex_hull(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point", "import given from gon.base import (Point, Polygon) from tests.utils import", "point in polygon.convex_hull) @given(strategies.polygons_with_points) def test_indexing(polygon_with_point: Tuple[Polygon, Point]) -> None:", "polygon_with_point before_indexing = point in polygon polygon.index() after_indexing = point", "typing import Tuple from hypothesis import given from gon.base import", "Tuple[Polygon, Point]) -> None: polygon, point = polygon_with_point before_indexing =", "polygon.convex_hull) @given(strategies.polygons_with_points) def test_indexing(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point", "test_convex_hull(polygon_with_point: Tuple[Polygon, Point]) -> None: polygon, point = polygon_with_point assert", "Tuple[Polygon, Point]) -> None: polygon, point = polygon_with_point assert implication(point", "for hole in polygon.holes for vertex in hole.vertices) @given(strategies.polygons_with_points) def", "assert implication(point in polygon, point in polygon.convex_hull) @given(strategies.polygons_with_points) def test_indexing(polygon_with_point:", ". import strategies @given(strategies.polygons) def test_vertices(polygon: Polygon) -> None: assert", "Polygon) from tests.utils import (equivalence, implication) from . import strategies", "polygon for hole in polygon.holes for vertex in hole.vertices) @given(strategies.polygons_with_points)" ]
[ "Points([2220.854889556147, 1992.6933680261686]) my_distort_out = Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values)", "Points([[2218.47960556, 1992.46356322], [2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def test_world2crs_and_on_raw_images():", "decimal=6) local_pos = attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6) def test_metashape_project_local_points_on_raw(): test_project_folder", "= attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6) local_pos = attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values, local_pos.values,", "p_undis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True) my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147,", "chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True) # pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out =", "-2.6697181763370965]) w_pos_ans = Points([0.4999999999999978, 0.9999999999999993, 1.5]) world_pos = attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values,", "1992.46356322], [2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def test_world2crs_and_on_raw_images(): test_project_folder", "chunks[0] local = Points([11.870130675203006, 0.858098777517136, -12.987136541275]) geocentric = Points([-3943658.7087006606, 3363404.124223561,", "1, 1.5]) l_pos = Points([7.960064093299587, 1.3019528769064523, 
-2.6697181763370965]) w_pos_ans = Points([0.4999999999999978,", "== \"metashape\" attempt2 = ReconsProject(\"Metashape\") assert attempt2.software == \"metashape\" with", "camera_id = 56 # camera_label = 'DJI_0057' camera_pix_ans = Points([2391.7104647010146,", "\"metashape\" with pytest.raises(LookupError): attempt3 = ReconsProject(\"not_supported_sfm\") def test_local2world2local(): attempt1 =", "my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147, 1992.6933680261686]]) my_distort_outs = Points([[2218.47960556, 1992.46356322],", "1.5]) world_pos = attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6) local_pos = attempt1.world2local(w_pos)", "my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def test_world2crs_and_on_raw_images(): test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks =", "np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123], [0.06972335, 0.44334391, 0.74589315, 1.85910928], [-0.05848325, 0.74899678,", "= Points([0.4999999999999978, 0.9999999999999993, 1.5]) world_pos = attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6)", "distortion_correct=True) my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147, 1992.6933680261686]]) my_distort_outs = Points([[2218.47960556,", "attempt1.software == \"metashape\" attempt2 = ReconsProject(\"Metashape\") assert attempt2.software == \"metashape\"", "my_undistort_out = Points([2220.854889556147, 1992.6933680261686]) my_distort_out = Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values)", "attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6) local_pos = attempt1.world2local(w_pos) 
np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6)", "= chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False) p_undis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True) my_undistort_outs", "56 # camera_label = 'DJI_0057' camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165]) idp_cam_pix", "local_pos.values, decimal=6) def test_metashape_project_local_points_on_raw(): test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks = metashape.open_project(test_project_folder)", "# pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out = Points([2220.854889556147, 1992.6933680261686]) my_distort_out", "import easyidp from easyidp.core.objects import ReconsProject, Points from easyidp.io import", "= ReconsProject(\"not_supported_sfm\") def test_local2world2local(): attempt1 = ReconsProject(\"agisoft\") attempt1.transform.matrix = np.asarray([[-0.86573098,", "= np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123], [0.06972335, 0.44334391, 0.74589315, 1.85910928], [-0.05848325,", "l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) w_pos_ans = Points([0.4999999999999978, 0.9999999999999993, 1.5])", "-0.43972184, -0.1835615], [0., 0., 0., 1.]], dtype=np.float) w_pos = Points([0.5,", "[2220.854889556147, 1992.6933680261686]]) my_distort_outs = Points([[2218.47960556, 1992.46356322], [2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values)", "Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) w_pos_ans = Points([0.4999999999999978, 0.9999999999999993, 1.5]) world_pos =", "world_pos = attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6) local_pos = attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values,", 
"test_metashape_project_local_points_on_raw(): test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0]", "def test_init_reconsproject(): attempt1 = ReconsProject(\"agisoft\") assert attempt1.software == \"metashape\" attempt2", "import numpy as np import pytest import easyidp from easyidp.core.objects", "np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6) local_pos = attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6) def", "# test for single point l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965])", "import pytest import easyidp from easyidp.core.objects import ReconsProject, Points from", "columns=['lon', 'lat', 'alt']) idp_world = chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1) idp_crs", "= os.path.join(easyidp.__path__[0], \"io/tests\") def test_init_reconsproject(): attempt1 = ReconsProject(\"agisoft\") assert attempt1.software", "distortion_correct=False) p_undis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True) # pro_api_out = np.asarray([2218.883386793118,", "= Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575]) geodetic = Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon',", "my_distort_out = Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) # test", "camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165]) idp_cam_pix = chunk.project_local_points_on_raw(local, camera_id, distortion_correct=True) np.testing.assert_array_almost_equal(camera_pix_ans.values,", "my_undistort_out.values) # test for multiple points l_pos_points = Points([[7.960064093299587, 
1.3019528769064523,", "= Points([2391.7104647010146, 1481.8987733175165]) idp_cam_pix = chunk.project_local_points_on_raw(local, camera_id, distortion_correct=True) np.testing.assert_array_almost_equal(camera_pix_ans.values, idp_cam_pix.values)", "chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True) my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147, 1992.6933680261686]]) my_distort_outs", "chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False) p_undis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True) my_undistort_outs =", "Points from easyidp.io import metashape module_path = os.path.join(easyidp.__path__[0], \"io/tests\") def", "ReconsProject, Points from easyidp.io import metashape module_path = os.path.join(easyidp.__path__[0], \"io/tests\")", "1992.6933680261686], [2220.854889556147, 1992.6933680261686]]) my_distort_outs = Points([[2218.47960556, 1992.46356322], [2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values,", "os.path.join(easyidp.__path__[0], \"io/tests\") def test_init_reconsproject(): attempt1 = ReconsProject(\"agisoft\") assert attempt1.software ==", "# camera_label = 'DJI_0057' camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165]) idp_cam_pix =", "= chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False) p_undis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True) #", "'alt']) idp_world = chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1) idp_crs = chunk.world2crs(idp_world)", "0.858098777517136, -12.987136541275]) geocentric = Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575]) geodetic = Points([139.54033578028609,", "ReconsProject(\"agisoft\") assert attempt1.software == \"metashape\" attempt2 = ReconsProject(\"Metashape\") assert attempt2.software", "for multiple points 
l_pos_points = Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965], [7.960064093299587, 1.3019528769064523,", "w_pos = Points([0.5, 1, 1.5]) l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965])", "Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) p_dis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False) p_undis_out =", "0, distortion_correct=True) my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147, 1992.6933680261686]]) my_distort_outs =", "l_pos_points = Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965], [7.960064093299587, 1.3019528769064523, -2.6697181763370965]]) p_dis_outs =", "geocentric = Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575]) geodetic = Points([139.54033578028609, 35.73756358928734, 96.87827569602781],", "= chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True) my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147, 1992.6933680261686]])", "test_world2crs_and_on_raw_images(): test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0]", "= ReconsProject(\"agisoft\") assert attempt1.software == \"metashape\" attempt2 = ReconsProject(\"Metashape\") assert", "0.08977677, 7.65034123], [0.06972335, 0.44334391, 0.74589315, 1.85910928], [-0.05848325, 0.74899678, -0.43972184, -0.1835615],", "1.3019528769064523, -2.6697181763370965]) p_dis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False) p_undis_out = chunk.project_local_points_on_raw(l_pos,", "1.85910928], [-0.05848325, 0.74899678, -0.43972184, -0.1835615], [0., 0., 0., 1.]], dtype=np.float)", "= Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) p_dis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False) p_undis_out", 
"chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False) p_undis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True) # pro_api_out", "chunks = metashape.open_project(test_project_folder) chunk = chunks[0] # test for single", "3704651.3067566575]) geodetic = Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon', 'lat', 'alt']) idp_world", "np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) # test for multiple points l_pos_points", "my_undistort_outs.values) def test_world2crs_and_on_raw_images(): test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks = metashape.open_project(test_project_folder) chunk", "attempt2 = ReconsProject(\"Metashape\") assert attempt2.software == \"metashape\" with pytest.raises(LookupError): attempt3", "\"metashape\" attempt2 = ReconsProject(\"Metashape\") assert attempt2.software == \"metashape\" with pytest.raises(LookupError):", "ReconsProject(\"not_supported_sfm\") def test_local2world2local(): attempt1 = ReconsProject(\"agisoft\") attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186,", "test_local2world2local(): attempt1 = ReconsProject(\"agisoft\") attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123],", "points l_pos_points = Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965], [7.960064093299587, 1.3019528769064523, -2.6697181763370965]]) p_dis_outs", "pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out = Points([2220.854889556147, 1992.6933680261686]) my_distort_out =", "assert attempt2.software == \"metashape\" with pytest.raises(LookupError): attempt3 = ReconsProject(\"not_supported_sfm\") def", "Points([0.4999999999999978, 0.9999999999999993, 1.5]) world_pos = attempt1.local2world(l_pos) 
np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6) local_pos", "attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6) def test_metashape_project_local_points_on_raw(): test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks", "metashape.open_project(test_project_folder) chunk = chunks[0] # test for single point l_pos", "0, distortion_correct=True) # pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out = Points([2220.854889556147,", "= Points([2220.854889556147, 1992.6933680261686]) my_distort_out = Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values,", "camera_label = 'DJI_0057' camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165]) idp_cam_pix = chunk.project_local_points_on_raw(local,", "-0.1835615], [0., 0., 0., 1.]], dtype=np.float) w_pos = Points([0.5, 1,", "def test_metashape_project_local_points_on_raw(): test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks = metashape.open_project(test_project_folder) chunk =", "numpy as np import pytest import easyidp from easyidp.core.objects import", "def test_local2world2local(): attempt1 = ReconsProject(\"agisoft\") attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186, 0.08977677,", "dtype=np.float) w_pos = Points([0.5, 1, 1.5]) l_pos = Points([7.960064093299587, 1.3019528769064523,", "-0.01489186, 0.08977677, 7.65034123], [0.06972335, 0.44334391, 0.74589315, 1.85910928], [-0.05848325, 0.74899678, -0.43972184,", "with pytest.raises(LookupError): attempt3 = ReconsProject(\"not_supported_sfm\") def test_local2world2local(): attempt1 = ReconsProject(\"agisoft\")", "chunk = chunks[0] # test for single point l_pos =", "distortion_correct=False) p_undis_outs = 
chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True) my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686],", "= Points([[2218.47960556, 1992.46356322], [2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def", "chunk = chunks[0] local = Points([11.870130675203006, 0.858098777517136, -12.987136541275]) geocentric =", "np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6) def test_metashape_project_local_points_on_raw(): test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks =", "chunks = metashape.open_project(test_project_folder) chunk = chunks[0] local = Points([11.870130675203006, 0.858098777517136,", "= easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0] local =", "my_distort_outs = Points([[2218.47960556, 1992.46356322], [2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values)", "np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def test_world2crs_and_on_raw_images(): test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks = metashape.open_project(test_project_folder)", "attempt1 = ReconsProject(\"agisoft\") attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123], [0.06972335,", "1992.6933680261686]]) my_distort_outs = Points([[2218.47960556, 1992.46356322], [2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values,", "Points([11.870130675203006, 0.858098777517136, -12.987136541275]) geocentric = 
Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575]) geodetic =", "np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1) idp_crs = chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id =", "for single point l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) p_dis_out =", "-2.6697181763370965]) p_dis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False) p_undis_out = chunk.project_local_points_on_raw(l_pos, 0,", "[7.960064093299587, 1.3019528769064523, -2.6697181763370965]]) p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False) p_undis_outs =", "Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) # test for multiple", "attempt1 = ReconsProject(\"agisoft\") assert attempt1.software == \"metashape\" attempt2 = ReconsProject(\"Metashape\")", "-2.6697181763370965]]) p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False) p_undis_outs = chunk.project_local_points_on_raw(l_pos_points, 0,", "== \"metashape\" with pytest.raises(LookupError): attempt3 = ReconsProject(\"not_supported_sfm\") def test_local2world2local(): attempt1", "assert attempt1.software == \"metashape\" attempt2 = ReconsProject(\"Metashape\") assert attempt2.software ==", "= metashape.open_project(test_project_folder) chunk = chunks[0] # test for single point", "7.65034123], [0.06972335, 0.44334391, 0.74589315, 1.85910928], [-0.05848325, 0.74899678, -0.43972184, -0.1835615], [0.,", "w_pos_ans = Points([0.4999999999999978, 0.9999999999999993, 1.5]) world_pos = attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values,", "[0., 0., 0., 1.]], dtype=np.float) w_pos = Points([0.5, 1, 1.5])", "attempt2.software == 
\"metashape\" with pytest.raises(LookupError): attempt3 = ReconsProject(\"not_supported_sfm\") def test_local2world2local():", "1.3019528769064523, -2.6697181763370965], [7.960064093299587, 1.3019528769064523, -2.6697181763370965]]) p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False)", "0.44334391, 0.74589315, 1.85910928], [-0.05848325, 0.74899678, -0.43972184, -0.1835615], [0., 0., 0.,", "Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147, 1992.6933680261686]]) my_distort_outs = Points([[2218.47960556, 1992.46356322], [2218.47960556, 1992.46356322]])", "geodetic = Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon', 'lat', 'alt']) idp_world =", "= ReconsProject(\"Metashape\") assert attempt2.software == \"metashape\" with pytest.raises(LookupError): attempt3 =", "p_dis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False) p_undis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True)", "as np import pytest import easyidp from easyidp.core.objects import ReconsProject,", "[0.06972335, 0.44334391, 0.74589315, 1.85910928], [-0.05848325, 0.74899678, -0.43972184, -0.1835615], [0., 0.,", "= Points([0.5, 1, 1.5]) l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) w_pos_ans", "= chunks[0] local = Points([11.870130675203006, 0.858098777517136, -12.987136541275]) geocentric = Points([-3943658.7087006606,", "ReconsProject(\"agisoft\") attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123], [0.06972335, 0.44334391, 0.74589315,", "easyidp from easyidp.core.objects import ReconsProject, Points from easyidp.io import metashape", "[2218.47960556, 1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def test_world2crs_and_on_raw_images(): test_project_folder =", "os 
import numpy as np import pytest import easyidp from", "my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) # test for multiple points l_pos_points =", "1.3019528769064523, -2.6697181763370965]]) p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False) p_undis_outs = chunk.project_local_points_on_raw(l_pos_points,", "test for single point l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) p_dis_out", "chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id = 56 # camera_label = 'DJI_0057'", "metashape module_path = os.path.join(easyidp.__path__[0], \"io/tests\") def test_init_reconsproject(): attempt1 = ReconsProject(\"agisoft\")", "multiple points l_pos_points = Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965], [7.960064093299587, 1.3019528769064523, -2.6697181763370965]])", "35.73756358928734, 96.87827569602781], columns=['lon', 'lat', 'alt']) idp_world = chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values,", "= attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6) def test_metashape_project_local_points_on_raw(): test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\")", "= Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon', 'lat', 'alt']) idp_world = chunk.local2world(local)", "= Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) # test for", "= Points([[2220.854889556147, 1992.6933680261686], [2220.854889556147, 1992.6933680261686]]) my_distort_outs = Points([[2218.47960556, 1992.46356322], [2218.47960556,", "3363404.124223561, 3704651.3067566575]) geodetic = 
Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon', 'lat', 'alt'])", "decimal=1) idp_crs = chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id = 56 #", "import os import numpy as np import pytest import easyidp", "import ReconsProject, Points from easyidp.io import metashape module_path = os.path.join(easyidp.__path__[0],", "1992.6933680261686]) my_distort_out = Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) #", "0., 0., 1.]], dtype=np.float) w_pos = Points([0.5, 1, 1.5]) l_pos", "-2.6697181763370965], [7.960064093299587, 1.3019528769064523, -2.6697181763370965]]) p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False) p_undis_outs", "pytest import easyidp from easyidp.core.objects import ReconsProject, Points from easyidp.io", "p_undis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True) # pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149])", "0.74899678, -0.43972184, -0.1835615], [0., 0., 0., 1.]], dtype=np.float) w_pos =", "0.9999999999999993, 1.5]) world_pos = attempt1.local2world(l_pos) np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6) local_pos =", "= np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out = Points([2220.854889556147, 1992.6933680261686]) my_distort_out = Points([2218.47960556,", "= Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) w_pos_ans = Points([0.4999999999999978, 0.9999999999999993, 1.5]) world_pos", "test_init_reconsproject(): attempt1 = ReconsProject(\"agisoft\") assert attempt1.software == \"metashape\" attempt2 =", "= Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965], [7.960064093299587, 1.3019528769064523, -2.6697181763370965]]) p_dis_outs = 
chunk.project_local_points_on_raw(l_pos_points,", "pytest.raises(LookupError): attempt3 = ReconsProject(\"not_supported_sfm\") def test_local2world2local(): attempt1 = ReconsProject(\"agisoft\") attempt1.transform.matrix", "[-0.05848325, 0.74899678, -0.43972184, -0.1835615], [0., 0., 0., 1.]], dtype=np.float) w_pos", "easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0] local = Points([11.870130675203006,", "96.87827569602781], columns=['lon', 'lat', 'alt']) idp_world = chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1)", "easyidp.core.objects import ReconsProject, Points from easyidp.io import metashape module_path =", "1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values) np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) # test for multiple points", "idp_crs = chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id = 56 # camera_label", "geodetic.values) camera_id = 56 # camera_label = 'DJI_0057' camera_pix_ans =", "attempt3 = ReconsProject(\"not_supported_sfm\") def test_local2world2local(): attempt1 = ReconsProject(\"agisoft\") attempt1.transform.matrix =", "'DJI_0057' camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165]) idp_cam_pix = chunk.project_local_points_on_raw(local, camera_id, distortion_correct=True)", "chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1) idp_crs = chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id", "= chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id = 56 # camera_label =", "0, distortion_correct=False) p_undis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True) # 
pro_api_out =", "= metashape.open_project(test_project_folder) chunk = chunks[0] local = Points([11.870130675203006, 0.858098777517136, -12.987136541275])", "Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon', 'lat', 'alt']) idp_world = chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values,", "geocentric.values, decimal=1) idp_crs = chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id = 56", "chunks[0] # test for single point l_pos = Points([7.960064093299587, 1.3019528769064523,", "'lat', 'alt']) idp_world = chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1) idp_crs =", "1991.4709388015149]) my_undistort_out = Points([2220.854889556147, 1992.6933680261686]) my_distort_out = Points([2218.47960556, 1992.46356322]) np.testing.assert_array_almost_equal(p_dis_out.values,", "1.]], dtype=np.float) w_pos = Points([0.5, 1, 1.5]) l_pos = Points([7.960064093299587,", "1.3019528769064523, -2.6697181763370965]) w_pos_ans = Points([0.4999999999999978, 0.9999999999999993, 1.5]) world_pos = attempt1.local2world(l_pos)", "l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) p_dis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False)", "-12.987136541275]) geocentric = Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575]) geodetic = Points([139.54033578028609, 35.73756358928734,", "metashape.open_project(test_project_folder) chunk = chunks[0] local = Points([11.870130675203006, 0.858098777517136, -12.987136541275]) geocentric", "0.74589315, 1.85910928], [-0.05848325, 0.74899678, -0.43972184, -0.1835615], [0., 0., 0., 1.]],", "= Points([11.870130675203006, 0.858098777517136, -12.987136541275]) geocentric = Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575]) geodetic", "decimal=6) def test_metashape_project_local_points_on_raw(): 
test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks = metashape.open_project(test_project_folder) chunk", "= 56 # camera_label = 'DJI_0057' camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165])", "np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values) camera_id = 56 # camera_label = 'DJI_0057' camera_pix_ans", "1.5]) l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) w_pos_ans = Points([0.4999999999999978, 0.9999999999999993,", "distortion_correct=True) # pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out = Points([2220.854889556147, 1992.6933680261686])", "easyidp.io import metashape module_path = os.path.join(easyidp.__path__[0], \"io/tests\") def test_init_reconsproject(): attempt1", "p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False) p_undis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True)", "idp_world = chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1) idp_crs = chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values,", "world_pos.values, decimal=6) local_pos = attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6) def test_metashape_project_local_points_on_raw():", "from easyidp.io import metashape module_path = os.path.join(easyidp.__path__[0], \"io/tests\") def test_init_reconsproject():", "= 'DJI_0057' camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165]) idp_cam_pix = chunk.project_local_points_on_raw(local, camera_id,", "# test for multiple points l_pos_points = Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965],", "local_pos = attempt1.world2local(w_pos) np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6) def test_metashape_project_local_points_on_raw(): test_project_folder 
=", "attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123], [0.06972335, 0.44334391, 0.74589315, 1.85910928],", "= chunks[0] # test for single point l_pos = Points([7.960064093299587,", "np import pytest import easyidp from easyidp.core.objects import ReconsProject, Points", "Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965], [7.960064093299587, 1.3019528769064523, -2.6697181763370965]]) p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0,", "= ReconsProject(\"agisoft\") attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123], [0.06972335, 0.44334391,", "Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575]) geodetic = Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon', 'lat',", "= easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0] # test", "= chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True) # pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out", "\"io/tests\") def test_init_reconsproject(): attempt1 = ReconsProject(\"agisoft\") assert attempt1.software == \"metashape\"", "single point l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) p_dis_out = chunk.project_local_points_on_raw(l_pos,", "1992.46356322]]) np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def test_world2crs_and_on_raw_images(): test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\")", "import metashape module_path = os.path.join(easyidp.__path__[0], \"io/tests\") def test_init_reconsproject(): attempt1 =", "np.asarray([2218.883386793118, 1991.4709388015149]) my_undistort_out = Points([2220.854889556147, 1992.6933680261686]) my_distort_out = Points([2218.47960556, 
1992.46356322])", "np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values) np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values) def test_world2crs_and_on_raw_images(): test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks", "0., 1.]], dtype=np.float) w_pos = Points([0.5, 1, 1.5]) l_pos =", "def test_world2crs_and_on_raw_images(): test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks = metashape.open_project(test_project_folder) chunk =", "test_project_folder = easyidp.test_full_path(\"data/metashape/wheat_tanashi.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0] local", "from easyidp.core.objects import ReconsProject, Points from easyidp.io import metashape module_path", "0, distortion_correct=False) p_undis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True) my_undistort_outs = Points([[2220.854889556147,", "test for multiple points l_pos_points = Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965], [7.960064093299587,", "easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0] # test for", "local = Points([11.870130675203006, 0.858098777517136, -12.987136541275]) geocentric = Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575])", "module_path = os.path.join(easyidp.__path__[0], \"io/tests\") def test_init_reconsproject(): attempt1 = ReconsProject(\"agisoft\") assert", "ReconsProject(\"Metashape\") assert attempt2.software == \"metashape\" with pytest.raises(LookupError): attempt3 = ReconsProject(\"not_supported_sfm\")", "Points([0.5, 1, 1.5]) l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) w_pos_ans =", "point l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965]) p_dis_out = chunk.project_local_points_on_raw(l_pos, 0,", "= 
chunk.local2world(local) np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1) idp_crs = chunk.world2crs(idp_world) np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values)", "np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values) # test for multiple points l_pos_points = Points([[7.960064093299587,", "test_project_folder = easyidp.test_full_path(\"data/metashape/goya_test.psx\") chunks = metashape.open_project(test_project_folder) chunk = chunks[0] #" ]
[ "210, 211, 212, 213, 216, 217, 218, 220, 221, 223,", "220, 221, 223, 225, 227, 228, 229, 230, 234, 235,", "( 201, 202, 203, 204, 205, 206, 207, 208, 209,", "402, 516, 517, 518, 519, 520, 521, 525, 526, 527,", "533, 602, 700, 1051, 1052, 1053, 1054, 2551, 2552, 2556,", "331, 332, 333, 334, 335, 336, 337, 338, 339, 340,", "5001, 5005, 5006, 6000, 6010, 6011, 9000, 10000, ) STATUS_TIMEOUT", "2559, 3000, 3001, 3002, 3003, 3004, 3005, 3006, 3007, 3008,", "306, 308, 309, 310, 311, 312, 313, 314, 315, 316,", "3015, 3016, 3020, 3021, 3022, 3023, 3024, 5000, 5001, 5005,", "247, 248, 249, 250, 251, 252, 254, 260, 261, 262,", "373, 374, 375, 383, 391, 402, 516, 517, 518, 519,", "287, 288, 290, 293, 294, 295, 297, 300, 301, 302,", "270, 273, 274, 278, 279, 280, 281, 282, 289, 291,", "335, 336, 337, 338, 339, 340, 341, 342, 343, 344,", "3003, 3004, 3005, 3006, 3007, 3008, 3009, 3010, 3011, 3012,", "301, 302, 303, 304, 321, 323, 324, 325, 326, 327,", "228, 229, 230, 234, 235, 236, 238, 240, 241, 242,", "258, 259, 268, 269, 270, 273, 274, 278, 279, 280,", "1054, 2551, 2552, 2556, 2557, 2558, 2559, 3000, 3001, 3002,", "304, 321, 323, 324, 325, 326, 327, 328, 329, 330,", "= (0,) STATUS_AUTH_FAILED = (100, 101, 102, 200, 401) STATUS_INVALID_PARAMS", "227, 228, 229, 230, 234, 235, 236, 238, 240, 241,", "266, 267, 271, 272, 275, 276, 283, 284, 285, 286,", "305, 306, 308, 309, 310, 311, 312, 313, 314, 315,", "231, 233, 237, 253, 255, 256, 257, 258, 259, 268,", "225, 227, 228, 229, 230, 234, 235, 236, 238, 240,", "318, 319, 320, 322, 370, 371, 372, 373, 374, 375,", "339, 340, 341, 342, 343, 344, 345, 346, 347, 348,", "STATUS_ERROR_OCCURRED = ( 215, 219, 222, 224, 226, 231, 233,", "348, 349, 350, 351, 352, 353, 380, 381, 382, 400,", "517, 518, 519, 520, 521, 525, 526, 527, 528, 529,", "233, 237, 253, 255, 256, 257, 258, 259, 268, 269,", "204, 205, 206, 207, 208, 209, 210, 211, 212, 213,", "293, 294, 295, 297, 300, 301, 302, 303, 304, 321,", "207, 208, 209, 210, 211, 212, 213, 216, 217, 
218,", "254, 260, 261, 262, 263, 264, 265, 266, 267, 271,", "1053, 1054, 2551, 2552, 2556, 2557, 2558, 2559, 3000, 3001,", "282, 289, 291, 292, 296, 298, 305, 306, 308, 309,", "209, 210, 211, 212, 213, 216, 217, 218, 220, 221,", "265, 266, 267, 271, 272, 275, 276, 283, 284, 285,", "2552, 2556, 2557, 2558, 2559, 3000, 3001, 3002, 3003, 3004,", "353, 380, 381, 382, 400, 501, 502, 503, 504, 505,", "5005, 5006, 6000, 6010, 6011, 9000, 10000, ) STATUS_TIMEOUT =", "1051, 1052, 1053, 1054, 2551, 2552, 2556, 2557, 2558, 2559,", "2555) STATUS_ERROR_OCCURRED = ( 215, 219, 222, 224, 226, 231,", "303, 304, 321, 323, 324, 325, 326, 327, 328, 329,", "602, 700, 1051, 1052, 1053, 1054, 2551, 2552, 2556, 2557,", "320, 322, 370, 371, 372, 373, 374, 375, 383, 391,", "531, 533, 602, 700, 1051, 1052, 1053, 1054, 2551, 2552,", "322, 370, 371, 372, 373, 374, 375, 383, 391, 402,", "3011, 3012, 3013, 3014, 3015, 3016, 3020, 3021, 3022, 3023,", "290, 293, 294, 295, 297, 300, 301, 302, 303, 304,", "519, 520, 521, 525, 526, 527, 528, 529, 530, 531,", "6010, 6011, 9000, 10000, ) STATUS_TIMEOUT = (522,) STATUS_BAD_STATE =", "STATUS_AUTH_FAILED = (100, 101, 102, 200, 401) STATUS_INVALID_PARAMS = (", "294, 295, 297, 300, 301, 302, 303, 304, 321, 323,", "245, 246, 247, 248, 249, 250, 251, 252, 254, 260,", "264, 265, 266, 267, 271, 272, 275, 276, 283, 284,", "6011, 9000, 10000, ) STATUS_TIMEOUT = (522,) STATUS_BAD_STATE = (524,)", "328, 329, 330, 331, 332, 333, 334, 335, 336, 337,", "370, 371, 372, 373, 374, 375, 383, 391, 402, 516,", "518, 519, 520, 521, 525, 526, 527, 528, 529, 530,", "525, 526, 527, 528, 529, 530, 531, 533, 602, 700,", "523, 532, 3017, 3018, 3019, ) STATUS_UNAUTHORIZED = (214, 277,", "315, 316, 317, 318, 319, 320, 322, 370, 371, 372,", "(100, 101, 102, 200, 401) STATUS_INVALID_PARAMS = ( 201, 202,", "511, 523, 532, 3017, 3018, 3019, ) STATUS_UNAUTHORIZED = (214,", "349, 350, 351, 352, 353, 380, 381, 382, 400, 501,", "272, 275, 276, 283, 284, 285, 286, 287, 288, 290,", "246, 247, 
248, 249, 250, 251, 252, 254, 260, 261,", "400, 501, 502, 503, 504, 505, 506, 509, 510, 511,", "250, 251, 252, 254, 260, 261, 262, 263, 264, 265,", "226, 231, 233, 237, 253, 255, 256, 257, 258, 259,", "= ( 201, 202, 203, 204, 205, 206, 207, 208,", "244, 245, 246, 247, 248, 249, 250, 251, 252, 254,", "340, 341, 342, 343, 344, 345, 346, 347, 348, 349,", "3013, 3014, 3015, 3016, 3020, 3021, 3022, 3023, 3024, 5000,", "222, 224, 226, 231, 233, 237, 253, 255, 256, 257,", "3004, 3005, 3006, 3007, 3008, 3009, 3010, 3011, 3012, 3013,", "371, 372, 373, 374, 375, 383, 391, 402, 516, 517,", "248, 249, 250, 251, 252, 254, 260, 261, 262, 263,", "201, 202, 203, 204, 205, 206, 207, 208, 209, 210,", "504, 505, 506, 509, 510, 511, 523, 532, 3017, 3018,", "3019, ) STATUS_UNAUTHORIZED = (214, 277, 2553, 2554, 2555) STATUS_ERROR_OCCURRED", "274, 278, 279, 280, 281, 282, 289, 291, 292, 296,", ") STATUS_TIMEOUT = (522,) STATUS_BAD_STATE = (524,) STATUS_TOO_MANY_REQUESTS = (601,)", "345, 346, 347, 348, 349, 350, 351, 352, 353, 380,", "3007, 3008, 3009, 3010, 3011, 3012, 3013, 3014, 3015, 3016,", "218, 220, 221, 223, 225, 227, 228, 229, 230, 234,", "289, 291, 292, 296, 298, 305, 306, 308, 309, 310,", "205, 206, 207, 208, 209, 210, 211, 212, 213, 216,", "268, 269, 270, 273, 274, 278, 279, 280, 281, 282,", "700, 1051, 1052, 1053, 1054, 2551, 2552, 2556, 2557, 2558,", "3024, 5000, 5001, 5005, 5006, 6000, 6010, 6011, 9000, 10000,", "283, 284, 285, 286, 287, 288, 290, 293, 294, 295,", "101, 102, 200, 401) STATUS_INVALID_PARAMS = ( 201, 202, 203,", "3018, 3019, ) STATUS_UNAUTHORIZED = (214, 277, 2553, 2554, 2555)", "333, 334, 335, 336, 337, 338, 339, 340, 341, 342,", "312, 313, 314, 315, 316, 317, 318, 319, 320, 322,", "<gh_stars>0 \"\"\"Constant values.\"\"\" STATUS_SUCCESS = (0,) STATUS_AUTH_FAILED = (100, 101,", "STATUS_SUCCESS = (0,) STATUS_AUTH_FAILED = (100, 101, 102, 200, 401)", "380, 381, 382, 400, 501, 502, 503, 504, 505, 506,", "237, 253, 255, 256, 257, 258, 259, 268, 269, 270,", "308, 
309, 310, 311, 312, 313, 314, 315, 316, 317,", "383, 391, 402, 516, 517, 518, 519, 520, 521, 525,", "10000, ) STATUS_TIMEOUT = (522,) STATUS_BAD_STATE = (524,) STATUS_TOO_MANY_REQUESTS =", "391, 402, 516, 517, 518, 519, 520, 521, 525, 526,", "= (214, 277, 2553, 2554, 2555) STATUS_ERROR_OCCURRED = ( 215,", "343, 344, 345, 346, 347, 348, 349, 350, 351, 352,", "347, 348, 349, 350, 351, 352, 353, 380, 381, 382,", "296, 298, 305, 306, 308, 309, 310, 311, 312, 313,", "350, 351, 352, 353, 380, 381, 382, 400, 501, 502,", "242, 243, 244, 245, 246, 247, 248, 249, 250, 251,", "3012, 3013, 3014, 3015, 3016, 3020, 3021, 3022, 3023, 3024,", "202, 203, 204, 205, 206, 207, 208, 209, 210, 211,", "267, 271, 272, 275, 276, 283, 284, 285, 286, 287,", "240, 241, 242, 243, 244, 245, 246, 247, 248, 249,", "235, 236, 238, 240, 241, 242, 243, 244, 245, 246,", "332, 333, 334, 335, 336, 337, 338, 339, 340, 341,", "3005, 3006, 3007, 3008, 3009, 3010, 3011, 3012, 3013, 3014,", "337, 338, 339, 340, 341, 342, 343, 344, 345, 346,", "505, 506, 509, 510, 511, 523, 532, 3017, 3018, 3019,", "3006, 3007, 3008, 3009, 3010, 3011, 3012, 3013, 3014, 3015,", "9000, 10000, ) STATUS_TIMEOUT = (522,) STATUS_BAD_STATE = (524,) STATUS_TOO_MANY_REQUESTS", "(214, 277, 2553, 2554, 2555) STATUS_ERROR_OCCURRED = ( 215, 219,", "206, 207, 208, 209, 210, 211, 212, 213, 216, 217,", "317, 318, 319, 320, 322, 370, 371, 372, 373, 374,", "3000, 3001, 3002, 3003, 3004, 3005, 3006, 3007, 3008, 3009,", "249, 250, 251, 252, 254, 260, 261, 262, 263, 264,", "325, 326, 327, 328, 329, 330, 331, 332, 333, 334,", "372, 373, 374, 375, 383, 391, 402, 516, 517, 518,", "229, 230, 234, 235, 236, 238, 240, 241, 242, 243,", "213, 216, 217, 218, 220, 221, 223, 225, 227, 228,", "2554, 2555) STATUS_ERROR_OCCURRED = ( 215, 219, 222, 224, 226,", "327, 328, 329, 330, 331, 332, 333, 334, 335, 336,", "262, 263, 264, 265, 266, 267, 271, 272, 275, 276,", "252, 254, 260, 261, 262, 263, 264, 265, 266, 267,", "302, 303, 304, 321, 323, 324, 325, 326, 
327, 328,", "263, 264, 265, 266, 267, 271, 272, 275, 276, 283,", "330, 331, 332, 333, 334, 335, 336, 337, 338, 339,", "224, 226, 231, 233, 237, 253, 255, 256, 257, 258,", "212, 213, 216, 217, 218, 220, 221, 223, 225, 227,", "275, 276, 283, 284, 285, 286, 287, 288, 290, 293,", "5006, 6000, 6010, 6011, 9000, 10000, ) STATUS_TIMEOUT = (522,)", "2551, 2552, 2556, 2557, 2558, 2559, 3000, 3001, 3002, 3003,", "3014, 3015, 3016, 3020, 3021, 3022, 3023, 3024, 5000, 5001,", "324, 325, 326, 327, 328, 329, 330, 331, 332, 333,", "253, 255, 256, 257, 258, 259, 268, 269, 270, 273,", "3021, 3022, 3023, 3024, 5000, 5001, 5005, 5006, 6000, 6010,", "280, 281, 282, 289, 291, 292, 296, 298, 305, 306,", "200, 401) STATUS_INVALID_PARAMS = ( 201, 202, 203, 204, 205,", "276, 283, 284, 285, 286, 287, 288, 290, 293, 294,", "501, 502, 503, 504, 505, 506, 509, 510, 511, 523,", "298, 305, 306, 308, 309, 310, 311, 312, 313, 314,", "375, 383, 391, 402, 516, 517, 518, 519, 520, 521,", "527, 528, 529, 530, 531, 533, 602, 700, 1051, 1052,", "3010, 3011, 3012, 3013, 3014, 3015, 3016, 3020, 3021, 3022,", ") STATUS_UNAUTHORIZED = (214, 277, 2553, 2554, 2555) STATUS_ERROR_OCCURRED =", "336, 337, 338, 339, 340, 341, 342, 343, 344, 345,", "286, 287, 288, 290, 293, 294, 295, 297, 300, 301,", "529, 530, 531, 533, 602, 700, 1051, 1052, 1053, 1054,", "values.\"\"\" STATUS_SUCCESS = (0,) STATUS_AUTH_FAILED = (100, 101, 102, 200,", "241, 242, 243, 244, 245, 246, 247, 248, 249, 250,", "3009, 3010, 3011, 3012, 3013, 3014, 3015, 3016, 3020, 3021,", "3017, 3018, 3019, ) STATUS_UNAUTHORIZED = (214, 277, 2553, 2554,", "2557, 2558, 2559, 3000, 3001, 3002, 3003, 3004, 3005, 3006,", "215, 219, 222, 224, 226, 231, 233, 237, 253, 255,", "255, 256, 257, 258, 259, 268, 269, 270, 273, 274,", "291, 292, 296, 298, 305, 306, 308, 309, 310, 311,", "2553, 2554, 2555) STATUS_ERROR_OCCURRED = ( 215, 219, 222, 224,", "341, 342, 343, 344, 345, 346, 347, 348, 349, 350,", "3001, 3002, 3003, 3004, 3005, 3006, 3007, 3008, 3009, 3010,", 
"102, 200, 401) STATUS_INVALID_PARAMS = ( 201, 202, 203, 204,", "251, 252, 254, 260, 261, 262, 263, 264, 265, 266,", "( 215, 219, 222, 224, 226, 231, 233, 237, 253,", "3022, 3023, 3024, 5000, 5001, 5005, 5006, 6000, 6010, 6011,", "223, 225, 227, 228, 229, 230, 234, 235, 236, 238,", "297, 300, 301, 302, 303, 304, 321, 323, 324, 325,", "257, 258, 259, 268, 269, 270, 273, 274, 278, 279,", "273, 274, 278, 279, 280, 281, 282, 289, 291, 292,", "532, 3017, 3018, 3019, ) STATUS_UNAUTHORIZED = (214, 277, 2553,", "530, 531, 533, 602, 700, 1051, 1052, 1053, 1054, 2551,", "3002, 3003, 3004, 3005, 3006, 3007, 3008, 3009, 3010, 3011,", "309, 310, 311, 312, 313, 314, 315, 316, 317, 318,", "321, 323, 324, 325, 326, 327, 328, 329, 330, 331,", "= (100, 101, 102, 200, 401) STATUS_INVALID_PARAMS = ( 201,", "323, 324, 325, 326, 327, 328, 329, 330, 331, 332,", "510, 511, 523, 532, 3017, 3018, 3019, ) STATUS_UNAUTHORIZED =", "234, 235, 236, 238, 240, 241, 242, 243, 244, 245,", "374, 375, 383, 391, 402, 516, 517, 518, 519, 520,", "277, 2553, 2554, 2555) STATUS_ERROR_OCCURRED = ( 215, 219, 222,", "STATUS_UNAUTHORIZED = (214, 277, 2553, 2554, 2555) STATUS_ERROR_OCCURRED = (", "219, 222, 224, 226, 231, 233, 237, 253, 255, 256,", "= ( 215, 219, 222, 224, 226, 231, 233, 237,", "520, 521, 525, 526, 527, 528, 529, 530, 531, 533,", "281, 282, 289, 291, 292, 296, 298, 305, 306, 308,", "STATUS_INVALID_PARAMS = ( 201, 202, 203, 204, 205, 206, 207,", "6000, 6010, 6011, 9000, 10000, ) STATUS_TIMEOUT = (522,) STATUS_BAD_STATE", "243, 244, 245, 246, 247, 248, 249, 250, 251, 252,", "203, 204, 205, 206, 207, 208, 209, 210, 211, 212,", "285, 286, 287, 288, 290, 293, 294, 295, 297, 300,", "271, 272, 275, 276, 283, 284, 285, 286, 287, 288,", "516, 517, 518, 519, 520, 521, 525, 526, 527, 528,", "269, 270, 273, 274, 278, 279, 280, 281, 282, 289,", "329, 330, 331, 332, 333, 334, 335, 336, 337, 338,", "3020, 3021, 3022, 3023, 3024, 5000, 5001, 5005, 5006, 6000,", "\"\"\"Constant values.\"\"\" STATUS_SUCCESS = 
(0,) STATUS_AUTH_FAILED = (100, 101, 102,", "1052, 1053, 1054, 2551, 2552, 2556, 2557, 2558, 2559, 3000,", "236, 238, 240, 241, 242, 243, 244, 245, 246, 247,", "261, 262, 263, 264, 265, 266, 267, 271, 272, 275,", "284, 285, 286, 287, 288, 290, 293, 294, 295, 297,", "334, 335, 336, 337, 338, 339, 340, 341, 342, 343,", "352, 353, 380, 381, 382, 400, 501, 502, 503, 504,", "208, 209, 210, 211, 212, 213, 216, 217, 218, 220,", "217, 218, 220, 221, 223, 225, 227, 228, 229, 230,", "342, 343, 344, 345, 346, 347, 348, 349, 350, 351,", "381, 382, 400, 501, 502, 503, 504, 505, 506, 509,", "509, 510, 511, 523, 532, 3017, 3018, 3019, ) STATUS_UNAUTHORIZED", "326, 327, 328, 329, 330, 331, 332, 333, 334, 335,", "346, 347, 348, 349, 350, 351, 352, 353, 380, 381,", "319, 320, 322, 370, 371, 372, 373, 374, 375, 383,", "3023, 3024, 5000, 5001, 5005, 5006, 6000, 6010, 6011, 9000,", "506, 509, 510, 511, 523, 532, 3017, 3018, 3019, )", "230, 234, 235, 236, 238, 240, 241, 242, 243, 244,", "(0,) STATUS_AUTH_FAILED = (100, 101, 102, 200, 401) STATUS_INVALID_PARAMS =", "310, 311, 312, 313, 314, 315, 316, 317, 318, 319,", "279, 280, 281, 282, 289, 291, 292, 296, 298, 305,", "295, 297, 300, 301, 302, 303, 304, 321, 323, 324,", "2558, 2559, 3000, 3001, 3002, 3003, 3004, 3005, 3006, 3007,", "3008, 3009, 3010, 3011, 3012, 3013, 3014, 3015, 3016, 3020,", "238, 240, 241, 242, 243, 244, 245, 246, 247, 248,", "526, 527, 528, 529, 530, 531, 533, 602, 700, 1051,", "401) STATUS_INVALID_PARAMS = ( 201, 202, 203, 204, 205, 206,", "211, 212, 213, 216, 217, 218, 220, 221, 223, 225,", "221, 223, 225, 227, 228, 229, 230, 234, 235, 236,", "260, 261, 262, 263, 264, 265, 266, 267, 271, 272,", "259, 268, 269, 270, 273, 274, 278, 279, 280, 281,", "278, 279, 280, 281, 282, 289, 291, 292, 296, 298,", "521, 525, 526, 527, 528, 529, 530, 531, 533, 602,", "216, 217, 218, 220, 221, 223, 225, 227, 228, 229,", "313, 314, 315, 316, 317, 318, 319, 320, 322, 370,", "502, 503, 504, 505, 506, 509, 510, 511, 523, 532,", "344, 
345, 346, 347, 348, 349, 350, 351, 352, 353,", "382, 400, 501, 502, 503, 504, 505, 506, 509, 510,", "528, 529, 530, 531, 533, 602, 700, 1051, 1052, 1053,", "351, 352, 353, 380, 381, 382, 400, 501, 502, 503,", "256, 257, 258, 259, 268, 269, 270, 273, 274, 278,", "5000, 5001, 5005, 5006, 6000, 6010, 6011, 9000, 10000, )", "300, 301, 302, 303, 304, 321, 323, 324, 325, 326,", "311, 312, 313, 314, 315, 316, 317, 318, 319, 320,", "292, 296, 298, 305, 306, 308, 309, 310, 311, 312,", "3016, 3020, 3021, 3022, 3023, 3024, 5000, 5001, 5005, 5006,", "503, 504, 505, 506, 509, 510, 511, 523, 532, 3017,", "2556, 2557, 2558, 2559, 3000, 3001, 3002, 3003, 3004, 3005,", "288, 290, 293, 294, 295, 297, 300, 301, 302, 303,", "338, 339, 340, 341, 342, 343, 344, 345, 346, 347,", "316, 317, 318, 319, 320, 322, 370, 371, 372, 373,", "314, 315, 316, 317, 318, 319, 320, 322, 370, 371," ]
[ "if self.current_point: self.current_point = None return True return super(BezierTest, self).on_touch_up(touch)", "points = [x, y] for i in range(45, 360, 45):", "while increase offset self.bezier.dash_length = 100 - value self.bezier.dash_offset =", "value def _set_line_dash_offset(self, instance, value): # effect to reduce length", "self.line = Line( points=self.points+self.points[:2], dash_offset=10, dash_length=100) s = Slider(y=0, pos_hint={'x':", "- self.pos[1] - p[1]) < self.d): self.current_point = i +", "def on_touch_move(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): c = self.current_point if", "self.loop = loop self.current_point = None with self.canvas: Color(1.0, 0.0,", "p[0]) < self.d and abs(touch.pos[1] - self.pos[1] - p[1]) <", "- self.pos[0] - p[0]) < self.d and abs(touch.pos[1] - self.pos[1]", "class Main(App): def build(self): from math import cos, sin, radians", "def __init__(self, points=[], loop=False, *args, **kwargs): super(BezierTest, self).__init__(*args, **kwargs) self.d", "- value self.bezier.dash_offset = value def _set_line_dash_offset(self, instance, value): #", "0.0, 1.0) self.line = Line( points=self.points+self.points[:2], dash_offset=10, dash_length=100) s =", "- 1) * 2 + 1] = touch.pos[1] - self.pos[1]", "self).__init__(*args, **kwargs) self.d = 10 self.points = points self.loop =", "- value self.line.dash_offset = value def on_touch_down(self, touch): if self.collide_point(touch.pos[0],", "= 100 # Pacman ! points = [x, y] for", "! 
points = [x, y] for i in range(45, 360,", "None), height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s) def _set_bezier_dash_offset(self, instance, value): # effect", "l, y + sin(i) * l]) return BezierTest(points=points, loop=True) if", "Slider from kivy.graphics import Color, Bezier, Line class BezierTest(FloatLayout): def", "( abs(touch.pos[0] - self.pos[0] - p[0]) < self.d and abs(touch.pos[1]", "points self.loop = loop self.current_point = None with self.canvas: Color(1.0,", "* l]) return BezierTest(points=points, loop=True) if __name__ == '__main__': Main().run()", "< self.d): self.current_point = i + 1 return True return", "1) * 2 + 1] = touch.pos[1] - self.pos[1] self.bezier.points", "cos, sin, radians x = y = 150 l =", "s = Slider(y=50, pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s)", "dash_length=100, dash_offset=10) Color(1.0, 0.0, 1.0) self.line = Line( points=self.points+self.points[:2], dash_offset=10,", "touch): if self.collide_point(touch.pos[0], touch.pos[1]): if self.current_point: self.current_point = None return", "= None with self.canvas: Color(1.0, 0.0, 0.0) self.bezier = Bezier(", "self.bezier.dash_offset = value def _set_line_dash_offset(self, instance, value): # effect to", "touch.pos[1]): if self.current_point: self.current_point = None return True return super(BezierTest,", "self.collide_point(touch.pos[0], touch.pos[1]): if self.current_point: self.current_point = None return True return", "import cos, sin, radians x = y = 150 l", "100 - value self.line.dash_offset = value def on_touch_down(self, touch): if", "= 150 l = 100 # Pacman ! 
points =", "self).on_touch_down(touch) def on_touch_up(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): if self.current_point: self.current_point", "i = radians(i) points.extend([x + cos(i) * l, y +", "Bezier( points=self.points, segments=150, loop=self.loop, dash_length=100, dash_offset=10) Color(1.0, 0.0, 1.0) self.line", "sin(i) * l]) return BezierTest(points=points, loop=True) if __name__ == '__main__':", "p[1]) < self.d): self.current_point = i + 1 return True", "touch.pos[1]): for i, p in enumerate(list(zip(self.points[::2], self.points[1::2]))): if ( abs(touch.pos[0]", "self.pos[0] - p[0]) < self.d and abs(touch.pos[1] - self.pos[1] -", "_set_line_dash_offset(self, instance, value): # effect to reduce length while increase", "= 100 - value self.bezier.dash_offset = value def _set_line_dash_offset(self, instance,", "def on_touch_up(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): if self.current_point: self.current_point =", "Slider(y=50, pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s) def _set_bezier_dash_offset(self,", "for i in range(45, 360, 45): i = radians(i) points.extend([x", "from kivy.graphics import Color, Bezier, Line class BezierTest(FloatLayout): def __init__(self,", "True return super(BezierTest, self).on_touch_up(touch) def on_touch_move(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]):", "2] = touch.pos[0] - self.pos[0] self.points[(c - 1) * 2", "360, 45): i = radians(i) points.extend([x + cos(i) * l,", "# effect to reduce length while increase offset self.bezier.dash_length =", "offset self.bezier.dash_length = 100 - value self.bezier.dash_offset = value def", "self.points = points self.loop = loop self.current_point = None with", "offset self.line.dash_length = 100 - value self.line.dash_offset = value def", "+ 1] = touch.pos[1] - self.pos[1] self.bezier.points = self.points self.line.points", "None), height=50) 
s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s = Slider(y=50, pos_hint={'x': .3}, size_hint=(.7,", "_set_bezier_dash_offset(self, instance, value): # effect to reduce length while increase", "= 100 - value self.line.dash_offset = value def on_touch_down(self, touch):", "y] for i in range(45, 360, 45): i = radians(i)", "increase offset self.bezier.dash_length = 100 - value self.bezier.dash_offset = value", "touch.pos[0] - self.pos[0] self.points[(c - 1) * 2 + 1]", "value def on_touch_down(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): for i, p", "on_touch_down(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): for i, p in enumerate(list(zip(self.points[::2],", "class BezierTest(FloatLayout): def __init__(self, points=[], loop=False, *args, **kwargs): super(BezierTest, self).__init__(*args,", "= points self.loop = loop self.current_point = None with self.canvas:", "Main(App): def build(self): from math import cos, sin, radians x", "abs(touch.pos[0] - self.pos[0] - p[0]) < self.d and abs(touch.pos[1] -", "= self.points self.line.points = self.points + self.points[:2] return True return", "s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s = Slider(y=50, pos_hint={'x': .3}, size_hint=(.7, None), height=50)", "if self.collide_point(touch.pos[0], touch.pos[1]): for i, p in enumerate(list(zip(self.points[::2], self.points[1::2]))): if", "self.collide_point(touch.pos[0], touch.pos[1]): c = self.current_point if c: self.points[(c - 1)", "loop self.current_point = None with self.canvas: Color(1.0, 0.0, 0.0) self.bezier", "self.line.points = self.points + self.points[:2] return True return super(BezierTest, self).on_touch_move(touch)", "y + sin(i) * l]) return BezierTest(points=points, loop=True) if __name__", "on_touch_move(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): c = self.current_point if c:", "value self.bezier.dash_offset = value def _set_line_dash_offset(self, instance, value): # 
effect", "10 self.points = points self.loop = loop self.current_point = None", "pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s) def _set_bezier_dash_offset(self, instance,", "return True return super(BezierTest, self).on_touch_down(touch) def on_touch_up(self, touch): if self.collide_point(touch.pos[0],", "if self.collide_point(touch.pos[0], touch.pos[1]): if self.current_point: self.current_point = None return True", "s.bind(value=self._set_line_dash_offset) self.add_widget(s) def _set_bezier_dash_offset(self, instance, value): # effect to reduce", "**kwargs) self.d = 10 self.points = points self.loop = loop", "length while increase offset self.bezier.dash_length = 100 - value self.bezier.dash_offset", "increase offset self.line.dash_length = 100 - value self.line.dash_offset = value", "1] = touch.pos[1] - self.pos[1] self.bezier.points = self.points self.line.points =", "x = y = 150 l = 100 # Pacman", "Pacman ! points = [x, y] for i in range(45,", "abs(touch.pos[1] - self.pos[1] - p[1]) < self.d): self.current_point = i", "from kivy.app import App from kivy.uix.floatlayout import FloatLayout from kivy.uix.slider", "pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s = Slider(y=50,", "self.current_point: self.current_point = None return True return super(BezierTest, self).on_touch_up(touch) def", "= None return True return super(BezierTest, self).on_touch_up(touch) def on_touch_move(self, touch):", "+ cos(i) * l, y + sin(i) * l]) return", "= 10 self.points = points self.loop = loop self.current_point =", "self.points + self.points[:2] return True return super(BezierTest, self).on_touch_move(touch) class Main(App):", "- self.pos[1] self.bezier.points = self.points self.line.points = self.points + self.points[:2]", "100 # Pacman ! 
points = [x, y] for i", "+ self.points[:2] return True return super(BezierTest, self).on_touch_move(touch) class Main(App): def", "150 l = 100 # Pacman ! points = [x,", "def _set_bezier_dash_offset(self, instance, value): # effect to reduce length while", "dash_offset=10) Color(1.0, 0.0, 1.0) self.line = Line( points=self.points+self.points[:2], dash_offset=10, dash_length=100)", "App from kivy.uix.floatlayout import FloatLayout from kivy.uix.slider import Slider from", "1.0) self.line = Line( points=self.points+self.points[:2], dash_offset=10, dash_length=100) s = Slider(y=0,", "Slider(y=0, pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s =", "- p[0]) < self.d and abs(touch.pos[1] - self.pos[1] - p[1])", "[x, y] for i in range(45, 360, 45): i =", "in range(45, 360, 45): i = radians(i) points.extend([x + cos(i)", "self.pos[1] self.bezier.points = self.points self.line.points = self.points + self.points[:2] return", "return super(BezierTest, self).on_touch_move(touch) class Main(App): def build(self): from math import", "self.current_point if c: self.points[(c - 1) * 2] = touch.pos[0]", "= Line( points=self.points+self.points[:2], dash_offset=10, dash_length=100) s = Slider(y=0, pos_hint={'x': .3},", "touch): if self.collide_point(touch.pos[0], touch.pos[1]): for i, p in enumerate(list(zip(self.points[::2], self.points[1::2]))):", "+ 1 return True return super(BezierTest, self).on_touch_down(touch) def on_touch_up(self, touch):", "# Pacman ! 
points = [x, y] for i in", "+ sin(i) * l]) return BezierTest(points=points, loop=True) if __name__ ==", "import App from kivy.uix.floatlayout import FloatLayout from kivy.uix.slider import Slider", "c: self.points[(c - 1) * 2] = touch.pos[0] - self.pos[0]", "self.canvas: Color(1.0, 0.0, 0.0) self.bezier = Bezier( points=self.points, segments=150, loop=self.loop,", "Line class BezierTest(FloatLayout): def __init__(self, points=[], loop=False, *args, **kwargs): super(BezierTest,", "kivy.app import App from kivy.uix.floatlayout import FloatLayout from kivy.uix.slider import", "touch.pos[1]): c = self.current_point if c: self.points[(c - 1) *", "def _set_line_dash_offset(self, instance, value): # effect to reduce length while", "0.0, 0.0) self.bezier = Bezier( points=self.points, segments=150, loop=self.loop, dash_length=100, dash_offset=10)", "value): # effect to reduce length while increase offset self.bezier.dash_length", "reduce length while increase offset self.line.dash_length = 100 - value", "= [x, y] for i in range(45, 360, 45): i", "kivy.uix.floatlayout import FloatLayout from kivy.uix.slider import Slider from kivy.graphics import", "value): # effect to reduce length while increase offset self.line.dash_length", "size_hint=(.7, None), height=50) s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s = Slider(y=50, pos_hint={'x': .3},", "touch.pos[1] - self.pos[1] self.bezier.points = self.points self.line.points = self.points +", "self.points[1::2]))): if ( abs(touch.pos[0] - self.pos[0] - p[0]) < self.d", "= self.current_point if c: self.points[(c - 1) * 2] =", "self.current_point = None return True return super(BezierTest, self).on_touch_up(touch) def on_touch_move(self,", "loop=False, *args, **kwargs): super(BezierTest, self).__init__(*args, **kwargs) self.d = 10 self.points", "self.bezier.points = self.points self.line.points = self.points + self.points[:2] return True", "self.line.dash_offset = value def on_touch_down(self, touch): if 
self.collide_point(touch.pos[0], touch.pos[1]): for", "1) * 2] = touch.pos[0] - self.pos[0] self.points[(c - 1)", "reduce length while increase offset self.bezier.dash_length = 100 - value", "from math import cos, sin, radians x = y =", "build(self): from math import cos, sin, radians x = y", "to reduce length while increase offset self.bezier.dash_length = 100 -", "= touch.pos[1] - self.pos[1] self.bezier.points = self.points self.line.points = self.points", "i + 1 return True return super(BezierTest, self).on_touch_down(touch) def on_touch_up(self,", "self.bezier = Bezier( points=self.points, segments=150, loop=self.loop, dash_length=100, dash_offset=10) Color(1.0, 0.0,", "self.add_widget(s) s = Slider(y=50, pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_line_dash_offset)", "self).on_touch_up(touch) def on_touch_move(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): c = self.current_point", "c = self.current_point if c: self.points[(c - 1) * 2]", "self.current_point = None with self.canvas: Color(1.0, 0.0, 0.0) self.bezier =", "self.add_widget(s) def _set_bezier_dash_offset(self, instance, value): # effect to reduce length", "value self.line.dash_offset = value def on_touch_down(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]):", "self.points[:2] return True return super(BezierTest, self).on_touch_move(touch) class Main(App): def build(self):", "s = Slider(y=0, pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_bezier_dash_offset) self.add_widget(s)", "return True return super(BezierTest, self).on_touch_up(touch) def on_touch_move(self, touch): if self.collide_point(touch.pos[0],", "dash_offset=10, dash_length=100) s = Slider(y=0, pos_hint={'x': .3}, size_hint=(.7, None), height=50)", "**kwargs): super(BezierTest, self).__init__(*args, **kwargs) self.d = 10 self.points = points", "None return True return super(BezierTest, self).on_touch_up(touch) def on_touch_move(self, touch): 
if", "sin, radians x = y = 150 l = 100", "to reduce length while increase offset self.line.dash_length = 100 -", "Line( points=self.points+self.points[:2], dash_offset=10, dash_length=100) s = Slider(y=0, pos_hint={'x': .3}, size_hint=(.7,", "= touch.pos[0] - self.pos[0] self.points[(c - 1) * 2 +", "cos(i) * l, y + sin(i) * l]) return BezierTest(points=points,", "i, p in enumerate(list(zip(self.points[::2], self.points[1::2]))): if ( abs(touch.pos[0] - self.pos[0]", "#!/usr/bin/env python from kivy.app import App from kivy.uix.floatlayout import FloatLayout", "Bezier, Line class BezierTest(FloatLayout): def __init__(self, points=[], loop=False, *args, **kwargs):", "super(BezierTest, self).__init__(*args, **kwargs) self.d = 10 self.points = points self.loop", "with self.canvas: Color(1.0, 0.0, 0.0) self.bezier = Bezier( points=self.points, segments=150,", "0.0) self.bezier = Bezier( points=self.points, segments=150, loop=self.loop, dash_length=100, dash_offset=10) Color(1.0,", "* 2 + 1] = touch.pos[1] - self.pos[1] self.bezier.points =", "# effect to reduce length while increase offset self.line.dash_length =", "enumerate(list(zip(self.points[::2], self.points[1::2]))): if ( abs(touch.pos[0] - self.pos[0] - p[0]) <", "Color(1.0, 0.0, 1.0) self.line = Line( points=self.points+self.points[:2], dash_offset=10, dash_length=100) s", "points.extend([x + cos(i) * l, y + sin(i) * l])", "- self.pos[0] self.points[(c - 1) * 2 + 1] =", "= radians(i) points.extend([x + cos(i) * l, y + sin(i)", "* l, y + sin(i) * l]) return BezierTest(points=points, loop=True)", "self.d): self.current_point = i + 1 return True return super(BezierTest,", "1 return True return super(BezierTest, self).on_touch_down(touch) def on_touch_up(self, touch): if", "import Color, Bezier, Line class BezierTest(FloatLayout): def __init__(self, points=[], loop=False,", ".3}, size_hint=(.7, None), height=50) s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s = Slider(y=50, pos_hint={'x':", 
"self.points self.line.points = self.points + self.points[:2] return True return super(BezierTest,", "effect to reduce length while increase offset self.line.dash_length = 100", "points=self.points, segments=150, loop=self.loop, dash_length=100, dash_offset=10) Color(1.0, 0.0, 1.0) self.line =", "= value def _set_line_dash_offset(self, instance, value): # effect to reduce", "self.bezier.dash_length = 100 - value self.bezier.dash_offset = value def _set_line_dash_offset(self,", "return super(BezierTest, self).on_touch_up(touch) def on_touch_move(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): c", "self.collide_point(touch.pos[0], touch.pos[1]): for i, p in enumerate(list(zip(self.points[::2], self.points[1::2]))): if (", "radians x = y = 150 l = 100 #", "super(BezierTest, self).on_touch_move(touch) class Main(App): def build(self): from math import cos,", "2 + 1] = touch.pos[1] - self.pos[1] self.bezier.points = self.points", "height=50) s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s = Slider(y=50, pos_hint={'x': .3}, size_hint=(.7, None),", "loop=self.loop, dash_length=100, dash_offset=10) Color(1.0, 0.0, 1.0) self.line = Line( points=self.points+self.points[:2],", "instance, value): # effect to reduce length while increase offset", "self.points[(c - 1) * 2] = touch.pos[0] - self.pos[0] self.points[(c", "= Bezier( points=self.points, segments=150, loop=self.loop, dash_length=100, dash_offset=10) Color(1.0, 0.0, 1.0)", ".3}, size_hint=(.7, None), height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s) def _set_bezier_dash_offset(self, instance, value):", "touch): if self.collide_point(touch.pos[0], touch.pos[1]): c = self.current_point if c: self.points[(c", "return True return super(BezierTest, self).on_touch_move(touch) class Main(App): def build(self): from", "self.pos[1] - p[1]) < self.d): self.current_point = i + 1", "= loop self.current_point = None with self.canvas: Color(1.0, 0.0, 0.0)", "kivy.graphics import 
Color, Bezier, Line class BezierTest(FloatLayout): def __init__(self, points=[],", "and abs(touch.pos[1] - self.pos[1] - p[1]) < self.d): self.current_point =", "- p[1]) < self.d): self.current_point = i + 1 return", "import Slider from kivy.graphics import Color, Bezier, Line class BezierTest(FloatLayout):", "radians(i) points.extend([x + cos(i) * l, y + sin(i) *", "p in enumerate(list(zip(self.points[::2], self.points[1::2]))): if ( abs(touch.pos[0] - self.pos[0] -", "y = 150 l = 100 # Pacman ! points", "l = 100 # Pacman ! points = [x, y]", "self).on_touch_move(touch) class Main(App): def build(self): from math import cos, sin,", "on_touch_up(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): if self.current_point: self.current_point = None", "i in range(45, 360, 45): i = radians(i) points.extend([x +", "if c: self.points[(c - 1) * 2] = touch.pos[0] -", "points=[], loop=False, *args, **kwargs): super(BezierTest, self).__init__(*args, **kwargs) self.d = 10", "for i, p in enumerate(list(zip(self.points[::2], self.points[1::2]))): if ( abs(touch.pos[0] -", "= Slider(y=50, pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s) def", "length while increase offset self.line.dash_length = 100 - value self.line.dash_offset", "self.line.dash_length = 100 - value self.line.dash_offset = value def on_touch_down(self,", "* 2] = touch.pos[0] - self.pos[0] self.points[(c - 1) *", "while increase offset self.line.dash_length = 100 - value self.line.dash_offset =", "return super(BezierTest, self).on_touch_down(touch) def on_touch_up(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): if", "< self.d and abs(touch.pos[1] - self.pos[1] - p[1]) < self.d):", "super(BezierTest, self).on_touch_up(touch) def on_touch_move(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): c =", "from kivy.uix.slider import Slider from kivy.graphics import Color, Bezier, Line", "import FloatLayout from 
kivy.uix.slider import Slider from kivy.graphics import Color,", "python from kivy.app import App from kivy.uix.floatlayout import FloatLayout from", "= self.points + self.points[:2] return True return super(BezierTest, self).on_touch_move(touch) class", "45): i = radians(i) points.extend([x + cos(i) * l, y", "height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s) def _set_bezier_dash_offset(self, instance, value): # effect to", "True return super(BezierTest, self).on_touch_move(touch) class Main(App): def build(self): from math", "self.pos[0] self.points[(c - 1) * 2 + 1] = touch.pos[1]", "effect to reduce length while increase offset self.bezier.dash_length = 100", "True return super(BezierTest, self).on_touch_down(touch) def on_touch_up(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]):", "self.points[(c - 1) * 2 + 1] = touch.pos[1] -", "Color(1.0, 0.0, 0.0) self.bezier = Bezier( points=self.points, segments=150, loop=self.loop, dash_length=100,", "*args, **kwargs): super(BezierTest, self).__init__(*args, **kwargs) self.d = 10 self.points =", "= y = 150 l = 100 # Pacman !", "from kivy.uix.floatlayout import FloatLayout from kivy.uix.slider import Slider from kivy.graphics", "size_hint=(.7, None), height=50) s.bind(value=self._set_line_dash_offset) self.add_widget(s) def _set_bezier_dash_offset(self, instance, value): #", "= value def on_touch_down(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): for i,", "super(BezierTest, self).on_touch_down(touch) def on_touch_up(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): if self.current_point:", "if self.collide_point(touch.pos[0], touch.pos[1]): c = self.current_point if c: self.points[(c -", "points=self.points+self.points[:2], dash_offset=10, dash_length=100) s = Slider(y=0, pos_hint={'x': .3}, size_hint=(.7, None),", "FloatLayout from kivy.uix.slider import Slider from kivy.graphics import Color, Bezier,", "dash_length=100) s = Slider(y=0, pos_hint={'x': 
.3}, size_hint=(.7, None), height=50) s.bind(value=self._set_bezier_dash_offset)", "kivy.uix.slider import Slider from kivy.graphics import Color, Bezier, Line class", "in enumerate(list(zip(self.points[::2], self.points[1::2]))): if ( abs(touch.pos[0] - self.pos[0] - p[0])", "self.current_point = i + 1 return True return super(BezierTest, self).on_touch_down(touch)", "segments=150, loop=self.loop, dash_length=100, dash_offset=10) Color(1.0, 0.0, 1.0) self.line = Line(", "- 1) * 2] = touch.pos[0] - self.pos[0] self.points[(c -", "100 - value self.bezier.dash_offset = value def _set_line_dash_offset(self, instance, value):", "def on_touch_down(self, touch): if self.collide_point(touch.pos[0], touch.pos[1]): for i, p in", "__init__(self, points=[], loop=False, *args, **kwargs): super(BezierTest, self).__init__(*args, **kwargs) self.d =", "math import cos, sin, radians x = y = 150", "None with self.canvas: Color(1.0, 0.0, 0.0) self.bezier = Bezier( points=self.points,", "def build(self): from math import cos, sin, radians x =", "= i + 1 return True return super(BezierTest, self).on_touch_down(touch) def", "range(45, 360, 45): i = radians(i) points.extend([x + cos(i) *", "BezierTest(FloatLayout): def __init__(self, points=[], loop=False, *args, **kwargs): super(BezierTest, self).__init__(*args, **kwargs)", "self.d = 10 self.points = points self.loop = loop self.current_point", "= Slider(y=0, pos_hint={'x': .3}, size_hint=(.7, None), height=50) s.bind(value=self._set_bezier_dash_offset) self.add_widget(s) s", "if ( abs(touch.pos[0] - self.pos[0] - p[0]) < self.d and", "Color, Bezier, Line class BezierTest(FloatLayout): def __init__(self, points=[], loop=False, *args,", "self.d and abs(touch.pos[1] - self.pos[1] - p[1]) < self.d): self.current_point" ]
[ "('junior') must not have ability to add comment to rdb$admin", "by tmp$c6489_junior'; commit; connect '$(DSN)' user tmp$c6489_senior password '<PASSWORD>'; comment", "user tmp$c6489_junior password '<PASSWORD>' using plugin Srp; create or alter", "('(-)?Effective user is.*', '')] init_script_1 = \"\"\"\"\"\" db_1 = db_factory(sql_dialect=3,", "password '<PASSWORD>' using plugin Srp; commit; grant alter any role", "= \"\"\" Comment by tmp$c6489_senior \"\"\" expected_stderr_1 = \"\"\" Statement", "password '<PASSWORD>' using plugin Srp; create or alter user tmp$c6489_senior", "role rdb$admin is null; commit; connect '$(DSN)' user 'SYSDBA' password", "password '<PASSWORD>'; drop user tmp$c6489_junior using plugin Srp; drop user", "= \"\"\"\"\"\" db_1 = db_factory(sql_dialect=3, init=init_script_1) test_script_1 = \"\"\" create", "on; select r.rdb$description as role_descr_blob_id from rdb$roles r where r.rdb$role_name", "expected_stderr_1 act_1.execute() assert act_1.clean_expected_stderr == act_1.clean_stderr assert act_1.clean_expected_stdout == act_1.clean_stdout", "rdb$admin is null; commit; connect '$(DSN)' user 'SYSDBA' password '<PASSWORD>';", "3.0.8.33425 # Checked on: 4.0.0.2387, 3.0.8.33426 -- all OK. 
#", "3.0.8 # qmid: None import pytest from firebird.qa import db_factory,", "Comment by tmp$c6489_senior \"\"\" expected_stderr_1 = \"\"\" Statement failed, SQLSTATE", "drop user tmp$c6489_junior using plugin Srp; drop user tmp$c6489_senior using", "commit; grant alter any role to user tmp$c6489_senior; commit; connect", "expected_stdout_1 act_1.expected_stderr = expected_stderr_1 act_1.execute() assert act_1.clean_expected_stderr == act_1.clean_stderr assert", "or alter user tmp$c6489_senior password '<PASSWORD>' using plugin Srp; commit;", "comment on role rdb$admin is 'Comment by tmp$c6489_junior'; commit; connect", "'Comment by tmp$c6489_senior'; commit; set list on; select r.rdb$description as", "of them has no any rights, second is granted with", "init=init_script_1) test_script_1 = \"\"\" create or alter user tmp$c6489_junior password", "using plugin Srp; drop user tmp$c6489_senior using plugin Srp; commit;", "'$(DSN)' user 'SYSDBA' password '<PASSWORD>'; drop user tmp$c6489_junior using plugin", "ALTER access to ROLE RDB$ADMIN -Effective user is TMP$C6489_JUNIOR \"\"\"", "substitutions_1 = [('ROLE_DESCR_BLOB_ID .*', ''), ('[\\t ]+', ' '), ('(-)?Effective", "db_factory(sql_dialect=3, init=init_script_1) test_script_1 = \"\"\" create or alter user tmp$c6489_junior", "as role_descr_blob_id from rdb$roles r where r.rdb$role_name = upper('rdb$admin'); commit;", "no any rights, second is granted with 'alter any role'", "any role' privilege. # First user ('junior') must not have", "privilege. 
# First user ('junior') must not have ability to", "Confirmed bug on 4.0.0.2384, 3.0.8.33425 # Checked on: 4.0.0.2387, 3.0.8.33426", "password '<PASSWORD>'; comment on role rdb$admin is 'Comment by tmp$c6489_senior';", "failed -no permission for ALTER access to ROLE RDB$ADMIN -Effective", "bug on 4.0.0.2384, 3.0.8.33425 # Checked on: 4.0.0.2387, 3.0.8.33426 --", "TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.expected_stderr", "act_1.expected_stdout = expected_stdout_1 act_1.expected_stderr = expected_stderr_1 act_1.execute() assert act_1.clean_expected_stderr ==", "set comment to any string and make it null. #", "where r.rdb$role_name = upper('rdb$admin'); commit; comment on role rdb$admin is", "commit; set list on; select r.rdb$description as role_descr_blob_id from rdb$roles", "is.*', '')] init_script_1 = \"\"\"\"\"\" db_1 = db_factory(sql_dialect=3, init=init_script_1) test_script_1", "# # Confirmed bug on 4.0.0.2384, 3.0.8.33425 # Checked on:", "# tracker_id: CORE-6489 # min_versions: ['3.0.8'] # versions: 3.0.8 #", "permission for ALTER access to ROLE RDB$ADMIN -Effective user is", "# title: User without ALTER ANY ROLE privilege can use", "to set comment to any string and make it null.", "using plugin Srp; commit; grant alter any role to user", "on role rdb$admin is null; commit; connect '$(DSN)' user 'SYSDBA'", "comment to rdb$admin role, but second ('senior') must # be", "add comment to rdb$admin role, but second ('senior') must #", "presents only in FB 4.x and is suppressed here. 
#", "4.0.0.2384, 3.0.8.33425 # Checked on: 4.0.0.2387, 3.0.8.33426 -- all OK.", "ability to add comment to rdb$admin role, but second ('senior')", "('[\\t ]+', ' '), ('(-)?Effective user is.*', '')] init_script_1 =", "commit; connect '$(DSN)' user tmp$c6489_senior password '<PASSWORD>'; comment on role", "from rdb$roles r where r.rdb$role_name = upper('rdb$admin'); commit; comment on", "alter any role to user tmp$c6489_senior; commit; connect '$(DSN)' user", "by tmp$c6489_senior \"\"\" expected_stderr_1 = \"\"\" Statement failed, SQLSTATE =", "failed, SQLSTATE = 28000 unsuccessful metadata update -COMMENT ON RDB$ADMIN", "firebird.qa import db_factory, isql_act, Action # version: 3.0.8 # resources:", "('senior') must # be able to set comment to any", "test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" Comment by tmp$c6489_senior \"\"\" expected_stderr_1", "First user ('junior') must not have ability to add comment", "is TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1", "bugs.core_6489 # title: User without ALTER ANY ROLE privilege can", "by tmp$c6489_senior'; commit; set list on; select r.rdb$description as role_descr_blob_id", "role, but second ('senior') must # be able to set", "comment to any string and make it null. # #", "any role to user tmp$c6489_senior; commit; connect '$(DSN)' user tmp$c6489_junior", "is 'Comment by tmp$c6489_junior'; commit; connect '$(DSN)' user tmp$c6489_senior password", "init_script_1 = \"\"\"\"\"\" db_1 = db_factory(sql_dialect=3, init=init_script_1) test_script_1 = \"\"\"", "# Confirmed bug on 4.0.0.2384, 3.0.8.33425 # Checked on: 4.0.0.2387,", "= expected_stderr_1 act_1.execute() assert act_1.clean_expected_stderr == act_1.clean_stderr assert act_1.clean_expected_stdout ==", "with 'alter any role' privilege. # First user ('junior') must", "''), ('[\\t ]+', ' '), ('(-)?Effective user is.*', '')] init_script_1", "role' privilege. 
# First user ('junior') must not have ability", "ON RDB$ADMIN failed -no permission for ALTER access to ROLE", "'<PASSWORD>' using plugin Srp; commit; grant alter any role to", "# phrase '-Effective user is ...' presents only in FB", "commit; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\"", "create or alter user tmp$c6489_junior password '<PASSWORD>' using plugin Srp;", ".*', ''), ('[\\t ]+', ' '), ('(-)?Effective user is.*', '')]", "plugin Srp; commit; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1", "-no permission for ALTER access to ROLE RDB$ADMIN -Effective user", "two users: one of them has no any rights, second", "to any string and make it null. # # Confirmed", "ROLE # decription: # Test creates two users: one of", "tracker_id: CORE-6489 # min_versions: ['3.0.8'] # versions: 3.0.8 # qmid:", "tmp$c6489_junior password '<PASSWORD>'; comment on role rdb$admin is 'Comment by", "isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" Comment by tmp$c6489_senior \"\"\"", "metadata update -COMMENT ON RDB$ADMIN failed -no permission for ALTER", "creates two users: one of them has no any rights,", "# Checked on: 4.0.0.2387, 3.0.8.33426 -- all OK. # #", "# # tracker_id: CORE-6489 # min_versions: ['3.0.8'] # versions: 3.0.8", "User without ALTER ANY ROLE privilege can use COMMENT ON", "null; commit; connect '$(DSN)' user 'SYSDBA' password '<PASSWORD>'; drop user", "# qmid: None import pytest from firebird.qa import db_factory, isql_act,", "is ...' 
presents only in FB 4.x and is suppressed", "= \"\"\" create or alter user tmp$c6489_junior password '<PASSWORD>' using", "on role rdb$admin is 'Comment by tmp$c6489_senior'; commit; set list", "\"\"\" expected_stderr_1 = \"\"\" Statement failed, SQLSTATE = 28000 unsuccessful", "Test creates two users: one of them has no any", "Action # version: 3.0.8 # resources: None substitutions_1 = [('ROLE_DESCR_BLOB_ID", "access to ROLE RDB$ADMIN -Effective user is TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8')", "second is granted with 'alter any role' privilege. # First", "Srp; create or alter user tmp$c6489_senior password '<PASSWORD>' using plugin", "['3.0.8'] # versions: 3.0.8 # qmid: None import pytest from", "user tmp$c6489_senior; commit; connect '$(DSN)' user tmp$c6489_junior password '<PASSWORD>'; comment", "in FB 4.x and is suppressed here. # # tracker_id:", "string and make it null. # # Confirmed bug on", "None substitutions_1 = [('ROLE_DESCR_BLOB_ID .*', ''), ('[\\t ]+', ' '),", "must # be able to set comment to any string", "r.rdb$description as role_descr_blob_id from rdb$roles r where r.rdb$role_name = upper('rdb$admin');", "plugin Srp; commit; grant alter any role to user tmp$c6489_senior;", "db_1 = db_factory(sql_dialect=3, init=init_script_1) test_script_1 = \"\"\" create or alter", "title: User without ALTER ANY ROLE privilege can use COMMENT", "is granted with 'alter any role' privilege. # First user", "version: 3.0.8 # resources: None substitutions_1 = [('ROLE_DESCR_BLOB_ID .*', ''),", "grant alter any role to user tmp$c6489_senior; commit; connect '$(DSN)'", "r.rdb$role_name = upper('rdb$admin'); commit; comment on role rdb$admin is null;", "all OK. 
# # NOTE: # phrase '-Effective user is", "is 'Comment by tmp$c6489_senior'; commit; set list on; select r.rdb$description", "#coding:utf-8 # # id: bugs.core_6489 # title: User without ALTER", "but second ('senior') must # be able to set comment", "= [('ROLE_DESCR_BLOB_ID .*', ''), ('[\\t ]+', ' '), ('(-)?Effective user", "'SYSDBA' password '<PASSWORD>'; drop user tmp$c6489_junior using plugin Srp; drop", "rdb$roles r where r.rdb$role_name = upper('rdb$admin'); commit; comment on role", "# decription: # Test creates two users: one of them", "= upper('rdb$admin'); commit; comment on role rdb$admin is null; commit;", "make it null. # # Confirmed bug on 4.0.0.2384, 3.0.8.33425", "# NOTE: # phrase '-Effective user is ...' presents only", "second ('senior') must # be able to set comment to", "'-Effective user is ...' presents only in FB 4.x and", "= 28000 unsuccessful metadata update -COMMENT ON RDB$ADMIN failed -no", "...' presents only in FB 4.x and is suppressed here.", "create or alter user tmp$c6489_senior password '<PASSWORD>' using plugin Srp;", "NOTE: # phrase '-Effective user is ...' 
presents only in", "28000 unsuccessful metadata update -COMMENT ON RDB$ADMIN failed -no permission", "pytest from firebird.qa import db_factory, isql_act, Action # version: 3.0.8", "'<PASSWORD>'; comment on role rdb$admin is 'Comment by tmp$c6489_junior'; commit;", "upper('rdb$admin'); commit; comment on role rdb$admin is null; commit; connect", "= \"\"\" Statement failed, SQLSTATE = 28000 unsuccessful metadata update", "without ALTER ANY ROLE privilege can use COMMENT ON ROLE", "can use COMMENT ON ROLE # decription: # Test creates", "tmp$c6489_senior password '<PASSWORD>'; comment on role rdb$admin is 'Comment by", "# version: 3.0.8 # resources: None substitutions_1 = [('ROLE_DESCR_BLOB_ID .*',", "' '), ('(-)?Effective user is.*', '')] init_script_1 = \"\"\"\"\"\" db_1", "them has no any rights, second is granted with 'alter", "Srp; commit; grant alter any role to user tmp$c6489_senior; commit;", "rdb$admin is 'Comment by tmp$c6489_junior'; commit; connect '$(DSN)' user tmp$c6489_senior", "= isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" Comment by tmp$c6489_senior", "have ability to add comment to rdb$admin role, but second", "plugin Srp; create or alter user tmp$c6489_senior password '<PASSWORD>' using", "role_descr_blob_id from rdb$roles r where r.rdb$role_name = upper('rdb$admin'); commit; comment", "update -COMMENT ON RDB$ADMIN failed -no permission for ALTER access", "tmp$c6489_senior using plugin Srp; commit; \"\"\" act_1 = isql_act('db_1', test_script_1,", "SQLSTATE = 28000 unsuccessful metadata update -COMMENT ON RDB$ADMIN failed", "list on; select r.rdb$description as role_descr_blob_id from rdb$roles r where", "'Comment by tmp$c6489_junior'; commit; connect '$(DSN)' user tmp$c6489_senior password '<PASSWORD>';", "suppressed here. 
# # tracker_id: CORE-6489 # min_versions: ['3.0.8'] #", "able to set comment to any string and make it", "connect '$(DSN)' user tmp$c6489_senior password '<PASSWORD>'; comment on role rdb$admin", "# Test creates two users: one of them has no", "ROLE privilege can use COMMENT ON ROLE # decription: #", "\"\"\" Statement failed, SQLSTATE = 28000 unsuccessful metadata update -COMMENT", "any string and make it null. # # Confirmed bug", "\"\"\" create or alter user tmp$c6489_junior password '<PASSWORD>' using plugin", "password '<PASSWORD>'; comment on role rdb$admin is 'Comment by tmp$c6489_junior';", "act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" Comment by", "is null; commit; connect '$(DSN)' user 'SYSDBA' password '<PASSWORD>'; drop", "rdb$admin is 'Comment by tmp$c6489_senior'; commit; set list on; select", "tmp$c6489_junior using plugin Srp; drop user tmp$c6489_senior using plugin Srp;", "]+', ' '), ('(-)?Effective user is.*', '')] init_script_1 = \"\"\"\"\"\"", "\"\"\" Comment by tmp$c6489_senior \"\"\" expected_stderr_1 = \"\"\" Statement failed,", "-COMMENT ON RDB$ADMIN failed -no permission for ALTER access to", "ON ROLE # decription: # Test creates two users: one", "connect '$(DSN)' user 'SYSDBA' password '<PASSWORD>'; drop user tmp$c6489_junior using", "on: 4.0.0.2387, 3.0.8.33426 -- all OK. 
# # NOTE: #", "user tmp$c6489_senior using plugin Srp; commit; \"\"\" act_1 = isql_act('db_1',", "3.0.8 # resources: None substitutions_1 = [('ROLE_DESCR_BLOB_ID .*', ''), ('[\\t", "ROLE RDB$ADMIN -Effective user is TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8') def test_1(act_1:", "\"\"\" @pytest.mark.version('>=3.0.8') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.expected_stderr =", "Srp; drop user tmp$c6489_senior using plugin Srp; commit; \"\"\" act_1", "for ALTER access to ROLE RDB$ADMIN -Effective user is TMP$C6489_JUNIOR", "'$(DSN)' user tmp$c6489_junior password '<PASSWORD>'; comment on role rdb$admin is", "# resources: None substitutions_1 = [('ROLE_DESCR_BLOB_ID .*', ''), ('[\\t ]+',", "resources: None substitutions_1 = [('ROLE_DESCR_BLOB_ID .*', ''), ('[\\t ]+', '", "3.0.8.33426 -- all OK. # # NOTE: # phrase '-Effective", "# # id: bugs.core_6489 # title: User without ALTER ANY", "@pytest.mark.version('>=3.0.8') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.expected_stderr = expected_stderr_1", "tmp$c6489_junior'; commit; connect '$(DSN)' user tmp$c6489_senior password '<PASSWORD>'; comment on", "OK. # # NOTE: # phrase '-Effective user is ...'", "or alter user tmp$c6489_junior password '<PASSWORD>' using plugin Srp; create", "to ROLE RDB$ADMIN -Effective user is TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8') def", "Srp; commit; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 =", "'alter any role' privilege. 
# First user ('junior') must not", "user tmp$c6489_senior password '<PASSWORD>'; comment on role rdb$admin is 'Comment", "id: bugs.core_6489 # title: User without ALTER ANY ROLE privilege", "ALTER ANY ROLE privilege can use COMMENT ON ROLE #", "using plugin Srp; create or alter user tmp$c6489_senior password '<PASSWORD>'", "comment on role rdb$admin is null; commit; connect '$(DSN)' user", "from firebird.qa import db_factory, isql_act, Action # version: 3.0.8 #", "user is ...' presents only in FB 4.x and is", "# id: bugs.core_6489 # title: User without ALTER ANY ROLE", "Statement failed, SQLSTATE = 28000 unsuccessful metadata update -COMMENT ON", "any rights, second is granted with 'alter any role' privilege.", "alter user tmp$c6489_junior password '<PASSWORD>' using plugin Srp; create or", "comment on role rdb$admin is 'Comment by tmp$c6489_senior'; commit; set", "substitutions=substitutions_1) expected_stdout_1 = \"\"\" Comment by tmp$c6489_senior \"\"\" expected_stderr_1 =", "to add comment to rdb$admin role, but second ('senior') must", "Action): act_1.expected_stdout = expected_stdout_1 act_1.expected_stderr = expected_stderr_1 act_1.execute() assert act_1.clean_expected_stderr", "role to user tmp$c6489_senior; commit; connect '$(DSN)' user tmp$c6489_junior password", "one of them has no any rights, second is granted", "# First user ('junior') must not have ability to add", "# # NOTE: # phrase '-Effective user is ...' presents", "4.0.0.2387, 3.0.8.33426 -- all OK. # # NOTE: # phrase", "4.x and is suppressed here. 
# # tracker_id: CORE-6489 #", "versions: 3.0.8 # qmid: None import pytest from firebird.qa import", "user tmp$c6489_junior password '<PASSWORD>'; comment on role rdb$admin is 'Comment", "RDB$ADMIN failed -no permission for ALTER access to ROLE RDB$ADMIN", "user 'SYSDBA' password '<PASSWORD>'; drop user tmp$c6489_junior using plugin Srp;", "rdb$admin role, but second ('senior') must # be able to", "# min_versions: ['3.0.8'] # versions: 3.0.8 # qmid: None import", "'$(DSN)' user tmp$c6489_senior password '<PASSWORD>'; comment on role rdb$admin is", "'<PASSWORD>'; drop user tmp$c6489_junior using plugin Srp; drop user tmp$c6489_senior", "here. # # tracker_id: CORE-6489 # min_versions: ['3.0.8'] # versions:", "expected_stdout_1 = \"\"\" Comment by tmp$c6489_senior \"\"\" expected_stderr_1 = \"\"\"", "rights, second is granted with 'alter any role' privilege. #", "plugin Srp; drop user tmp$c6489_senior using plugin Srp; commit; \"\"\"", "null. # # Confirmed bug on 4.0.0.2384, 3.0.8.33425 # Checked", "FB 4.x and is suppressed here. # # tracker_id: CORE-6489", "'')] init_script_1 = \"\"\"\"\"\" db_1 = db_factory(sql_dialect=3, init=init_script_1) test_script_1 =", "set list on; select r.rdb$description as role_descr_blob_id from rdb$roles r", "min_versions: ['3.0.8'] # versions: 3.0.8 # qmid: None import pytest", "unsuccessful metadata update -COMMENT ON RDB$ADMIN failed -no permission for", "isql_act, Action # version: 3.0.8 # resources: None substitutions_1 =", "expected_stderr_1 = \"\"\" Statement failed, SQLSTATE = 28000 unsuccessful metadata", "tmp$c6489_senior password '<PASSWORD>' using plugin Srp; commit; grant alter any", "db_factory, isql_act, Action # version: 3.0.8 # resources: None substitutions_1", "ANY ROLE privilege can use COMMENT ON ROLE # decription:", "it null. # # Confirmed bug on 4.0.0.2384, 3.0.8.33425 #", "user tmp$c6489_senior password '<PASSWORD>' using plugin Srp; commit; grant alter", "and is suppressed here. 
# # tracker_id: CORE-6489 # min_versions:", "tmp$c6489_senior \"\"\" expected_stderr_1 = \"\"\" Statement failed, SQLSTATE = 28000", "[('ROLE_DESCR_BLOB_ID .*', ''), ('[\\t ]+', ' '), ('(-)?Effective user is.*',", "tmp$c6489_senior'; commit; set list on; select r.rdb$description as role_descr_blob_id from", "tmp$c6489_junior password '<PASSWORD>' using plugin Srp; create or alter user", "tmp$c6489_senior; commit; connect '$(DSN)' user tmp$c6489_junior password '<PASSWORD>'; comment on", "CORE-6489 # min_versions: ['3.0.8'] # versions: 3.0.8 # qmid: None", "\"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" Comment", "not have ability to add comment to rdb$admin role, but", "and make it null. # # Confirmed bug on 4.0.0.2384,", "commit; connect '$(DSN)' user tmp$c6489_junior password '<PASSWORD>'; comment on role", "\"\"\"\"\"\" db_1 = db_factory(sql_dialect=3, init=init_script_1) test_script_1 = \"\"\" create or", "granted with 'alter any role' privilege. 
# First user ('junior')", "r where r.rdb$role_name = upper('rdb$admin'); commit; comment on role rdb$admin", "'), ('(-)?Effective user is.*', '')] init_script_1 = \"\"\"\"\"\" db_1 =", "qmid: None import pytest from firebird.qa import db_factory, isql_act, Action", "'<PASSWORD>' using plugin Srp; create or alter user tmp$c6489_senior password", "<filename>tests/bugs/core_6489_test.py<gh_stars>0 #coding:utf-8 # # id: bugs.core_6489 # title: User without", "decription: # Test creates two users: one of them has", "role rdb$admin is 'Comment by tmp$c6489_junior'; commit; connect '$(DSN)' user", "# be able to set comment to any string and", "None import pytest from firebird.qa import db_factory, isql_act, Action #", "using plugin Srp; commit; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)", "to user tmp$c6489_senior; commit; connect '$(DSN)' user tmp$c6489_junior password '<PASSWORD>';", "user is TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8') def test_1(act_1: Action): act_1.expected_stdout =", "test_script_1 = \"\"\" create or alter user tmp$c6489_junior password '<PASSWORD>'", "def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.expected_stderr = expected_stderr_1 act_1.execute()", "'<PASSWORD>'; comment on role rdb$admin is 'Comment by tmp$c6489_senior'; commit;", "import pytest from firebird.qa import db_factory, isql_act, Action # version:", "role rdb$admin is 'Comment by tmp$c6489_senior'; commit; set list on;", "act_1.expected_stderr = expected_stderr_1 act_1.execute() assert act_1.clean_expected_stderr == act_1.clean_stderr assert act_1.clean_expected_stdout", "COMMENT ON ROLE # decription: # Test creates two users:", "users: one of them has no any rights, second is", "user ('junior') must not have ability to add comment to", "is suppressed here. 
# # tracker_id: CORE-6489 # min_versions: ['3.0.8']", "commit; comment on role rdb$admin is null; commit; connect '$(DSN)'", "must not have ability to add comment to rdb$admin role,", "test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.expected_stderr = expected_stderr_1 act_1.execute() assert", "alter user tmp$c6489_senior password '<PASSWORD>' using plugin Srp; commit; grant", "commit; connect '$(DSN)' user 'SYSDBA' password '<PASSWORD>'; drop user tmp$c6489_junior", "on role rdb$admin is 'Comment by tmp$c6489_junior'; commit; connect '$(DSN)'", "= db_factory(sql_dialect=3, init=init_script_1) test_script_1 = \"\"\" create or alter user", "select r.rdb$description as role_descr_blob_id from rdb$roles r where r.rdb$role_name =", "use COMMENT ON ROLE # decription: # Test creates two", "privilege can use COMMENT ON ROLE # decription: # Test", "user is.*', '')] init_script_1 = \"\"\"\"\"\" db_1 = db_factory(sql_dialect=3, init=init_script_1)", "connect '$(DSN)' user tmp$c6489_junior password '<PASSWORD>'; comment on role rdb$admin", "= expected_stdout_1 act_1.expected_stderr = expected_stderr_1 act_1.execute() assert act_1.clean_expected_stderr == act_1.clean_stderr", "has no any rights, second is granted with 'alter any", "Checked on: 4.0.0.2387, 3.0.8.33426 -- all OK. # # NOTE:", "be able to set comment to any string and make", "to rdb$admin role, but second ('senior') must # be able", "-- all OK. # # NOTE: # phrase '-Effective user", "import db_factory, isql_act, Action # version: 3.0.8 # resources: None", "only in FB 4.x and is suppressed here. # #", "# versions: 3.0.8 # qmid: None import pytest from firebird.qa", "RDB$ADMIN -Effective user is TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8') def test_1(act_1: Action):", "on 4.0.0.2384, 3.0.8.33425 # Checked on: 4.0.0.2387, 3.0.8.33426 -- all", "drop user tmp$c6489_senior using plugin Srp; commit; \"\"\" act_1 =", "phrase '-Effective user is ...' 
presents only in FB 4.x", "-Effective user is TMP$C6489_JUNIOR \"\"\" @pytest.mark.version('>=3.0.8') def test_1(act_1: Action): act_1.expected_stdout", "user tmp$c6489_junior using plugin Srp; drop user tmp$c6489_senior using plugin" ]
[ "labels = (c - 1) * torch.ones((len(keep), 1)).cuda() if confs.is_cuda", "cls_bbox[..., [1, 3]] = (cls_bbox[..., [1, 3]]) / input_shape[0] roi_score", "0], -1) + 0.5 * src_width src_ctr_y = torch.unsqueeze(src_bbox[:, 1],", "__init__(self, std, num_classes): self.std = std self.num_classes = num_classes +", "0: return torch.zeros((0, 4), dtype=loc.dtype) src_width = torch.unsqueeze(src_bbox[:, 2] -", "c] confs_to_process = c_confs[c_confs_m] keep = nms( boxes_to_process, confs_to_process, nms_iou", "confs, labels), dim=1).cpu().numpy() # 添加进result里 results[-1].extend(c_pred) if len(results[-1]) > 0:", "std self.num_classes = num_classes + 1 def frcnn_correct_boxes(self, box_xy, box_wh,", "ctr_x + 0.5 * w dst_bbox[:, 3::4] = ctr_y +", "dy = loc[:, 1::4] dw = loc[:, 2::4] dh =", "prob = F.softmax(roi_score, dim=-1) results.append([]) for c in range(1, self.num_classes):", "::-1] box_hw = box_wh[..., ::-1] input_shape = np.array(input_shape) image_shape =", "#-----------------------------------------# c_pred = torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy() # 添加进result里 results[-1].extend(c_pred)", "box_xy, box_wh = (results[-1][:, 0:2] + results[-1][:, 2:4])/2, results[-1][:, 2:4]", "= torch.unsqueeze(src_bbox[:, 2] - src_bbox[:, 0], -1) src_height = torch.unsqueeze(src_bbox[:,", "dst_bbox[:, 2::4] = ctr_x + 0.5 * w dst_bbox[:, 3::4]", "= box_yx + (box_hw / 2.) 
boxes = np.concatenate([box_mins[..., 0:1],", "+ 0.5 * h return dst_bbox class DecodeBox(): def __init__(self,", "= len(roi_cls_locs) #--------------------------------# # batch_size, num_rois, 4 #--------------------------------# rois =", "box_xy, box_wh, input_shape, image_shape): #-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx =", "c_confs = prob[:, c] c_confs_m = c_confs > confidence if", "import functional as F from torchvision.ops import nms def loc2bbox(src_bbox,", "2:4] - results[-1][:, 0:2] results[-1][:, :4] = self.frcnn_correct_boxes(box_xy, box_wh, input_shape,", "return dst_bbox class DecodeBox(): def __init__(self, std, num_classes): self.std =", "src_ctr_x = torch.unsqueeze(src_bbox[:, 0], -1) + 0.5 * src_width src_ctr_y", "loc[:, 2::4] dh = loc[:, 3::4] ctr_x = dx *", "torch.unsqueeze(src_bbox[:, 2] - src_bbox[:, 0], -1) src_height = torch.unsqueeze(src_bbox[:, 3]", "box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]], axis=-1) boxes *= np.concatenate([image_shape,", "0.5 * src_width src_ctr_y = torch.unsqueeze(src_bbox[:, 1], -1) + 0.5", "1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]], axis=-1) boxes *= np.concatenate([image_shape, image_shape],", "#----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc = roi_cls_loc.view([-1, self.num_classes,", "= ctr_y - 0.5 * h dst_bbox[:, 2::4] = ctr_x", "boxes_to_process[keep] confs = confs_to_process[keep][:, None] labels = (c - 1)", "dst_bbox = torch.zeros_like(loc) dst_bbox[:, 0::4] = ctr_x - 0.5 *", "good_boxes = boxes_to_process[keep] confs = confs_to_process[keep][:, None] labels = (c", "= roi_scores[i] prob = F.softmax(roi_score, dim=-1) results.append([]) for c in", ") #-----------------------------------------# # 取出在非极大抑制中效果较好的内容 
#-----------------------------------------# good_boxes = boxes_to_process[keep] confs =", "roi_cls_loc.contiguous().view((-1, 4))) cls_bbox = cls_bbox.view([-1, (self.num_classes), 4]) #-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间", "range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for i in range(bs): #----------------------------------------------------------# # 对回归参数进行reshape #----------------------------------------------------------#", "dh = loc[:, 3::4] ctr_x = dx * src_width +", "第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc = roi_cls_loc.view([-1, self.num_classes, 4]) #-------------------------------------------------------------#", "def loc2bbox(src_bbox, loc): if src_bbox.size()[0] == 0: return torch.zeros((0, 4),", "* src_width h = torch.exp(dh) * src_height dst_bbox = torch.zeros_like(loc)", "3]]) / input_shape[0] roi_score = roi_scores[i] prob = F.softmax(roi_score, dim=-1)", "4)), roi_cls_loc.contiguous().view((-1, 4))) cls_bbox = cls_bbox.view([-1, (self.num_classes), 4]) #-------------------------------------------------------------# #", "/ 2.) 
boxes = np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1],", "= (c - 1) * torch.ones((len(keep), 1)).cuda() if confs.is_cuda else", "-1) + 0.5 * src_width src_ctr_y = torch.unsqueeze(src_bbox[:, 1], -1)", "box_wh = (results[-1][:, 0:2] + results[-1][:, 2:4])/2, results[-1][:, 2:4] -", "-> num_rois, 1, 4 -> num_rois, num_classes, 4 #-------------------------------------------------------------# roi", "取出得分高于confidence的框 #-----------------------------------------# boxes_to_process = cls_bbox[c_confs_m, c] confs_to_process = c_confs[c_confs_m] keep", "torch from torch.nn import functional as F from torchvision.ops import", "4]) #-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois, 4 -> num_rois, 1,", "0.5): results = [] bs = len(roi_cls_locs) #--------------------------------# # batch_size,", "2]] = (cls_bbox[..., [0, 2]]) / input_shape[1] cls_bbox[..., [1, 3]]", "= np.array(results[-1]) box_xy, box_wh = (results[-1][:, 0:2] + results[-1][:, 2:4])/2,", "src_width + src_ctr_x ctr_y = dy * src_height + src_ctr_y", "torch.zeros((0, 4), dtype=loc.dtype) src_width = torch.unsqueeze(src_bbox[:, 2] - src_bbox[:, 0],", "forward(self, roi_cls_locs, roi_scores, rois, image_shape, input_shape, nms_iou = 0.3, confidence", "else (c - 1) * torch.ones((len(keep), 1)) #-----------------------------------------# # 将label、置信度、框的位置进行堆叠。", "axis=-1) return boxes def forward(self, roi_cls_locs, roi_scores, rois, image_shape, input_shape,", "== 0: return torch.zeros((0, 4), dtype=loc.dtype) src_width = torch.unsqueeze(src_bbox[:, 2]", "= torch.zeros_like(loc) dst_bbox[:, 0::4] = ctr_x - 0.5 * w", "dst_bbox[:, 1::4] = ctr_y - 0.5 * h dst_bbox[:, 2::4]", "# 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx = box_xy[..., ::-1] box_hw = box_wh[...,", "src_bbox[:, 0], -1) src_height = torch.unsqueeze(src_bbox[:, 3] - src_bbox[:, 1],", "for i in range(bs): 
#----------------------------------------------------------# # 对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc =", "cls_bbox[..., [0, 2]] = (cls_bbox[..., [0, 2]]) / input_shape[1] cls_bbox[...,", "loc): if src_bbox.size()[0] == 0: return torch.zeros((0, 4), dtype=loc.dtype) src_width", "= dx * src_width + src_ctr_x ctr_y = dy *", "= box_xy[..., ::-1] box_hw = box_wh[..., ::-1] input_shape = np.array(input_shape)", "[] bs = len(roi_cls_locs) #--------------------------------# # batch_size, num_rois, 4 #--------------------------------#", "c_confs_m = c_confs > confidence if len(c_confs[c_confs_m]) > 0: #-----------------------------------------#", "box_maxes = box_yx + (box_hw / 2.) boxes = np.concatenate([box_mins[...,", "4)).expand_as(roi_cls_loc) cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4))) cls_bbox = cls_bbox.view([-1,", "image_shape): #-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx = box_xy[..., ::-1] box_hw", "# 取出得分高于confidence的框 #-----------------------------------------# boxes_to_process = cls_bbox[c_confs_m, c] confs_to_process = c_confs[c_confs_m]", "import nms def loc2bbox(src_bbox, loc): if src_bbox.size()[0] == 0: return", "torch.exp(dh) * src_height dst_bbox = torch.zeros_like(loc) dst_bbox[:, 0::4] = ctr_x", "box_yx = box_xy[..., ::-1] box_hw = box_wh[..., ::-1] input_shape =", "对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc = roi_cls_locs[i] * self.std #----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类", "c] c_confs_m = c_confs > confidence if len(c_confs[c_confs_m]) > 0:", "4]) #-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[..., [0, 2]] = (cls_bbox[...,", 
"self.num_classes, 4]) #-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois, 4 -> num_rois,", "axis=-1) boxes *= np.concatenate([image_shape, image_shape], axis=-1) return boxes def forward(self,", "as np import torch from torch.nn import functional as F", "len(roi_cls_locs) #--------------------------------# # batch_size, num_rois, 4 #--------------------------------# rois = rois.view((bs,", "/ input_shape[0] roi_score = roi_scores[i] prob = F.softmax(roi_score, dim=-1) results.append([])", "[1, 3]] = (cls_bbox[..., [1, 3]]) / input_shape[0] roi_score =", "confs = confs_to_process[keep][:, None] labels = (c - 1) *", "box_maxes[..., 1:2]], axis=-1) boxes *= np.concatenate([image_shape, image_shape], axis=-1) return boxes", "4 -> num_rois, num_classes, 4 #-------------------------------------------------------------# roi = rois[i].view((-1, 1,", "bs = len(roi_cls_locs) #--------------------------------# # batch_size, num_rois, 4 #--------------------------------# rois", "dim=-1) results.append([]) for c in range(1, self.num_classes): #--------------------------------# # 取出属于该类的所有框的置信度", "box_yx - (box_hw / 2.) 
box_maxes = box_yx + (box_hw", "c in range(1, self.num_classes): #--------------------------------# # 取出属于该类的所有框的置信度 # 判断是否大于门限 #--------------------------------#", "as F from torchvision.ops import nms def loc2bbox(src_bbox, loc): if", "0.5 * h return dst_bbox class DecodeBox(): def __init__(self, std,", "/ input_shape[1] cls_bbox[..., [1, 3]] = (cls_bbox[..., [1, 3]]) /", "= loc[:, 2::4] dh = loc[:, 3::4] ctr_x = dx", "4 #-------------------------------------------------------------# roi = rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc) cls_bbox = loc2bbox(roi.contiguous().view((-1,", "= dy * src_height + src_ctr_y w = torch.exp(dw) *", "4), dtype=loc.dtype) src_width = torch.unsqueeze(src_bbox[:, 2] - src_bbox[:, 0], -1)", "= torch.unsqueeze(src_bbox[:, 3] - src_bbox[:, 1], -1) src_ctr_x = torch.unsqueeze(src_bbox[:,", "+ 0.5 * w dst_bbox[:, 3::4] = ctr_y + 0.5", "self.std #----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc = roi_cls_loc.view([-1,", "1 def frcnn_correct_boxes(self, box_xy, box_wh, input_shape, image_shape): #-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘", "# 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[..., [0, 2]] = (cls_bbox[..., [0, 2]])", "box_xy[..., ::-1] box_hw = box_wh[..., ::-1] input_shape = np.array(input_shape) image_shape", "rois, image_shape, input_shape, nms_iou = 0.3, confidence = 0.5): results", "self.num_classes = num_classes + 1 def frcnn_correct_boxes(self, box_xy, box_wh, input_shape,", "= loc[:, 1::4] dw = loc[:, 2::4] dh = loc[:,", "nms def loc2bbox(src_bbox, loc): if src_bbox.size()[0] == 0: return torch.zeros((0,", "(cls_bbox[..., [1, 3]]) / input_shape[0] roi_score = roi_scores[i] prob =", "= np.array(input_shape) image_shape = np.array(image_shape) box_mins = box_yx - (box_hw", 
"#----------------------------------------------------------------------------------------------------------------# for i in range(bs): #----------------------------------------------------------# # 对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc", "* h return dst_bbox class DecodeBox(): def __init__(self, std, num_classes):", "roi_cls_locs, roi_scores, rois, image_shape, input_shape, nms_iou = 0.3, confidence =", "#----------------------------------------------------------------------------------------------------------------# # 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for i in", "+ 1 def frcnn_correct_boxes(self, box_xy, box_wh, input_shape, image_shape): #-----------------------------------------------------------------# #", "dy * src_height + src_ctr_y w = torch.exp(dw) * src_width", "ctr_y + 0.5 * h return dst_bbox class DecodeBox(): def", "* self.std #----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc =", "src_height dst_bbox = torch.zeros_like(loc) dst_bbox[:, 0::4] = ctr_x - 0.5", "= roi_cls_locs[i] * self.std #----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数 #----------------------------------------------------------#", "torchvision.ops import nms def loc2bbox(src_bbox, loc): if src_bbox.size()[0] == 0:", "= (results[-1][:, 0:2] + results[-1][:, 2:4])/2, results[-1][:, 2:4] - results[-1][:,", "boxes_to_process = cls_bbox[c_confs_m, c] confs_to_process = c_confs[c_confs_m] keep = nms(", "= loc[:, 0::4] dy = loc[:, 1::4] dw = loc[:,", "= 0.3, confidence = 0.5): results = [] bs =", "dtype=loc.dtype) src_width = torch.unsqueeze(src_bbox[:, 2] - src_bbox[:, 0], -1) src_height", "* src_width src_ctr_y = 
torch.unsqueeze(src_bbox[:, 1], -1) + 0.5 *", "boxes *= np.concatenate([image_shape, image_shape], axis=-1) return boxes def forward(self, roi_cls_locs,", "(box_hw / 2.) box_maxes = box_yx + (box_hw / 2.)", "src_bbox[:, 1], -1) src_ctr_x = torch.unsqueeze(src_bbox[:, 0], -1) + 0.5", "torch.zeros_like(loc) dst_bbox[:, 0::4] = ctr_x - 0.5 * w dst_bbox[:,", "#-----------------------------------------# # 取出在非极大抑制中效果较好的内容 #-----------------------------------------# good_boxes = boxes_to_process[keep] confs = confs_to_process[keep][:,", "(cls_bbox[..., [0, 2]]) / input_shape[1] cls_bbox[..., [1, 3]] = (cls_bbox[...,", "- 0.5 * h dst_bbox[:, 2::4] = ctr_x + 0.5", "0.5 * w dst_bbox[:, 3::4] = ctr_y + 0.5 *", "from torch.nn import functional as F from torchvision.ops import nms", "(results[-1][:, 0:2] + results[-1][:, 2:4])/2, results[-1][:, 2:4] - results[-1][:, 0:2]", "+ (box_hw / 2.) boxes = np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2],", "len(c_confs[c_confs_m]) > 0: #-----------------------------------------# # 取出得分高于confidence的框 #-----------------------------------------# boxes_to_process = cls_bbox[c_confs_m,", "0:2] + results[-1][:, 2:4])/2, results[-1][:, 2:4] - results[-1][:, 0:2] results[-1][:,", "results[-1][:, 2:4] - results[-1][:, 0:2] results[-1][:, :4] = self.frcnn_correct_boxes(box_xy, box_wh,", "confs.is_cuda else (c - 1) * torch.ones((len(keep), 1)) #-----------------------------------------# #", "roi = rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc) cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1,", "0], -1) src_height = torch.unsqueeze(src_bbox[:, 3] - src_bbox[:, 1], -1)", "torch.ones((len(keep), 1)) #-----------------------------------------# # 将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred = torch.cat((good_boxes, confs,", "src_height dx = loc[:, 0::4] dy = loc[:, 1::4] dw", "input_shape[1] cls_bbox[..., [1, 3]] = (cls_bbox[..., [1, 3]]) / input_shape[0]", "c_confs > confidence 
if len(c_confs[c_confs_m]) > 0: #-----------------------------------------# # 取出得分高于confidence的框", "dst_bbox[:, 0::4] = ctr_x - 0.5 * w dst_bbox[:, 1::4]", "roi_cls_loc = roi_cls_loc.view([-1, self.num_classes, 4]) #-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois,", "= ctr_x + 0.5 * w dst_bbox[:, 3::4] = ctr_y", "= torch.exp(dw) * src_width h = torch.exp(dh) * src_height dst_bbox", "src_height + src_ctr_y w = torch.exp(dw) * src_width h =", "# 取出属于该类的所有框的置信度 # 判断是否大于门限 #--------------------------------# c_confs = prob[:, c] c_confs_m", "dw = loc[:, 2::4] dh = loc[:, 3::4] ctr_x =", "4)) #----------------------------------------------------------------------------------------------------------------# # 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for i", "confidence = 0.5): results = [] bs = len(roi_cls_locs) #--------------------------------#", "-1, 4)) #----------------------------------------------------------------------------------------------------------------# # 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for", "i in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for i in range(bs): #----------------------------------------------------------# #", "将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred = torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy() # 添加进result里", "对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[..., [0, 2]] = (cls_bbox[..., [0, 2]]) /", "添加进result里 results[-1].extend(c_pred) if len(results[-1]) > 0: results[-1] = 
np.array(results[-1]) box_xy,", "num_classes + 1 def frcnn_correct_boxes(self, box_xy, box_wh, input_shape, image_shape): #-----------------------------------------------------------------#", "ctr_x = dx * src_width + src_ctr_x ctr_y = dy", "i in range(bs): #----------------------------------------------------------# # 对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc = roi_cls_locs[i]", "class DecodeBox(): def __init__(self, std, num_classes): self.std = std self.num_classes", "boxes def forward(self, roi_cls_locs, roi_scores, rois, image_shape, input_shape, nms_iou =", "第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc = roi_cls_loc.view([-1, self.num_classes, 4]) #-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框", "num_rois, num_classes, 4 #-------------------------------------------------------------# roi = rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc) cls_bbox", "-1) src_ctr_x = torch.unsqueeze(src_bbox[:, 0], -1) + 0.5 * src_width", "- (box_hw / 2.) 
box_maxes = box_yx + (box_hw /", "#--------------------------------# c_confs = prob[:, c] c_confs_m = c_confs > confidence", "src_ctr_x ctr_y = dy * src_height + src_ctr_y w =", "results[-1] = np.array(results[-1]) box_xy, box_wh = (results[-1][:, 0:2] + results[-1][:,", "+ results[-1][:, 2:4])/2, results[-1][:, 2:4] - results[-1][:, 0:2] results[-1][:, :4]", "import torch from torch.nn import functional as F from torchvision.ops", "1::4] dw = loc[:, 2::4] dh = loc[:, 3::4] ctr_x", "1)).cuda() if confs.is_cuda else (c - 1) * torch.ones((len(keep), 1))", "# 第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc = roi_cls_loc.view([-1, self.num_classes, 4]) #-------------------------------------------------------------# #", "- src_bbox[:, 0], -1) src_height = torch.unsqueeze(src_bbox[:, 3] - src_bbox[:,", "range(1, self.num_classes): #--------------------------------# # 取出属于该类的所有框的置信度 # 判断是否大于门限 #--------------------------------# c_confs =", "roi_cls_locs[i] * self.std #----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc", "if len(c_confs[c_confs_m]) > 0: #-----------------------------------------# # 取出得分高于confidence的框 #-----------------------------------------# boxes_to_process =", "= roi_cls_loc.view([-1, self.num_classes, 4]) #-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois, 4", "w dst_bbox[:, 3::4] = ctr_y + 0.5 * h return", "roi_cls_loc.view([-1, self.num_classes, 4]) #-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois, 4 ->", "confs_to_process = c_confs[c_confs_m] keep = nms( boxes_to_process, confs_to_process, nms_iou )", "results[-1][:, 2:4])/2, results[-1][:, 2:4] - results[-1][:, 0:2] results[-1][:, :4] =", "1) * torch.ones((len(keep), 1)).cuda() if confs.is_cuda else (c - 1)", "= 
(cls_bbox[..., [1, 3]]) / input_shape[0] roi_score = roi_scores[i] prob", "+ 0.5 * src_height dx = loc[:, 0::4] dy =", "#-----------------------------------------# boxes_to_process = cls_bbox[c_confs_m, c] confs_to_process = c_confs[c_confs_m] keep =", "2.) box_maxes = box_yx + (box_hw / 2.) boxes =", "confidence if len(c_confs[c_confs_m]) > 0: #-----------------------------------------# # 取出得分高于confidence的框 #-----------------------------------------# boxes_to_process", "nms_iou ) #-----------------------------------------# # 取出在非极大抑制中效果较好的内容 #-----------------------------------------# good_boxes = boxes_to_process[keep] confs", "#--------------------------------# # batch_size, num_rois, 4 #--------------------------------# rois = rois.view((bs, -1,", "loc[:, 3::4] ctr_x = dx * src_width + src_ctr_x ctr_y", "torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy() # 添加进result里 results[-1].extend(c_pred) if len(results[-1]) >", "(self.num_classes), 4]) #-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[..., [0, 2]] =", "2.) 
boxes = np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[...,", "2::4] dh = loc[:, 3::4] ctr_x = dx * src_width", "in range(1, self.num_classes): #--------------------------------# # 取出属于该类的所有框的置信度 # 判断是否大于门限 #--------------------------------# c_confs", "torch.nn import functional as F from torchvision.ops import nms def", "= box_wh[..., ::-1] input_shape = np.array(input_shape) image_shape = np.array(image_shape) box_mins", "np.array(results[-1]) box_xy, box_wh = (results[-1][:, 0:2] + results[-1][:, 2:4])/2, results[-1][:,", "ctr_y = dy * src_height + src_ctr_y w = torch.exp(dw)", "= ctr_x - 0.5 * w dst_bbox[:, 1::4] = ctr_y", "box_wh[..., ::-1] input_shape = np.array(input_shape) image_shape = np.array(image_shape) box_mins =", "functional as F from torchvision.ops import nms def loc2bbox(src_bbox, loc):", "#----------------------------------------------------------# roi_cls_loc = roi_cls_loc.view([-1, self.num_classes, 4]) #-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框 #", "[1, 3]]) / input_shape[0] roi_score = roi_scores[i] prob = F.softmax(roi_score,", "box_wh, input_shape, image_shape): #-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx = box_xy[...,", "取出在非极大抑制中效果较好的内容 #-----------------------------------------# good_boxes = boxes_to_process[keep] confs = confs_to_process[keep][:, None] labels", "from torchvision.ops import nms def loc2bbox(src_bbox, loc): if src_bbox.size()[0] ==", "# 对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc = roi_cls_locs[i] * self.std #----------------------------------------------------------# #", "+ 0.5 * src_width src_ctr_y = torch.unsqueeze(src_bbox[:, 1], -1) +", "= torch.unsqueeze(src_bbox[:, 1], -1) + 0.5 * src_height dx =", "= torch.unsqueeze(src_bbox[:, 0], -1) + 0.5 * 
src_width src_ctr_y =", "#-------------------------------------------------------------# roi = rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc) cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)),", "None] labels = (c - 1) * torch.ones((len(keep), 1)).cuda() if", "0: #-----------------------------------------# # 取出得分高于confidence的框 #-----------------------------------------# boxes_to_process = cls_bbox[c_confs_m, c] confs_to_process", "* h dst_bbox[:, 2::4] = ctr_x + 0.5 * w", "0::4] = ctr_x - 0.5 * w dst_bbox[:, 1::4] =", "input_shape, image_shape): #-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx = box_xy[..., ::-1]", "nms( boxes_to_process, confs_to_process, nms_iou ) #-----------------------------------------# # 取出在非极大抑制中效果较好的内容 #-----------------------------------------# good_boxes", "对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for i in range(bs): #----------------------------------------------------------#", "#-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx = box_xy[..., ::-1] box_hw =", "= box_yx - (box_hw / 2.) 
box_maxes = box_yx +", "h dst_bbox[:, 2::4] = ctr_x + 0.5 * w dst_bbox[:,", "in range(bs): #----------------------------------------------------------# # 对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc = roi_cls_locs[i] *", "torch.unsqueeze(src_bbox[:, 3] - src_bbox[:, 1], -1) src_ctr_x = torch.unsqueeze(src_bbox[:, 0],", "input_shape = np.array(input_shape) image_shape = np.array(image_shape) box_mins = box_yx -", "= rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc) cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4)))", "= boxes_to_process[keep] confs = confs_to_process[keep][:, None] labels = (c -", "[0, 2]] = (cls_bbox[..., [0, 2]]) / input_shape[1] cls_bbox[..., [1,", "box_yx + (box_hw / 2.) boxes = np.concatenate([box_mins[..., 0:1], box_mins[...,", "= rois.view((bs, -1, 4)) #----------------------------------------------------------------------------------------------------------------# # 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in range(len(mbox_loc))只进行一次", "image_shape, input_shape, nms_iou = 0.3, confidence = 0.5): results =", "box_maxes[..., 0:1], box_maxes[..., 1:2]], axis=-1) boxes *= np.concatenate([image_shape, image_shape], axis=-1)", "* src_height dst_bbox = torch.zeros_like(loc) dst_bbox[:, 0::4] = ctr_x -", "= np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]], axis=-1)", "loc[:, 1::4] dw = loc[:, 2::4] dh = loc[:, 3::4]", "= c_confs[c_confs_m] keep = nms( boxes_to_process, confs_to_process, nms_iou ) #-----------------------------------------#", "image_shape], axis=-1) return boxes def forward(self, roi_cls_locs, roi_scores, rois, image_shape,", "std, num_classes): self.std = std self.num_classes = num_classes + 1", "dst_bbox[:, 3::4] = ctr_y + 0.5 * h return dst_bbox", "把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx = box_xy[..., ::-1] box_hw = box_wh[..., ::-1]", 
"c_pred = torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy() # 添加进result里 results[-1].extend(c_pred) if", "torch.unsqueeze(src_bbox[:, 0], -1) + 0.5 * src_width src_ctr_y = torch.unsqueeze(src_bbox[:,", "cls_bbox = cls_bbox.view([-1, (self.num_classes), 4]) #-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[...,", "0:1], box_maxes[..., 1:2]], axis=-1) boxes *= np.concatenate([image_shape, image_shape], axis=-1) return", "#-------------------------------------------------------------# cls_bbox[..., [0, 2]] = (cls_bbox[..., [0, 2]]) / input_shape[1]", "1:2]], axis=-1) boxes *= np.concatenate([image_shape, image_shape], axis=-1) return boxes def", "input_shape, nms_iou = 0.3, confidence = 0.5): results = []", "np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]], axis=-1) boxes", "2]]) / input_shape[1] cls_bbox[..., [1, 3]] = (cls_bbox[..., [1, 3]])", "roi_scores[i] prob = F.softmax(roi_score, dim=-1) results.append([]) for c in range(1,", "dst_bbox class DecodeBox(): def __init__(self, std, num_classes): self.std = std", "np import torch from torch.nn import functional as F from", "# 取出在非极大抑制中效果较好的内容 #-----------------------------------------# good_boxes = boxes_to_process[keep] confs = confs_to_process[keep][:, None]", "cls_bbox.view([-1, (self.num_classes), 4]) #-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[..., [0, 2]]", "= loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4))) cls_bbox = cls_bbox.view([-1, (self.num_classes), 4])", "1::4] = ctr_y - 0.5 * h dst_bbox[:, 2::4] =", "1)) #-----------------------------------------# # 将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred = torch.cat((good_boxes, confs, labels),", "0.3, confidence = 0.5): 
results = [] bs = len(roi_cls_locs)", "- 1) * torch.ones((len(keep), 1)).cuda() if confs.is_cuda else (c -", "* src_height + src_ctr_y w = torch.exp(dw) * src_width h", "results.append([]) for c in range(1, self.num_classes): #--------------------------------# # 取出属于该类的所有框的置信度 #", "self.num_classes): #--------------------------------# # 取出属于该类的所有框的置信度 # 判断是否大于门限 #--------------------------------# c_confs = prob[:,", "boxes_to_process, confs_to_process, nms_iou ) #-----------------------------------------# # 取出在非极大抑制中效果较好的内容 #-----------------------------------------# good_boxes =", "dx = loc[:, 0::4] dy = loc[:, 1::4] dw =", "= cls_bbox[c_confs_m, c] confs_to_process = c_confs[c_confs_m] keep = nms( boxes_to_process,", "[0, 2]]) / input_shape[1] cls_bbox[..., [1, 3]] = (cls_bbox[..., [1,", "-> num_rois, num_classes, 4 #-------------------------------------------------------------# roi = rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc)", "w = torch.exp(dw) * src_width h = torch.exp(dh) * src_height", "- 1) * torch.ones((len(keep), 1)) #-----------------------------------------# # 将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred", "in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for i in range(bs): #----------------------------------------------------------# # 对回归参数进行reshape", "loc[:, 0::4] dy = loc[:, 1::4] dw = loc[:, 2::4]", "h = torch.exp(dh) * src_height dst_bbox = torch.zeros_like(loc) dst_bbox[:, 0::4]", "torch.unsqueeze(src_bbox[:, 1], -1) + 0.5 * src_height dx = loc[:,", "def frcnn_correct_boxes(self, box_xy, box_wh, input_shape, image_shape): #-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------#", "# batch_size, num_rois, 4 #--------------------------------# rois = rois.view((bs, -1, 4))", "<reponame>MasoonZhang/FasterRConvMixer 
import numpy as np import torch from torch.nn import", "#--------------------------------# # 取出属于该类的所有框的置信度 # 判断是否大于门限 #--------------------------------# c_confs = prob[:, c]", "1], -1) + 0.5 * src_height dx = loc[:, 0::4]", "* torch.ones((len(keep), 1)).cuda() if confs.is_cuda else (c - 1) *", "c_confs[c_confs_m] keep = nms( boxes_to_process, confs_to_process, nms_iou ) #-----------------------------------------# #", "0: results[-1] = np.array(results[-1]) box_xy, box_wh = (results[-1][:, 0:2] +", "3] - src_bbox[:, 1], -1) src_ctr_x = torch.unsqueeze(src_bbox[:, 0], -1)", "# 利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois, 4 -> num_rois, 1, 4 ->", "# 添加进result里 results[-1].extend(c_pred) if len(results[-1]) > 0: results[-1] = np.array(results[-1])", "return boxes def forward(self, roi_cls_locs, roi_scores, rois, image_shape, input_shape, nms_iou", "keep = nms( boxes_to_process, confs_to_process, nms_iou ) #-----------------------------------------# # 取出在非极大抑制中效果较好的内容", "- 0.5 * w dst_bbox[:, 1::4] = ctr_y - 0.5", "* w dst_bbox[:, 3::4] = ctr_y + 0.5 * h", "np.array(image_shape) box_mins = box_yx - (box_hw / 2.) box_maxes =", "= ctr_y + 0.5 * h return dst_bbox class DecodeBox():", "(box_hw / 2.) 
boxes = np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[...,", "labels), dim=1).cpu().numpy() # 添加进result里 results[-1].extend(c_pred) if len(results[-1]) > 0: results[-1]", "判断是否大于门限 #--------------------------------# c_confs = prob[:, c] c_confs_m = c_confs >", "results[-1].extend(c_pred) if len(results[-1]) > 0: results[-1] = np.array(results[-1]) box_xy, box_wh", "+ src_ctr_x ctr_y = dy * src_height + src_ctr_y w", "roi_cls_loc = roi_cls_locs[i] * self.std #----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数", "if len(results[-1]) > 0: results[-1] = np.array(results[-1]) box_xy, box_wh =", "0:2] results[-1][:, :4] = self.frcnn_correct_boxes(box_xy, box_wh, input_shape, image_shape) return results", "confs_to_process[keep][:, None] labels = (c - 1) * torch.ones((len(keep), 1)).cuda()", "src_width src_ctr_y = torch.unsqueeze(src_bbox[:, 1], -1) + 0.5 * src_height", "= [] bs = len(roi_cls_locs) #--------------------------------# # batch_size, num_rois, 4", "np.concatenate([image_shape, image_shape], axis=-1) return boxes def forward(self, roi_cls_locs, roi_scores, rois,", "1, 4)).expand_as(roi_cls_loc) cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4))) cls_bbox =", "#-----------------------------------------# # 将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred = torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy()", "#-----------------------------------------# good_boxes = boxes_to_process[keep] confs = confs_to_process[keep][:, None] labels =", "torch.exp(dw) * src_width h = torch.exp(dh) * src_height dst_bbox =", "= confs_to_process[keep][:, None] labels = (c - 1) * torch.ones((len(keep),", "- src_bbox[:, 1], -1) src_ctr_x = torch.unsqueeze(src_bbox[:, 0], -1) +", "#--------------------------------# rois = rois.view((bs, -1, 4)) 
#----------------------------------------------------------------------------------------------------------------# # 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i", "ctr_x - 0.5 * w dst_bbox[:, 1::4] = ctr_y -", "src_ctr_y w = torch.exp(dw) * src_width h = torch.exp(dh) *", "= np.array(image_shape) box_mins = box_yx - (box_hw / 2.) box_maxes", "for c in range(1, self.num_classes): #--------------------------------# # 取出属于该类的所有框的置信度 # 判断是否大于门限", "return torch.zeros((0, 4), dtype=loc.dtype) src_width = torch.unsqueeze(src_bbox[:, 2] - src_bbox[:,", "= (cls_bbox[..., [0, 2]]) / input_shape[1] cls_bbox[..., [1, 3]] =", "np.array(input_shape) image_shape = np.array(image_shape) box_mins = box_yx - (box_hw /", "# 将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred = torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy() #", "* torch.ones((len(keep), 1)) #-----------------------------------------# # 将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred = torch.cat((good_boxes,", "src_width h = torch.exp(dh) * src_height dst_bbox = torch.zeros_like(loc) dst_bbox[:,", "src_height = torch.unsqueeze(src_bbox[:, 3] - src_bbox[:, 1], -1) src_ctr_x =", "roi_score = roi_scores[i] prob = F.softmax(roi_score, dim=-1) results.append([]) for c", "= std self.num_classes = num_classes + 1 def frcnn_correct_boxes(self, box_xy,", "#-----------------------------------------------------------------# box_yx = box_xy[..., ::-1] box_hw = box_wh[..., ::-1] input_shape", "= cls_bbox.view([-1, (self.num_classes), 4]) #-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[..., [0,", "boxes = np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]],", "> 0: results[-1] = np.array(results[-1]) box_xy, box_wh = (results[-1][:, 0:2]", "frcnn_correct_boxes(self, box_xy, box_wh, input_shape, image_shape): 
#-----------------------------------------------------------------# # 把y轴放前面是因为方便预测框和图像的宽高进行相乘 #-----------------------------------------------------------------# box_yx", "3::4] = ctr_y + 0.5 * h return dst_bbox class", "4 -> num_rois, 1, 4 -> num_rois, num_classes, 4 #-------------------------------------------------------------#", "dx * src_width + src_ctr_x ctr_y = dy * src_height", "= torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy() # 添加进result里 results[-1].extend(c_pred) if len(results[-1])", "(c - 1) * torch.ones((len(keep), 1)) #-----------------------------------------# # 将label、置信度、框的位置进行堆叠。 #-----------------------------------------#", "batch_size, num_rois, 4 #--------------------------------# rois = rois.view((bs, -1, 4)) #----------------------------------------------------------------------------------------------------------------#", "F from torchvision.ops import nms def loc2bbox(src_bbox, loc): if src_bbox.size()[0]", "0.5 * src_height dx = loc[:, 0::4] dy = loc[:,", "range(bs): #----------------------------------------------------------# # 对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc = roi_cls_locs[i] * self.std", "*= np.concatenate([image_shape, image_shape], axis=-1) return boxes def forward(self, roi_cls_locs, roi_scores,", "2:4])/2, results[-1][:, 2:4] - results[-1][:, 0:2] results[-1][:, :4] = self.frcnn_correct_boxes(box_xy,", "w dst_bbox[:, 1::4] = ctr_y - 0.5 * h dst_bbox[:,", "取出属于该类的所有框的置信度 # 判断是否大于门限 #--------------------------------# c_confs = prob[:, c] c_confs_m =", "self.std = std self.num_classes = num_classes + 1 def frcnn_correct_boxes(self,", "0::4] dy = loc[:, 1::4] dw = loc[:, 2::4] dh", "#-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------# cls_bbox[..., [0, 2]] = (cls_bbox[..., [0,", "4))) cls_bbox = cls_bbox.view([-1, (self.num_classes), 4]) 
#-------------------------------------------------------------# # 对预测框进行归一化,调整到0-1之间 #-------------------------------------------------------------#", "if src_bbox.size()[0] == 0: return torch.zeros((0, 4), dtype=loc.dtype) src_width =", "- results[-1][:, 0:2] results[-1][:, :4] = self.frcnn_correct_boxes(box_xy, box_wh, input_shape, image_shape)", "0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]], axis=-1) boxes *=", "#-----------------------------------------# # 取出得分高于confidence的框 #-----------------------------------------# boxes_to_process = cls_bbox[c_confs_m, c] confs_to_process =", "results = [] bs = len(roi_cls_locs) #--------------------------------# # batch_size, num_rois,", "confs_to_process, nms_iou ) #-----------------------------------------# # 取出在非极大抑制中效果较好的内容 #-----------------------------------------# good_boxes = boxes_to_process[keep]", "# 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------# for i in range(bs):", "= num_classes + 1 def frcnn_correct_boxes(self, box_xy, box_wh, input_shape, image_shape):", "4 #--------------------------------# rois = rois.view((bs, -1, 4)) #----------------------------------------------------------------------------------------------------------------# # 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for", "+ src_ctr_y w = torch.exp(dw) * src_width h = torch.exp(dh)", "prob[:, c] c_confs_m = c_confs > confidence if len(c_confs[c_confs_m]) >", "3]] = (cls_bbox[..., [1, 3]]) / input_shape[0] roi_score = roi_scores[i]", "if confs.is_cuda else (c - 1) * torch.ones((len(keep), 1)) #-----------------------------------------#", "::-1] input_shape = np.array(input_shape) image_shape = np.array(image_shape) box_mins = box_yx", "input_shape[0] roi_score = roi_scores[i] prob = F.softmax(roi_score, dim=-1) results.append([]) for", "numpy as np import torch from torch.nn import 
functional as", "src_width = torch.unsqueeze(src_bbox[:, 2] - src_bbox[:, 0], -1) src_height =", "= F.softmax(roi_score, dim=-1) results.append([]) for c in range(1, self.num_classes): #--------------------------------#", "1], -1) src_ctr_x = torch.unsqueeze(src_bbox[:, 0], -1) + 0.5 *", "# 判断是否大于门限 #--------------------------------# c_confs = prob[:, c] c_confs_m = c_confs", "loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4))) cls_bbox = cls_bbox.view([-1, (self.num_classes), 4]) #-------------------------------------------------------------#", "-1) src_height = torch.unsqueeze(src_bbox[:, 3] - src_bbox[:, 1], -1) src_ctr_x", "#----------------------------------------------------------# roi_cls_loc = roi_cls_locs[i] * self.std #----------------------------------------------------------# # 第一维度是建议框的数量,第二维度是每个种类 #", "cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4))) cls_bbox = cls_bbox.view([-1, (self.num_classes),", "F.softmax(roi_score, dim=-1) results.append([]) for c in range(1, self.num_classes): #--------------------------------# #", "> confidence if len(c_confs[c_confs_m]) > 0: #-----------------------------------------# # 取出得分高于confidence的框 #-----------------------------------------#", "0.5 * h dst_bbox[:, 2::4] = ctr_x + 0.5 *", "num_classes): self.std = std self.num_classes = num_classes + 1 def", "-1) + 0.5 * src_height dx = loc[:, 0::4] dy", "num_rois, 1, 4 -> num_rois, num_classes, 4 #-------------------------------------------------------------# roi =", "h return dst_bbox class DecodeBox(): def __init__(self, std, num_classes): self.std", "src_ctr_y = torch.unsqueeze(src_bbox[:, 1], -1) + 0.5 * src_height dx", "* src_height dx = loc[:, 0::4] dy = loc[:, 1::4]", "num_rois, 4 #--------------------------------# rois = rois.view((bs, -1, 4)) #----------------------------------------------------------------------------------------------------------------# #", "# num_rois, 4 -> num_rois, 
1, 4 -> num_rois, num_classes,", "= loc[:, 3::4] ctr_x = dx * src_width + src_ctr_x", "* src_width + src_ctr_x ctr_y = dy * src_height +", "src_bbox.size()[0] == 0: return torch.zeros((0, 4), dtype=loc.dtype) src_width = torch.unsqueeze(src_bbox[:,", "cls_bbox[c_confs_m, c] confs_to_process = c_confs[c_confs_m] keep = nms( boxes_to_process, confs_to_process,", "import numpy as np import torch from torch.nn import functional", "# 第一维度是建议框的数量,第二维度是每个种类 # 第三维度是对应种类的调整参数 #----------------------------------------------------------# roi_cls_loc = roi_cls_loc.view([-1, self.num_classes, 4])", "box_mins = box_yx - (box_hw / 2.) box_maxes = box_yx", "DecodeBox(): def __init__(self, std, num_classes): self.std = std self.num_classes =", "box_hw = box_wh[..., ::-1] input_shape = np.array(input_shape) image_shape = np.array(image_shape)", "/ 2.) box_maxes = box_yx + (box_hw / 2.) boxes", "len(results[-1]) > 0: results[-1] = np.array(results[-1]) box_xy, box_wh = (results[-1][:,", "= nms( boxes_to_process, confs_to_process, nms_iou ) #-----------------------------------------# # 取出在非极大抑制中效果较好的内容 #-----------------------------------------#", "num_rois, 4 -> num_rois, 1, 4 -> num_rois, num_classes, 4", "* w dst_bbox[:, 1::4] = ctr_y - 0.5 * h", "dim=1).cpu().numpy() # 添加进result里 results[-1].extend(c_pred) if len(results[-1]) > 0: results[-1] =", "1, 4 -> num_rois, num_classes, 4 #-------------------------------------------------------------# roi = rois[i].view((-1,", "rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc) cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4))) cls_bbox", "image_shape = np.array(image_shape) box_mins = box_yx - (box_hw / 2.)", "torch.ones((len(keep), 1)).cuda() if confs.is_cuda else (c - 1) * torch.ones((len(keep),", "loc2bbox(src_bbox, loc): if src_bbox.size()[0] == 0: return torch.zeros((0, 4), dtype=loc.dtype)", "0.5 * w dst_bbox[:, 1::4] = ctr_y - 0.5 *", "roi_scores, rois, image_shape, input_shape, nms_iou = 0.3, 
confidence = 0.5):", "results[-1][:, 0:2] results[-1][:, :4] = self.frcnn_correct_boxes(box_xy, box_wh, input_shape, image_shape) return", "= c_confs > confidence if len(c_confs[c_confs_m]) > 0: #-----------------------------------------# #", "num_classes, 4 #-------------------------------------------------------------# roi = rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc) cls_bbox =", "#-------------------------------------------------------------# # 利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois, 4 -> num_rois, 1, 4", "> 0: #-----------------------------------------# # 取出得分高于confidence的框 #-----------------------------------------# boxes_to_process = cls_bbox[c_confs_m, c]", "nms_iou = 0.3, confidence = 0.5): results = [] bs", "def forward(self, roi_cls_locs, roi_scores, rois, image_shape, input_shape, nms_iou = 0.3,", "2] - src_bbox[:, 0], -1) src_height = torch.unsqueeze(src_bbox[:, 3] -", "rois = rois.view((bs, -1, 4)) #----------------------------------------------------------------------------------------------------------------# # 对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in", "= torch.exp(dh) * src_height dst_bbox = torch.zeros_like(loc) dst_bbox[:, 0::4] =", "ctr_y - 0.5 * h dst_bbox[:, 2::4] = ctr_x +", "= prob[:, c] c_confs_m = c_confs > confidence if len(c_confs[c_confs_m])", "#----------------------------------------------------------# # 对回归参数进行reshape #----------------------------------------------------------# roi_cls_loc = roi_cls_locs[i] * self.std #----------------------------------------------------------#", "1) * torch.ones((len(keep), 1)) #-----------------------------------------# # 将label、置信度、框的位置进行堆叠。 #-----------------------------------------# c_pred =", "2::4] = ctr_x + 0.5 * w dst_bbox[:, 3::4] =", "(c - 1) * torch.ones((len(keep), 1)).cuda() if confs.is_cuda else (c", "rois.view((bs, -1, 4)) #----------------------------------------------------------------------------------------------------------------# # 
对每一张图片进行处理,由于在predict.py的时候,我们只输入一张图片,所以for i in range(len(mbox_loc))只进行一次 #----------------------------------------------------------------------------------------------------------------#", "利用classifier网络的预测结果对建议框进行调整获得预测框 # num_rois, 4 -> num_rois, 1, 4 -> num_rois,", "= 0.5): results = [] bs = len(roi_cls_locs) #--------------------------------# #", "3::4] ctr_x = dx * src_width + src_ctr_x ctr_y =", "def __init__(self, std, num_classes): self.std = std self.num_classes = num_classes" ]
[]
[ "def test_add_path(self): with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): with self.assertWarns(DeprecationWarning):", "is not len(path_2): return False for i in range(len(path_1)): self.assertEqual(path_1[i][0],", "0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT, True) result = pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert", "[Point2D(v) for v in [(0,0), (0,1)]] assert type(path[0].x) == Zero", "1) def test_execute2(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution =", "len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase): def setUp(self): tree = pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen", "pyclipper.PyPolyNode() child.IsOpen = False child.Parent = tree child.Contour = PATH_SUBJ_1", "return c return [[convert_coordinate(c) for c in v] for v", "130) self.assertEqual(bounds.bottom, 210) def test_execute(self): solution = self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2)", "'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper, method)) class TestNamespaceMethods(TestCase):", "in j) class TestNonStandardNumbers(TestCase): def test_sympyzero(self): try: from sympy import", "in Python < 3. 
assert res == float(value) / self.scale", "def test_minkowski_sum2(self): solution = pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) self.assertGreater(len(solution), 0) def", "that means that node.Contour is a list of paths, should", "in paths_1)) def _modify_vertices(path, addend=0.0, multiplier=1.0, converter=None): path = path[:]", "paths_2, factor=None): if len(paths_1) != len(paths_2): return False paths_1 =", "self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1), 1) # outside of polygon self.assertEqual(pyclipper.PointInPolygon((500, 500),", "'CleanPolygon', 'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'):", "for k in j) def test_paths_scale_from(self): res = pyclipper.scale_from_clipper(self.paths) assert", "'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper,", "0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def test_clear(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1)", "j) def test_paths_scale_from(self): res = pyclipper.scale_from_clipper(self.paths) assert len(res) == len(self.paths)", "self.scale) assert isinstance(res, integer_types) assert res == int(value * self.scale)", "/ self.scale def test_path_scale_to(self): res = pyclipper.scale_to_clipper(self.path) assert len(res) ==", "paths))) class TestPyclipperAddPaths(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc =", "res for j in i for k in j) class", "[(0,0), (0,1)]] assert type(path[0].x) == Zero path = pyclipper.scale_to_clipper(path) assert", "0 because orientation is False area_neg = pyclipper.Area(PATH_SUBJ_1) area_pos =", "pyclipper.Pyclipper() # Some large triangle. 
path = [[[0, 1], [0,", "# Example polygons from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 = [[180, 200], [260,", "poly_type=pyclipper.PT_CLIP) def test_add_paths(self): # should not raise an exception self.pc.AddPaths([PATH_SUBJ_1,", "values: setattr(pc, prop_name, val) self.assertEqual(getattr(pc, prop_name), val) def test_pyclipper_properties(self): pc", "result = pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert result == path def", "self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path) > 0 for path in paths))) class", "test_reverse_path(self): solution = pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed = PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed) def", "4) def test_closed_paths_from_polytree(self): paths = pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def test_open_paths_from_polytree(self):", "= 2. self.pc = pyclipper.Pyclipper() def test_orientation(self): with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1)", "i in res for j in i) assert all(isinstance(k, integer_types)", "not available\") path = [(0,0), (0,1)] path = [Point2D(v) for", "Zero path = pyclipper.scale_to_clipper(path) assert path == [[0, 0], [0,", "PATH_SUBJ_1 = [[180, 200], [260, 200], [260, 150], [180, 150]]", "def test_area(self): # area less than 0 because orientation is", "* 3 def test_value_scale_to(self): value = 0.5 res = pyclipper.scale_to_clipper(value,", "= [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0]) def check_reversed_path(self, path_1, path_2): if len(path_1)", "exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP,", "import TestCase, main import sys if sys.version_info < (3,): integer_types", "factor pc = pyclipper.PyclipperOffset() for 
prop_name in ('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc,", "paths = pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def test_open_paths_from_polytree(self): paths = pyclipper.OpenPathsFromPolyTree(self.tree)", "= tree def test_polytree_to_paths(self): paths = pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4) def", "pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4) def test_closed_paths_from_polytree(self): paths = pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2)", "list of paths, should be path if node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__'))", "case # that means that node.Contour is a list of", "def test_orientation(self): with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def test_area(self): with self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1)", "c in v] for v in path] def run_tests(): main()", "'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath',", "= pyclipper.scale_from_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i, list) for", "all(((p_1 in paths_2) for p_1 in paths_1)) def _modify_vertices(path, addend=0.0,", "the operation. 
\"\"\" pc = pyclipper.Pyclipper() # Some large triangle.", "180), PATH_SUBJ_1), 1) # outside of polygon self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1),", "test_add_path(self): with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1,", "manualy_reversed) def test_reverse_paths(self): solution = pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed = [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0],", "less than 0 because orientation is False area_neg = pyclipper.Area(PATH_SUBJ_1)", "self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom, 210) def test_execute(self): solution =", "self.check_pypolynode(child) class TestPyclipperOffset(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 @staticmethod def", "2) def test_execute2(self): solution = self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution) def", "15, 0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT, True) result = pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD)", "\"\"\" Test whether coordinates passed into the library are returned", "self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) def test_minkowski_sum(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA,", "area_pos = pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0) self.assertGreater(area_pos, 0) self.assertEqual(abs(area_neg), area_pos) def", "pyclipper.PT_SUBJECT) def test_get_bounds(self): bounds = self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left, 180)", "= pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def test_open_paths_from_polytree(self): paths = 
pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths,", "converter=None): path = path[:] def convert_coordinate(c): if multiplier is not", "> 2) # check vertex coordinate, should not be an", "child2.IsOpen = False child2.Parent = child child2.Contour = [] child.Childs.append(child2)", "pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def check_paths(self, paths, expected_nr): self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path)", "False) def test_minkowski_diff(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def test_add_path(self): with", "assert all(isinstance(j, list) for i in res for j in", "for k in j) class TestNonStandardNumbers(TestCase): def test_sympyzero(self): try: from", "[260, 200], [260, 150], [180, 150]] # square, orientation is", "self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for child in node.Childs: self.check_pypolynode(child) class TestPyclipperOffset(TestCase): def", "path def check_pypolynode(self, node): self.assertTrue(len(node.Contour) == 0 or len(node.Contour) >", "TestScalingFunctions(TestCase): scale = 2 ** 31 path = [(0, 0),", "= self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution) def test_execute_empty(self): pc = pyclipper.Pyclipper()", "= pyclipper.Pyclipper() # Some large triangle. 
path = [[[0, 1],", "res = pyclipper.scale_from_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i, list)", "= [] child.Childs.append(child2) self.tree = tree def test_polytree_to_paths(self): paths =", "pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) pc.Clear() solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution),", "= [[1, 1], ] # less than 2 vertices class", "in i) def test_paths_scale_to(self): res = pyclipper.scale_to_clipper(self.paths) assert len(res) ==", "setUp(self): pyclipper.SCALING_FACTOR = 1 @staticmethod def add_path(pc, path): pc.AddPath(path, pyclipper.JT_ROUND,", "('ReverseSolution', 'PreserveCollinear', 'StrictlySimple'): self.check_property_assignment(pc, prop_name, [True, False]) def test_pyclipperoffset_properties(self): for", "self.pc.Execute(*self.default_args) def test_exact_results(self): \"\"\" Test whether coordinates passed into the", "pyclipper.scale_from_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i, list) for i", "pyclipper.SCALING_FACTOR = 2. 
self.pc = pyclipper.Pyclipper() def test_orientation(self): with self.assertWarns(DeprecationWarning):", "def test_sympyzero(self): try: from sympy import Point2D from sympy.core.numbers import", "= [[190, 210], [240, 210], [240, 130], [190, 130]] #", "-123]) class TestPyclipperExecute(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc =", "0 or len(node.Contour) > 2) # check vertex coordinate, should", "PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed) def test_reverse_paths(self): solution = pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed =", "else None) for p in paths_2] return all(((p_1 in paths_2)", "subj_paths, addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier), pyclipper.PT_CLIP) for subj_path in", "pyclipper.PyIntRect) self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom, 210) def", "< (3,): integer_types = (int, long) else: integer_types = (int,)", "pc, prop_name, values): for val in values: setattr(pc, prop_name, val)", "PATH_CLIP_1) pc.Clear() solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 0) class", "i for k in j) def test_paths_scale_from(self): res = pyclipper.scale_from_clipper(self.paths)", "PATH_CLIP_1) solution = pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1)", "Tests for Pyclipper wrapper library. 
\"\"\" from __future__ import print_function", "all(isinstance(j, list) for i in res for j in i)", "400], [200, 300], [100, 200], [300, 200]] # greek letter", "pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() self.add_default_paths(self.pc) self.default_args = [pyclipper.CT_INTERSECTION,", "TestCase, main import sys if sys.version_info < (3,): integer_types =", "[path] * 3 def test_value_scale_to(self): value = 0.5 res =", "i in res for j in i) def test_path_scale_from(self): res", "test_sympyzero(self): try: from sympy import Point2D from sympy.core.numbers import Zero", "[240, 210], [240, 130], [190, 130]] # square PATH_SIGMA =", "] # less than 2 vertices class TestPyclipperModule(TestCase): def test_has_classes(self):", "def setUp(self): pyclipper.SCALING_FACTOR = 1 def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def", "len(self.path) assert all(isinstance(i, list) for i in res) assert all(isinstance(j,", "import Point2D from sympy.core.numbers import Zero except ImportError: self.skipTest(\"Skipping, sympy", "path = path[:] def convert_coordinate(c): if multiplier is not None:", "c += addend if converter: c = converter(c) return c", "self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP) except pyclipper.ClipperException: self.fail(\"add_paths raised ClipperException when not", "1 self.pc = pyclipper.Pyclipper() self.add_default_paths(self.pc) self.default_args = [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD]", "test_clean_polygon(self): solution = pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1)) def test_clean_polygons(self): solution =", "child.IsOpen = True child.Parent = tree child.Contour = PATH_SUBJ_2 tree.Childs.append(child)", "True child.Parent = tree child.Contour = PATH_SUBJ_2 tree.Childs.append(child) child2 =", "pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) 
self.assertGreater(len(solution), 0) def test_minkowski_diff(self): solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1,", "[190, 130]] # square PATH_SIGMA = [[300, 400], [100, 400],", "[2.912, 132.12, 12, -123]) class TestPyclipperExecute(TestCase): def setUp(self): pyclipper.SCALING_FACTOR =", "solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 1) def test_execute2(self): pc", "def test_minkowski_sum2(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) def test_minkowski_diff(self): with", "with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase): scale = 2", "with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) def test_minkowski_diff(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1,", "for j in i for k in j) class TestNonStandardNumbers(TestCase):", "def check_pypolynode(self, node): self.assertTrue(len(node.Contour) == 0 or len(node.Contour) > 2)", "TestPyclipperOffset(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 @staticmethod def add_path(pc, path):", "pyclipper.PyPolyNode() child.IsOpen = True child.Parent = tree child.Contour = PATH_SUBJ_2", "TestClassProperties(TestCase): def check_property_assignment(self, pc, prop_name, values): for val in values:", "main import sys if sys.version_info < (3,): integer_types = (int,", "polygons from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 = [[180, 200], [260, 200], [260,", "'StrictlySimple'): self.check_property_assignment(pc, prop_name, [True, False]) def test_pyclipperoffset_properties(self): for factor in", "= pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)),", "Pyclipper 
wrapper library. \"\"\" from __future__ import print_function from unittest2", "= pyclipper.Area(PATH_SUBJ_1) area_pos = pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0) self.assertGreater(area_pos, 0) self.assertEqual(abs(area_neg),", "130], [190, 130]] # square PATH_SIGMA = [[300, 400], [100,", "child child2.Contour = [] child.Childs.append(child2) self.tree = tree def test_polytree_to_paths(self):", "in values: setattr(pc, prop_name, val) self.assertEqual(getattr(pc, prop_name), val) def test_pyclipper_properties(self):", "pc = pyclipper.Pyclipper() for prop_name in ('ReverseSolution', 'PreserveCollinear', 'StrictlySimple'): self.check_property_assignment(pc,", "by the operation. \"\"\" pc = pyclipper.Pyclipper() # Some large", "False for i in range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1]) def", "[180, 150]] # square, orientation is False PATH_SUBJ_2 = [[215,", "add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT) @staticmethod def add_paths(pc, clip_path,", "not affected by the operation. \"\"\" pc = pyclipper.Pyclipper() #", "in paths))) class TestPyclipperAddPaths(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc", "self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP) except pyclipper.ClipperException: self.fail(\"add_paths raised", "with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args) def test_exact_results(self): \"\"\" Test whether coordinates passed", "child2.Contour = PATTERN child.Childs.append(child2) # empty contour should not #", "*= multiplier if addend is not None: c += addend", "TestScalingFactorWarning(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 2. 
self.pc = pyclipper.Pyclipper() def", "node): self.assertTrue(len(node.Contour) == 0 or len(node.Contour) > 2) # check", "library are returned exactly, if they are not affected by", "if len(path_1) is not len(path_2): return False for i in", "pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def test_add_path(self): with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self):", "test_path_scale_to(self): res = pyclipper.scale_to_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i,", "def test_open_paths_from_polytree(self): paths = pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def check_paths(self, paths,", "[240, 130], [190, 130]] # square PATH_SIGMA = [[300, 400],", "tree.Childs.append(child) child = pyclipper.PyPolyNode() child.IsOpen = True child.Parent = tree", "affected by the operation. \"\"\" pc = pyclipper.Pyclipper() # Some", "all(isinstance(i, list) for i in res) assert all(isinstance(j, float) for", "'__iter__')) for child in node.Childs: self.check_pypolynode(child) class TestPyclipperOffset(TestCase): def setUp(self):", "list) for i in res for j in i) assert", "pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1)) def test_clean_polygons(self): solution = pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1)", "result == path def check_pypolynode(self, node): self.assertTrue(len(node.Contour) == 0 or", "def test_minkowski_sum(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) def test_minkowski_sum2(self): with", "\"\"\" pc = pyclipper.Pyclipper() # Some large triangle. 
path =", "self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase): def setUp(self): tree =", "def test_clean_polygons(self): solution = pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class", "val in values: setattr(pc, prop_name, val) self.assertEqual(getattr(pc, prop_name), val) def", "= False child2.Parent = child child2.Contour = PATTERN child.Childs.append(child2) #", "# outside of polygon self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1), 0) def test_minkowski_sum(self):", "self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def test_has_namespace_methods(self): for method in ('Orientation', 'Area', 'PointInPolygon',", "test_polytree_to_paths(self): paths = pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4) def test_closed_paths_from_polytree(self): paths =", "pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier), pyclipper.PT_CLIP) for subj_path in subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend,", "test_value_scale_to(self): value = 0.5 res = pyclipper.scale_to_clipper(value, self.scale) assert isinstance(res,", "included in filtered results child2 = pyclipper.PyPolyNode() child2.IsOpen = False", "were invalid\") class TestClassProperties(TestCase): def check_property_assignment(self, pc, prop_name, values): for", "# area less than 0 because orientation is False area_neg", "1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase): def setUp(self): tree = pyclipper.PyPolyNode()", "# Some large triangle. path = [[[0, 1], [0, 0],", "1) self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution, solution_single) def test_clean_polygon(self): solution = pyclipper.CleanPolygon(PATH_CLIP_1)", "0) class TestScalingFactorWarning(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 2. 
self.pc =", "else None) for p in paths_1] paths_2 = [_modify_vertices(p, multiplier=factor,", "less than 2 vertices class TestPyclipperModule(TestCase): def test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper'))", "[[convert_coordinate(c) for c in v] for v in path] def", "def check_property_assignment(self, pc, prop_name, values): for val in values: setattr(pc,", "TestPyclipperModule(TestCase): def test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def test_has_namespace_methods(self): for", "because orientation is False area_neg = pyclipper.Area(PATH_SUBJ_1) area_pos = pyclipper.Area(PATH_SUBJ_1[::-1])", "sympy.core.numbers import Zero except ImportError: self.skipTest(\"Skipping, sympy not available\") path", "when not all paths were invalid\") class TestClassProperties(TestCase): def check_property_assignment(self,", "i) def test_path_scale_from(self): res = pyclipper.scale_from_clipper(self.path) assert len(res) == len(self.path)", "test_clear(self): self.pc.Clear() with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args) def test_exact_results(self): \"\"\" Test whether", "def test_execute2(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute2(2.0)", "# be included in filtered results child2 = pyclipper.PyPolyNode() child2.IsOpen", "self.fail(\"add_paths raised ClipperException when not all paths were invalid\") class", "self.assertEqual(len(solution), 1) self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution, solution_single) def test_clean_polygon(self): solution =", "def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self): # area less than", "for i in res for j in i) def test_path_scale_from(self):", "solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1) 
self.assertEqual(len(solution), 1) def test_simplify_polygons(self): solution = pyclipper.SimplifyPolygons([PATH_SUBJ_1])", "prop_name, [True, False]) def test_pyclipperoffset_properties(self): for factor in range(6): pyclipper.SCALING_FACTOR", "if multiplier is not None: c *= multiplier if addend", "None) for p in paths_2] return all(((p_1 in paths_2) for", "paths, expected_nr): self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path) > 0 for path in", "should not # be included in filtered results child2 =", "pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def test_clear(self): self.pc.Clear() with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args) def", "'Area', 'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths',", "def add_paths(pc, clip_path, subj_paths, addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier), pyclipper.PT_CLIP)", "child2 = pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent = child child2.Contour", "if node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for child in node.Childs: self.check_pypolynode(child) class", "sigma PATTERN = [[4, -6], [6, -6], [-4, 6], [-6,", "pyclipper.Pyclipper() def test_orientation(self): with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def test_area(self): with self.assertWarns(DeprecationWarning):", "self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) def test_minkowski_sum2(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA],", "of paths, should be path if node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for", "[6, -6], [-4, 6], [-6, 6]] INVALID_PATH = [[1, 1],", "210) def test_execute(self): solution = 
self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2) def test_execute2(self):", "path[:] def convert_coordinate(c): if multiplier is not None: c *=", "in range(6): pyclipper.SCALING_FACTOR = 10 ** factor pc = pyclipper.PyclipperOffset()", "pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 1) def test_execute2(self): pc = pyclipper.PyclipperOffset()", "class TestPyclipperModule(TestCase): def test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def test_has_namespace_methods(self):", "child2.Contour = [] child.Childs.append(child2) self.tree = tree def test_polytree_to_paths(self): paths", "should not raise an exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): #", "pyclipper.PFT_EVENODD) assert result == path def check_pypolynode(self, node): self.assertTrue(len(node.Contour) ==", "def test_clear(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) pc.Clear() solution =", "j in i) assert all(isinstance(k, integer_types) for i in res", "i) assert all(isinstance(k, integer_types) for i in res for j", "pyclipper.scale_to_clipper(path) assert path == [[0, 0], [0, 2147483648]] def _do_solutions_match(paths_1,", "whether coordinates passed into the library are returned exactly, if", "paths_2] return all(((p_1 in paths_2) for p_1 in paths_1)) def", "in i) assert all(isinstance(k, integer_types) for i in res for", "solution = self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution) def test_execute_empty(self): pc =", "self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1), 0) def test_minkowski_sum(self): solution = pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA,", "test_open_paths_from_polytree(self): paths = pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def check_paths(self, paths, 
expected_nr):", "Python < 3. assert res == float(value) / self.scale def", "in res) assert all(isinstance(j, float) for i in res for", "for i in res) assert all(isinstance(j, float) for i in", "returned exactly, if they are not affected by the operation.", "== len(self.paths) assert all(isinstance(i, list) for i in res) assert", "self.scale) def test_value_scale_from(self): value = 1000000000000 res = pyclipper.scale_from_clipper(value, self.scale)", "for path in paths))) class TestPyclipperAddPaths(TestCase): def setUp(self): pyclipper.SCALING_FACTOR =", "test_exact_results(self): \"\"\" Test whether coordinates passed into the library are", "path = pyclipper.scale_to_clipper(path) assert path == [[0, 0], [0, 2147483648]]", "def test_reverse_paths(self): solution = pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed = [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0])", "range(6): pyclipper.SCALING_FACTOR = 10 ** factor pc = pyclipper.PyclipperOffset() for", "PATH_SIGMA = [[300, 400], [100, 400], [200, 300], [100, 200],", "= (int,) import pyclipper # Example polygons from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1", "float) # Convert to float to get \"normal\" division in", "self.check_property_assignment(pc, prop_name, [2.912, 132.12, 12, -123]) class TestPyclipperExecute(TestCase): def setUp(self):", "Zero except ImportError: self.skipTest(\"Skipping, sympy not available\") path = [(0,0),", "= pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 0) class TestScalingFactorWarning(TestCase): def setUp(self):", "130]] # square PATH_SIGMA = [[300, 400], [100, 400], [200,", "child2.Parent = child child2.Contour = [] child.Childs.append(child2) self.tree = tree", "== path def check_pypolynode(self, node): self.assertTrue(len(node.Contour) == 0 or len(node.Contour)", "is a list of paths, should be path if node.Contour:", "self.assertRaises(pyclipper.ClipperException): 
self.pc.Execute(*self.default_args) def test_exact_results(self): \"\"\" Test whether coordinates passed into", "in paths_2) for p_1 in paths_1)) def _modify_vertices(path, addend=0.0, multiplier=1.0,", "solution = pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed = PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed) def test_reverse_paths(self):", "test_reverse_paths(self): solution = pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed = [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0]) def", "class TestClassProperties(TestCase): def check_property_assignment(self, pc, prop_name, values): for val in", "path): pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def test_execute(self): pc = pyclipper.PyclipperOffset() self.add_path(pc,", "test_execute(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute(2.0) self.assertIsInstance(solution,", "6]] INVALID_PATH = [[1, 1], ] # less than 2", "pyclipper.SCALING_FACTOR = 10 ** factor pc = pyclipper.PyclipperOffset() for prop_name", "test_area(self): with self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self): with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200),", "v] for v in path] def run_tests(): main() if __name__", "pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent = child child2.Contour = PATTERN", "pc = pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def test_clear(self):", "pyclipper.ClipperException: self.fail(\"add_paths raised ClipperException when not all paths were invalid\")", "= 0.5 res = pyclipper.scale_to_clipper(value, self.scale) assert isinstance(res, integer_types) assert", "setUp(self): tree = pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen = True child =", "True child = 
pyclipper.PyPolyNode() child.IsOpen = False child.Parent = tree", "False) def test_minkowski_sum2(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) def test_minkowski_diff(self):", "get \"normal\" division in Python < 3. assert res ==", "self.check_pypolynode(solution) def test_execute_empty(self): pc = pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO,", "class TestFilterPyPolyNode(TestCase): def setUp(self): tree = pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen =", "pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen = True child = pyclipper.PyPolyNode() child.IsOpen =", "PATH_SUBJ_1), -1) # in polygon self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1), 1) #", "child.Contour = PATH_SUBJ_2 tree.Childs.append(child) child2 = pyclipper.PyPolyNode() child2.IsOpen = False", "integer_types) for i in res for j in i for", "False paths_1 = [_modify_vertices(p, multiplier=factor, converter=round if factor else None)", "self.assertEqual(len(solution), 1) def test_execute2(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution", "PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase): scale = 2 ** 31 path", "list) for i in res) assert all(isinstance(j, list) for i", "pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) self.assertGreater(len(solution), 0) def test_minkowski_sum2(self): solution = pyclipper.MinkowskiSum2(PATTERN,", "pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert result == path def check_pypolynode(self, node):", "= pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert result == path def check_pypolynode(self,", "assert all(isinstance(i, list) for i in res) assert all(isinstance(j, float)", "12, -123]) class TestPyclipperExecute(TestCase): def setUp(self): 
pyclipper.SCALING_FACTOR = 1 self.pc", "subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier), pyclipper.PT_SUBJECT) def test_get_bounds(self): bounds = self.pc.GetBounds()", "pc.Clear() solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 0) class TestScalingFactorWarning(TestCase):", "raise an exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): # should not", "addend is not None: c += addend if converter: c", "polygon self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1), 1) # outside of polygon self.assertEqual(pyclipper.PointInPolygon((500,", "def test_minkowski_diff(self): solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution), 0) def test_reverse_path(self):", "pyclipper.Pyclipper() for prop_name in ('ReverseSolution', 'PreserveCollinear', 'StrictlySimple'): self.check_property_assignment(pc, prop_name, [True,", "pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed = PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed) def test_reverse_paths(self): solution =", "assert all(isinstance(j, float) for i in res for j in", "test_has_namespace_methods(self): for method in ('Orientation', 'Area', 'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon',", "res for j in i) assert all(isinstance(k, integer_types) for i", "for c in v] for v in path] def run_tests():", "test_minkowski_sum(self): solution = pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) self.assertGreater(len(solution), 0) def test_minkowski_sum2(self):", "2) def check_paths(self, paths, expected_nr): self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path) > 0", "= pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution),", "for prop_name in ('ReverseSolution', 'PreserveCollinear', 
'StrictlySimple'): self.check_property_assignment(pc, prop_name, [True, False])", "paths, should be path if node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for child", "def test_exact_results(self): \"\"\" Test whether coordinates passed into the library", "value = 0.5 res = pyclipper.scale_to_clipper(value, self.scale) assert isinstance(res, integer_types)", "check_paths(self, paths, expected_nr): self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path) > 0 for path", "res = pyclipper.scale_to_clipper(value, self.scale) assert isinstance(res, integer_types) assert res ==", "test_pyclipper_properties(self): pc = pyclipper.Pyclipper() for prop_name in ('ReverseSolution', 'PreserveCollinear', 'StrictlySimple'):", "self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def test_area(self): with self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self): with", "in node.Childs: self.check_pypolynode(child) class TestPyclipperOffset(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1", "self.assertEqual(len(solution), 1) def test_simplify_polygons(self): solution = pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single = pyclipper.SimplifyPolygon(PATH_SUBJ_1)", "[] child.Childs.append(child2) self.tree = tree def test_polytree_to_paths(self): paths = pyclipper.PolyTreeToPaths(self.tree)", "solution = pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed = [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0]) def check_reversed_path(self,", "vertices class TestPyclipperModule(TestCase): def test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def", "self.assertGreater(area_pos, 0) self.assertEqual(abs(area_neg), area_pos) def test_point_in_polygon(self): # on polygon self.assertEqual(pyclipper.PointInPolygon((180,", "= pyclipper.ReversePaths([PATH_SUBJ_1]) 
manualy_reversed = [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0]) def check_reversed_path(self, path_1,", "not None: c += addend if converter: c = converter(c)", "[[[0, 1], [0, 0], [15 ** 15, 0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT,", "pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) def test_minkowski_sum2(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False)", "float) for i in res for j in i for", "convert_coordinate(c): if multiplier is not None: c *= multiplier if", "j in i for k in j) class TestNonStandardNumbers(TestCase): def", "self.assertTrue(hasattr(pyclipper, method)) class TestNamespaceMethods(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 def", "except ImportError: self.skipTest(\"Skipping, sympy not available\") path = [(0,0), (0,1)]", "assert all(isinstance(k, float) for i in res for j in", "test_paths_scale_from(self): res = pyclipper.scale_from_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i,", "pyclipper.SCALING_FACTOR = 1 def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self): #", "multiplier if addend is not None: c += addend if", "-6], [-4, 6], [-6, 6]] INVALID_PATH = [[1, 1], ]", "pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution, solution_single) def test_clean_polygon(self): solution", "_do_solutions_match(solution, solution_single) def test_clean_polygon(self): solution = pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1)) def", "1 def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self): # area less", "for i in res) assert all(isinstance(j, integer_types) for i in", 
"test_minkowski_sum2(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) def test_minkowski_diff(self): with self.assertWarns(DeprecationWarning):", "200), PATH_SUBJ_1), -1) def test_minkowski_sum(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False)", "def test_value_scale_to(self): value = 0.5 res = pyclipper.scale_to_clipper(value, self.scale) assert", "test_simplify_polygon(self): solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) def test_simplify_polygons(self): solution =", "= (int, long) else: integer_types = (int,) import pyclipper #", "400], [100, 400], [200, 300], [100, 200], [300, 200]] #", "that node.Contour is a list of paths, should be path", "sys if sys.version_info < (3,): integer_types = (int, long) else:", "from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 = [[180, 200], [260, 200], [260, 150],", "triangle. path = [[[0, 1], [0, 0], [15 ** 15,", "for i in res for j in i) def test_paths_scale_to(self):", "print_function from unittest2 import TestCase, main import sys if sys.version_info", "150]] # square, orientation is False PATH_SUBJ_2 = [[215, 160],", "= [(0, 0), (1, 1)] paths = [path] * 3", "False) self.assertGreater(len(solution), 0) def test_minkowski_sum2(self): solution = pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False)", "self.check_paths(paths, 2) def test_open_paths_from_polytree(self): paths = pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def", "= pyclipper.Pyclipper() for prop_name in ('ReverseSolution', 'PreserveCollinear', 'StrictlySimple'): self.check_property_assignment(pc, prop_name,", "self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self): # area less than 0 because", "self.scale def test_path_scale_to(self): res = pyclipper.scale_to_clipper(self.path) assert len(res) == len(self.path)", 
"def test_minkowski_sum(self): solution = pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) self.assertGreater(len(solution), 0) def", "all paths were invalid\") class TestClassProperties(TestCase): def check_property_assignment(self, pc, prop_name,", "for prop_name in ('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc, prop_name, [2.912, 132.12, 12,", "into the library are returned exactly, if they are not", "letter sigma PATTERN = [[4, -6], [6, -6], [-4, 6],", "= pyclipper.scale_to_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i, list) for", "for method in ('Orientation', 'Area', 'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons',", "= pyclipper.PyclipperOffset() for prop_name in ('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc, prop_name, [2.912,", "child.Childs.append(child2) # empty contour should not # be included in", "in v] for v in path] def run_tests(): main() if", "[PATH_SIGMA], False) self.assertGreater(len(solution), 0) def test_minkowski_diff(self): solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2)", "test_execute_empty(self): pc = pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def", "clip_path, subj_paths, addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier), pyclipper.PT_CLIP) for subj_path", "self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase): scale = 2 **", "assert len(res) == len(self.path) assert all(isinstance(i, list) for i in", "i) assert all(isinstance(k, float) for i in res for j", "sys.version_info < (3,): integer_types = (int, long) else: integer_types =", "http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 = [[180, 200], [260, 200], [260, 150], [180,", "assert path == 
[[0, 0], [0, 2147483648]] def _do_solutions_match(paths_1, paths_2,", "pyclipper.scale_to_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i, list) for i", "in res for j in i) assert all(isinstance(k, integer_types) for", "len(res) == len(self.paths) assert all(isinstance(i, list) for i in res)", "test_execute(self): solution = self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2) def test_execute2(self): solution =", "= pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen = True child = pyclipper.PyPolyNode() child.IsOpen", "i in range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1]) def test_simplify_polygon(self): solution", "def convert_coordinate(c): if multiplier is not None: c *= multiplier", "= True child = pyclipper.PyPolyNode() child.IsOpen = False child.Parent =", "not raise an exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): # should", "not be an iterable (in that case # that means", "None) for p in paths_1] paths_2 = [_modify_vertices(p, multiplier=factor, converter=round", "pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) def test_minkowski_diff(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def", "def check_reversed_path(self, path_1, path_2): if len(path_1) is not len(path_2): return", "division in Python < 3. 
assert res == float(value) /", "self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def test_clear(self): self.pc.Clear() with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args)", "def test_clear(self): self.pc.Clear() with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args) def test_exact_results(self): \"\"\" Test", "== Zero path = pyclipper.scale_to_clipper(path) assert path == [[0, 0],", "pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution), 0) def test_reverse_path(self): solution = pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed", "test_orientation(self): with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def test_area(self): with self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def", "('Orientation', 'Area', 'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff',", "sympy not available\") path = [(0,0), (0,1)] path = [Point2D(v)", "test_add_paths(self): # should not raise an exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT)", "multiplier=factor, converter=round if factor else None) for p in paths_1]", "2 ** 31 path = [(0, 0), (1, 1)] paths", "self.pc = pyclipper.Pyclipper() self.add_default_paths(self.pc) self.default_args = [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod", "pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def test_clear(self): self.pc.Clear() with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args) def test_exact_results(self):", "self.add_path(pc, PATH_CLIP_1) pc.Clear() solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 0)", "PATH_SIGMA, False) def test_minkowski_sum2(self): with 
self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) def", "paths = pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4) def test_closed_paths_from_polytree(self): paths = pyclipper.ClosedPathsFromPolyTree(self.tree)", "node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for child in node.Childs: self.check_pypolynode(child) class TestPyclipperOffset(TestCase):", "i in res) assert all(isinstance(j, integer_types) for i in res", "-6], [6, -6], [-4, 6], [-6, 6]] INVALID_PATH = [[1,", "self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom, 210)", "list) self.assertEqual(len(solution), 0) class TestScalingFactorWarning(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 2.", "j in i) assert all(isinstance(k, float) for i in res", "= pyclipper.scale_to_clipper(value, self.scale) assert isinstance(res, integer_types) assert res == int(value", "test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP, True) try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1],", "converter=round if factor else None) for p in paths_1] paths_2", "self.assertEqual(len(solution), len(PATH_CLIP_1)) def test_clean_polygons(self): solution = pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]),", "pyclipper.scale_to_clipper(value, self.scale) assert isinstance(res, integer_types) assert res == int(value *", "test_minkowski_sum(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) def test_minkowski_sum2(self): with self.assertWarns(DeprecationWarning):", "3 def test_value_scale_to(self): value = 0.5 res = pyclipper.scale_to_clipper(value, self.scale)", "tree.IsOpen = True child = pyclipper.PyPolyNode() 
child.IsOpen = False child.Parent", "self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def test_clear(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) pc.Clear()", "class TestPyclipperOffset(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 @staticmethod def add_path(pc,", "a list of paths, should be path if node.Contour: self.assertFalse(hasattr(node.Contour[0][0],", "[PATH_SIGMA], False) def test_minkowski_diff(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def test_add_path(self):", "self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP, True) def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH,", "else: integer_types = (int,) import pyclipper # Example polygons from", "self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def test_clear(self): pc = pyclipper.PyclipperOffset() self.add_path(pc,", "self.assertIsInstance(solution, list) self.assertEqual(len(solution), 0) class TestScalingFactorWarning(TestCase): def setUp(self): pyclipper.SCALING_FACTOR =", "poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase): scale = 2 ** 31 path =", "class TestPyclipperExecute(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper()", "210], [240, 210], [240, 130], [190, 130]] # square PATH_SIGMA", "260) self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom, 210) def test_execute(self): solution = self.pc.Execute(*self.default_args)", "check_property_assignment(self, pc, prop_name, values): for val in values: setattr(pc, prop_name,", "pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode) 
self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0)", "'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper, method)) class TestNamespaceMethods(TestCase): def setUp(self): pyclipper.SCALING_FACTOR =", "test_add_paths(self): with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase): scale =", "for j in i) assert all(isinstance(k, integer_types) for i in", "import sys if sys.version_info < (3,): integer_types = (int, long)", "setattr(pc, prop_name, val) self.assertEqual(getattr(pc, prop_name), val) def test_pyclipper_properties(self): pc =", "path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1]) def test_simplify_polygon(self): solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1)", "in res for j in i) def test_path_scale_from(self): res =", "= PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed) def test_reverse_paths(self): solution = pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed", "i in res) assert all(isinstance(j, float) for i in res", "== int(value * self.scale) def test_value_scale_from(self): value = 1000000000000 res", "# triangle PATH_CLIP_1 = [[190, 210], [240, 210], [240, 130],", "contour should not # be included in filtered results child2", "self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) # in polygon self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1),", "test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP, True) def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths,", "addend=0.0, multiplier=1.0, converter=None): path = path[:] def convert_coordinate(c): if multiplier", "1)] paths = [path] * 3 def test_value_scale_to(self): value =", "<filename>tests/test_pyclipper.py<gh_stars>0 #!/usr/bin/python \"\"\" 
Tests for Pyclipper wrapper library. \"\"\" from", "are not affected by the operation. \"\"\" pc = pyclipper.Pyclipper()", "test_clean_polygons(self): solution = pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase):", "= pyclipper.Pyclipper() def test_add_path(self): # should not raise an exception", "an exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH,", "type(path[0].x) == Zero path = pyclipper.scale_to_clipper(path) assert path == [[0,", "self.add_default_paths(self.pc) self.default_args = [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod def add_default_paths(pc): pc.AddPath(PATH_CLIP_1,", "PATH_SUBJ_2) self.assertGreater(len(solution), 0) def test_reverse_path(self): solution = pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed =", "= pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) def test_simplify_polygons(self): solution = pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single", "= tree child.Contour = PATH_SUBJ_1 tree.Childs.append(child) child = pyclipper.PyPolyNode() child.IsOpen", "for j in i) def test_path_scale_from(self): res = pyclipper.scale_from_clipper(self.path) assert", "len(node.Contour) > 2) # check vertex coordinate, should not be", "for child in node.Childs: self.check_pypolynode(child) class TestPyclipperOffset(TestCase): def setUp(self): pyclipper.SCALING_FACTOR", "for val in values: setattr(pc, prop_name, val) self.assertEqual(getattr(pc, prop_name), val)", "area_pos) def test_point_in_polygon(self): # on polygon self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1)", "float) for i in res for j in i) def", "0) def test_reverse_path(self): solution = pyclipper.ReversePath(PATH_SUBJ_1) 
manualy_reversed = PATH_SUBJ_1[::-1] self.check_reversed_path(solution,", "= [[215, 160], [230, 190], [200, 190]] # triangle PATH_CLIP_1", "pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase): def setUp(self): tree", "child.Childs.append(child2) self.tree = tree def test_polytree_to_paths(self): paths = pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths,", "pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def test_execute(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1)", "to get \"normal\" division in Python < 3. assert res", "in i) def test_path_scale_from(self): res = pyclipper.scale_from_clipper(self.path) assert len(res) ==", "all(isinstance(i, list) for i in res) assert all(isinstance(j, integer_types) for", "v in [(0,0), (0,1)]] assert type(path[0].x) == Zero path =", "'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper, method)) class TestNamespaceMethods(TestCase): def setUp(self):", "def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() self.add_default_paths(self.pc) self.default_args", "i in res for j in i) def test_paths_scale_to(self): res", "def add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT) @staticmethod def add_paths(pc,", "square, orientation is False PATH_SUBJ_2 = [[215, 160], [230, 190],", "-1) def test_minkowski_sum(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) def test_minkowski_sum2(self):", "# on polygon self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) # in polygon", "(0,1)] path = [Point2D(v) for v in [(0,0), (0,1)]] assert", "assert isinstance(res, integer_types) assert res == int(value * self.scale) def", "def _modify_vertices(path, 
addend=0.0, multiplier=1.0, converter=None): path = path[:] def convert_coordinate(c):", "assert all(isinstance(k, integer_types) for i in res for j in", "'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree',", "with self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self): with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1),", "[[300, 400], [100, 400], [200, 300], [100, 200], [300, 200]]", "def test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def test_has_namespace_methods(self): for method", "test_pyclipperoffset_properties(self): for factor in range(6): pyclipper.SCALING_FACTOR = 10 ** factor", "is False PATH_SUBJ_2 = [[215, 160], [230, 190], [200, 190]]", "manualy_reversed[0]) def check_reversed_path(self, path_1, path_2): if len(path_1) is not len(path_2):", "PATH_SIGMA, False) self.assertGreater(len(solution), 0) def test_minkowski_sum2(self): solution = pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA],", "in polygon self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1), 1) # outside of polygon", "= pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def test_clear(self):", "res for j in i) def test_paths_scale_to(self): res = pyclipper.scale_to_clipper(self.paths)", "pyclipper.PyPolyNode) self.check_pypolynode(solution) def test_execute_empty(self): pc = pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION,", "6], [-6, 6]] INVALID_PATH = [[1, 1], ] # less", "pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT) 
@staticmethod def add_paths(pc, clip_path, subj_paths, addend=None,", "setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() def test_add_path(self): #", "node.Childs: self.check_pypolynode(child) class TestPyclipperOffset(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 @staticmethod", "pyclipper.scale_from_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i, list) for i", "[300, 200]] # greek letter sigma PATTERN = [[4, -6],", "= 10 ** factor pc = pyclipper.PyclipperOffset() for prop_name in", "long) else: integer_types = (int,) import pyclipper # Example polygons", "False child2.Parent = child child2.Contour = PATTERN child.Childs.append(child2) # empty", "Test whether coordinates passed into the library are returned exactly,", "[INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP, True) try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH],", "self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP, True) try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP)", "for p_1 in paths_1)) def _modify_vertices(path, addend=0.0, multiplier=1.0, converter=None): path", "INVALID_PATH], pyclipper.PT_CLIP, True) try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP)", "@staticmethod def add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT) @staticmethod def", "ClipperException when not all paths were invalid\") class TestClassProperties(TestCase): def", "!= len(paths_2): return False paths_1 = [_modify_vertices(p, multiplier=factor, converter=round if", "= pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def test_clear(self): self.pc.Clear()", 
"in ('Orientation', 'Area', 'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2',", "self.add_path(pc, PATH_CLIP_1) solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 1) def", "all(isinstance(k, integer_types) for i in res for j in i", "# should not raise an exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self):", "in i for k in j) def test_paths_scale_from(self): res =", "PATH_SUBJ_1), -1) def test_minkowski_sum(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) def", "if sys.version_info < (3,): integer_types = (int, long) else: integer_types", "pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0) self.assertGreater(area_pos, 0) self.assertEqual(abs(area_neg), area_pos) def test_point_in_polygon(self): #", "self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution) def test_execute_empty(self): pc = pyclipper.Pyclipper() with", "pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution, solution_single)", "self.check_reversed_path(solution, manualy_reversed) def test_reverse_paths(self): solution = pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed = [PATH_SUBJ_1[::-1]]", "'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper, method)) class TestNamespaceMethods(TestCase): def", "j in i for k in j) def test_paths_scale_from(self): res", "False child.Parent = tree child.Contour = PATH_SUBJ_1 tree.Childs.append(child) child =", "j) class TestNonStandardNumbers(TestCase): def test_sympyzero(self): try: from sympy import Point2D", "= [Point2D(v) for v 
in [(0,0), (0,1)]] assert type(path[0].x) ==", "pyclipper.Orientation(PATH_SUBJ_1) def test_area(self): with self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self): with self.assertWarns(DeprecationWarning):", "self.tree = tree def test_polytree_to_paths(self): paths = pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4)", "coordinate, should not be an iterable (in that case #", "= [[180, 200], [260, 200], [260, 150], [180, 150]] #", "def test_point_in_polygon(self): # on polygon self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) #", "self.pc.AddPaths, [INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP, True) try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1,", "= converter(c) return c return [[convert_coordinate(c) for c in v]", "if factor else None) for p in paths_1] paths_2 =", "def test_point_in_polygon(self): with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) def test_minkowski_sum(self):", "self.assertGreater(len(solution), 0) def test_minkowski_diff(self): solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution), 0)", "0) def test_minkowski_sum2(self): solution = pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) self.assertGreater(len(solution), 0)", "def test_execute_empty(self): pc = pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO)", "integer_types) assert res == int(value * self.scale) def test_value_scale_from(self): value", "addend if converter: c = converter(c) return c return [[convert_coordinate(c)", "500), PATH_SUBJ_1), 0) def test_minkowski_sum(self): solution = pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False)", "def setUp(self): tree = pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen = True child", 
"'PreserveCollinear', 'StrictlySimple'): self.check_property_assignment(pc, prop_name, [True, False]) def test_pyclipperoffset_properties(self): for factor", "('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc, prop_name, [2.912, 132.12, 12, -123]) class TestPyclipperExecute(TestCase):", "1000000000000 res = pyclipper.scale_from_clipper(value, self.scale) assert isinstance(res, float) # Convert", "raised ClipperException when not all paths were invalid\") class TestClassProperties(TestCase):", "pyclipper.scale_from_clipper(value, self.scale) assert isinstance(res, float) # Convert to float to", "all(isinstance(i, list) for i in res) assert all(isinstance(j, list) for", "3. assert res == float(value) / self.scale def test_path_scale_to(self): res", "in range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1]) def test_simplify_polygon(self): solution =", "solution = pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) self.assertGreater(len(solution), 0) def test_minkowski_sum2(self): solution", "check_reversed_path(self, path_1, path_2): if len(path_1) is not len(path_2): return False", "def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() def test_add_path(self):", "not None: c *= multiplier if addend is not None:", "= 1 @staticmethod def add_path(pc, path): pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def", "10 ** factor pc = pyclipper.PyclipperOffset() for prop_name in ('MiterLimit',", "all(isinstance(k, float) for i in res for j in i", "path = [Point2D(v) for v in [(0,0), (0,1)]] assert type(path[0].x)", "self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom, 210) def test_execute(self): solution = self.pc.Execute(*self.default_args) self.assertEqual(len(solution),", "= pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0) self.assertGreater(area_pos, 0) self.assertEqual(abs(area_neg), area_pos) def test_point_in_polygon(self):", 
"[pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod def add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2],", "def test_add_path(self): # should not raise an exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP)", "* self.scale) def test_value_scale_from(self): value = 1000000000000 res = pyclipper.scale_from_clipper(value,", "assert len(res) == len(self.paths) assert all(isinstance(i, list) for i in", "all(isinstance(j, integer_types) for i in res for j in i)", "160], [230, 190], [200, 190]] # triangle PATH_CLIP_1 = [[190,", "0) def test_minkowski_diff(self): solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution), 0) def", "multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier), pyclipper.PT_CLIP) for subj_path in subj_paths: pc.AddPath(_modify_vertices(subj_path,", "self.assertIsInstance(solution, pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def test_clear(self): pc =", "int(value * self.scale) def test_value_scale_from(self): value = 1000000000000 res =", "self.check_paths(paths, 4) def test_closed_paths_from_polytree(self): paths = pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def", "PATTERN = [[4, -6], [6, -6], [-4, 6], [-6, 6]]", "= [[4, -6], [6, -6], [-4, 6], [-6, 6]] INVALID_PATH", "False area_neg = pyclipper.Area(PATH_SUBJ_1) area_pos = pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0) self.assertGreater(area_pos,", "multiplier=multiplier), pyclipper.PT_SUBJECT) def test_get_bounds(self): bounds = self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left,", "greek letter sigma PATTERN = [[4, -6], [6, -6], [-4,", "tree def test_polytree_to_paths(self): paths = 
pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4) def test_closed_paths_from_polytree(self):", "def test_simplify_polygons(self): solution = pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1)", "self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT)", "_modify_vertices(path, addend=0.0, multiplier=1.0, converter=None): path = path[:] def convert_coordinate(c): if", "be included in filtered results child2 = pyclipper.PyPolyNode() child2.IsOpen =", "TestPyclipperExecute(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() self.add_default_paths(self.pc)", "v in path] def run_tests(): main() if __name__ == '__main__':", "__future__ import print_function from unittest2 import TestCase, main import sys", "solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution), 0) def test_reverse_path(self): solution =", "pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod def add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT)", "Example polygons from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 = [[180, 200], [260, 200],", "self.assertIsInstance(solution, list) self.assertEqual(len(solution), 1) def test_execute2(self): pc = pyclipper.PyclipperOffset() self.add_path(pc,", "list) self.assertEqual(len(solution), 1) def test_execute2(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1)", "pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def test_clear(self): pc = pyclipper.PyclipperOffset()", 
"pyclipper.ReversePaths([PATH_SUBJ_1]) manualy_reversed = [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0]) def check_reversed_path(self, path_1, path_2):", "test_get_bounds(self): bounds = self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right, 260)", "solution = pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase): def", "raise an exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath,", "[True, False]) def test_pyclipperoffset_properties(self): for factor in range(6): pyclipper.SCALING_FACTOR =", "pyclipper.PT_SUBJECT) @staticmethod def add_paths(pc, clip_path, subj_paths, addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend,", "def test_minkowski_diff(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def test_add_path(self): with self.assertWarns(DeprecationWarning):", "res = pyclipper.scale_from_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i, list)", "210], [240, 130], [190, 130]] # square PATH_SIGMA = [[300,", "-1) # in polygon self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1), 1) # outside", "= 1 self.pc = pyclipper.Pyclipper() def test_add_path(self): # should not", "with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2],", "area_neg = pyclipper.Area(PATH_SUBJ_1) area_pos = pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0) self.assertGreater(area_pos, 0)", "def test_area(self): with 
self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self): with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180,", "res for j in i) assert all(isinstance(k, float) for i", "tree.Childs.append(child) child2 = pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent = child", "# less than 2 vertices class TestPyclipperModule(TestCase): def test_has_classes(self): self.assertTrue(hasattr(pyclipper,", "def test_add_paths(self): # should not raise an exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2],", "self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP, True) def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH, INVALID_PATH],", "True) def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP, True) try:", "paths_2 = [_modify_vertices(p, multiplier=factor, converter=round if factor else None) for", "polygon self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1), 0) def test_minkowski_sum(self): solution = pyclipper.MinkowskiSum(PATTERN,", "(0,1)]] assert type(path[0].x) == Zero path = pyclipper.scale_to_clipper(path) assert path", "tree = pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen = True child = pyclipper.PyPolyNode()", "pc.AddPaths(path, pyclipper.PT_SUBJECT, True) result = pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert result", "path == [[0, 0], [0, 2147483648]] def _do_solutions_match(paths_1, paths_2, factor=None):", "PATH_SUBJ_1), 1) # outside of polygon self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1), 0)", "in res for j in i for k in j)", "def test_execute(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute(2.0)", "is False area_neg = pyclipper.Area(PATH_SUBJ_1) area_pos = 
pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0)", "ImportError: self.skipTest(\"Skipping, sympy not available\") path = [(0,0), (0,1)] path", "pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT) @staticmethod def add_paths(pc, clip_path, subj_paths,", "self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution, solution_single) def test_clean_polygon(self): solution = pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution),", "1], [0, 0], [15 ** 15, 0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT, True)", "2. self.pc = pyclipper.Pyclipper() def test_orientation(self): with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def", "PATH_SUBJ_1 tree.Childs.append(child) child = pyclipper.PyPolyNode() child.IsOpen = True child.Parent =", "[0, 2147483648]] def _do_solutions_match(paths_1, paths_2, factor=None): if len(paths_1) != len(paths_2):", "they are not affected by the operation. \"\"\" pc =", "from __future__ import print_function from unittest2 import TestCase, main import", "0.5 res = pyclipper.scale_to_clipper(value, self.scale) assert isinstance(res, integer_types) assert res", "factor in range(6): pyclipper.SCALING_FACTOR = 10 ** factor pc =", "try: from sympy import Point2D from sympy.core.numbers import Zero except", "0) def test_minkowski_sum(self): solution = pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) self.assertGreater(len(solution), 0)", "self.assertLess(area_neg, 0) self.assertGreater(area_pos, 0) self.assertEqual(abs(area_neg), area_pos) def test_point_in_polygon(self): # on", "k in j) def test_paths_scale_from(self): res = pyclipper.scale_from_clipper(self.paths) assert len(res)", "test_value_scale_from(self): value = 1000000000000 res = pyclipper.scale_from_clipper(value, self.scale) assert isinstance(res,", "if they are not affected by the operation. 
\"\"\" pc", "unittest2 import TestCase, main import sys if sys.version_info < (3,):", "with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) def test_minkowski_sum2(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN,", "[200, 190]] # triangle PATH_CLIP_1 = [[190, 210], [240, 210],", "[PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0]) def check_reversed_path(self, path_1, path_2): if len(path_1) is", "= pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution, solution_single) def test_clean_polygon(self):", "prop_name in ('ReverseSolution', 'PreserveCollinear', 'StrictlySimple'): self.check_property_assignment(pc, prop_name, [True, False]) def", "# square, orientation is False PATH_SUBJ_2 = [[215, 160], [230,", "for i in range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1]) def test_simplify_polygon(self):", "def test_path_scale_from(self): res = pyclipper.scale_from_clipper(self.path) assert len(res) == len(self.path) assert", "def test_add_paths(self): with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase): scale", "assert result == path def check_pypolynode(self, node): self.assertTrue(len(node.Contour) == 0", "should not be an iterable (in that case # that", "for j in i) assert all(isinstance(k, float) for i in", "'PyclipperOffset')) def test_has_namespace_methods(self): for method in ('Orientation', 'Area', 'PointInPolygon', 'SimplifyPolygon',", "is not None: c *= multiplier if addend is not", "200), PATH_SUBJ_1), -1) # in polygon self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1), 1)", "test_clear(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) pc.Clear() solution = 
pc.Execute(2.0)", "test_minkowski_sum2(self): solution = pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) self.assertGreater(len(solution), 0) def test_minkowski_diff(self):", "large triangle. path = [[[0, 1], [0, 0], [15 **", "(in that case # that means that node.Contour is a", "def test_path_scale_to(self): res = pyclipper.scale_to_clipper(self.path) assert len(res) == len(self.path) assert", "integer_types = (int, long) else: integer_types = (int,) import pyclipper", "assert all(isinstance(i, list) for i in res) assert all(isinstance(j, list)", "def test_execute2(self): solution = self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution) def test_execute_empty(self):", "pc = pyclipper.PyclipperOffset() for prop_name in ('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc, prop_name,", "False]) def test_pyclipperoffset_properties(self): for factor in range(6): pyclipper.SCALING_FACTOR = 10", "# Convert to float to get \"normal\" division in Python", "path_2): if len(path_1) is not len(path_2): return False for i", "self.assertEqual(path_1[i][1], path_2[i][1]) def test_simplify_polygon(self): solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) def", "invalid\") class TestClassProperties(TestCase): def check_property_assignment(self, pc, prop_name, values): for val", "isinstance(res, integer_types) assert res == int(value * self.scale) def test_value_scale_from(self):", "method in ('Orientation', 'Area', 'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons', 'MinkowskiSum',", "return False for i in range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1])", "import Zero except ImportError: self.skipTest(\"Skipping, sympy not available\") path =", "path = [(0,0), (0,1)] path = [Point2D(v) for v in", "test_simplify_polygons(self): solution = 
pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) self.assertEqual(len(solution),", "def test_pyclipper_properties(self): pc = pyclipper.Pyclipper() for prop_name in ('ReverseSolution', 'PreserveCollinear',", "INVALID_PATH], pyclipper.PT_CLIP) except pyclipper.ClipperException: self.fail(\"add_paths raised ClipperException when not all", "= self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2) def test_execute2(self): solution = self.pc.Execute2(*self.default_args) self.assertIsInstance(solution,", "class TestNamespaceMethods(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1))", "paths = pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def check_paths(self, paths, expected_nr): self.assertEqual(len(paths),", "#!/usr/bin/python \"\"\" Tests for Pyclipper wrapper library. \"\"\" from __future__", "(3,): integer_types = (int, long) else: integer_types = (int,) import", "'ReversePaths'): self.assertTrue(hasattr(pyclipper, method)) class TestNamespaceMethods(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1", "polygon self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) # in polygon self.assertEqual(pyclipper.PointInPolygon((200, 180),", "method)) class TestNamespaceMethods(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 def test_orientation(self):", "prop_name, val) self.assertEqual(getattr(pc, prop_name), val) def test_pyclipper_properties(self): pc = pyclipper.Pyclipper()", "= [[[0, 1], [0, 0], [15 ** 15, 0]]] pc.AddPaths(path,", "pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) pc.Clear() solution = pc.Execute(2.0) self.assertIsInstance(solution,", "pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 0) class TestScalingFactorWarning(TestCase): def setUp(self): 
pyclipper.SCALING_FACTOR", "2) def test_open_paths_from_polytree(self): paths = pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def check_paths(self,", "library. \"\"\" from __future__ import print_function from unittest2 import TestCase,", "def test_has_namespace_methods(self): for method in ('Orientation', 'Area', 'PointInPolygon', 'SimplifyPolygon', 'SimplifyPolygons',", "False) self.assertGreater(len(solution), 0) def test_minkowski_diff(self): solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution),", "TestNamespaceMethods(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1]))", "0) self.assertGreater(area_pos, 0) self.assertEqual(abs(area_neg), area_pos) def test_point_in_polygon(self): # on polygon", "'ArcTolerance'): self.check_property_assignment(pc, prop_name, [2.912, 132.12, 12, -123]) class TestPyclipperExecute(TestCase): def", "pyclipper.PFT_EVENODD] @staticmethod def add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT) @staticmethod", "'SimplifyPolygon', 'SimplifyPolygons', 'CleanPolygon', 'CleanPolygons', 'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree',", "== [[0, 0], [0, 2147483648]] def _do_solutions_match(paths_1, paths_2, factor=None): if", "self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) def test_minkowski_sum(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum(PATTERN,", "class TestNonStandardNumbers(TestCase): def test_sympyzero(self): try: from sympy import Point2D from", "200]] # greek letter sigma PATTERN = [[4, -6], [6,", "child = pyclipper.PyPolyNode() child.IsOpen = True child.Parent = tree child.Contour", "# in 
polygon self.assertEqual(pyclipper.PointInPolygon((200, 180), PATH_SUBJ_1), 1) # outside of", "in res for j in i) def test_paths_scale_to(self): res =", "PATH_SUBJ_2) def test_add_path(self): with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): with", "INVALID_PATH = [[1, 1], ] # less than 2 vertices", "if converter: c = converter(c) return c return [[convert_coordinate(c) for", "outside of polygon self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1), 0) def test_minkowski_sum(self): solution", "iterable (in that case # that means that node.Contour is", "\"normal\" division in Python < 3. assert res == float(value)", "k in j) class TestNonStandardNumbers(TestCase): def test_sympyzero(self): try: from sympy", "addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier), pyclipper.PT_CLIP) for subj_path in subj_paths:", "orientation is False area_neg = pyclipper.Area(PATH_SUBJ_1) area_pos = pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg,", "i in res for j in i for k in", "in res) assert all(isinstance(j, list) for i in res for", "assert all(isinstance(j, integer_types) for i in res for j in", "self.pc = pyclipper.Pyclipper() def test_orientation(self): with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def test_area(self):", "self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self): # area less than 0 because orientation", "manualy_reversed = [PATH_SUBJ_1[::-1]] self.check_reversed_path(solution[0], manualy_reversed[0]) def check_reversed_path(self, path_1, path_2): if", "@staticmethod def add_paths(pc, clip_path, subj_paths, addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier),", "in paths_1] paths_2 = [_modify_vertices(p, multiplier=factor, converter=round if factor else", "addend=addend, multiplier=multiplier), 
pyclipper.PT_SUBJECT) def test_get_bounds(self): bounds = self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect)", "float to get \"normal\" division in Python < 3. assert", "for j in i for k in j) def test_paths_scale_from(self):", "0], [15 ** 15, 0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT, True) result =", "factor=None): if len(paths_1) != len(paths_2): return False paths_1 = [_modify_vertices(p,", "filtered results child2 = pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent =", "or len(node.Contour) > 2) # check vertex coordinate, should not", "passed into the library are returned exactly, if they are", "test_point_in_polygon(self): with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) def test_minkowski_sum(self): with", "def check_paths(self, paths, expected_nr): self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path) > 0 for", "for j in i) def test_paths_scale_to(self): res = pyclipper.scale_to_clipper(self.paths) assert", "= pyclipper.PyPolyNode() child.IsOpen = True child.Parent = tree child.Contour =", "= child child2.Contour = [] child.Childs.append(child2) self.tree = tree def", "self.assertEqual(len(solution), 2) def test_execute2(self): solution = self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution)", "= [[300, 400], [100, 400], [200, 300], [100, 200], [300,", "for p in paths_2] return all(((p_1 in paths_2) for p_1", "for p in paths_1] paths_2 = [_modify_vertices(p, multiplier=factor, converter=round if", "self.assertGreater(len(solution), 0) def test_reverse_path(self): solution = pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed = PATH_SUBJ_1[::-1]", "expected_nr): self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path) > 0 for path in paths)))", "= 1 def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) 
self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self): # area", "pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def test_open_paths_from_polytree(self): paths = pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2)", "\"\"\" from __future__ import print_function from unittest2 import TestCase, main", "exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): # should not raise an", "if addend is not None: c += addend if converter:", "self.assertTrue(hasattr(pyclipper, 'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def test_has_namespace_methods(self): for method in ('Orientation',", "child.Contour = PATH_SUBJ_1 tree.Childs.append(child) child = pyclipper.PyPolyNode() child.IsOpen = True", "self.check_property_assignment(pc, prop_name, [True, False]) def test_pyclipperoffset_properties(self): for factor in range(6):", "< 3. assert res == float(value) / self.scale def test_path_scale_to(self):", "150], [180, 150]] # square, orientation is False PATH_SUBJ_2 =", "for Pyclipper wrapper library. \"\"\" from __future__ import print_function from", "range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1]) def test_simplify_polygon(self): solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1)", "child2.Parent = child child2.Contour = PATTERN child.Childs.append(child2) # empty contour", "(int,) import pyclipper # Example polygons from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 =", "child.Parent = tree child.Contour = PATH_SUBJ_2 tree.Childs.append(child) child2 = pyclipper.PyPolyNode()", "test_execute2(self): solution = self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution) def test_execute_empty(self): pc", "pc = pyclipper.Pyclipper() # Some large triangle. 
path = [[[0,", "1) # outside of polygon self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1), 0) def", "PATH_SUBJ_2 = [[215, 160], [230, 190], [200, 190]] # triangle", "[100, 200], [300, 200]] # greek letter sigma PATTERN =", "self.pc.Clear() with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args) def test_exact_results(self): \"\"\" Test whether coordinates", "TestPyclipperAddPaths(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() def", "poly_type=pyclipper.PT_CLIP) def test_add_paths(self): with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase):", "= True child.Parent = tree child.Contour = PATH_SUBJ_2 tree.Childs.append(child) child2", "= pyclipper.scale_from_clipper(value, self.scale) assert isinstance(res, float) # Convert to float", "'MinkowskiSum', 'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper, method))", "[0, 0], [15 ** 15, 0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT, True) result", "200], [260, 150], [180, 150]] # square, orientation is False", "pyclipper.scale_to_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i, list) for i", "paths_1] paths_2 = [_modify_vertices(p, multiplier=factor, converter=round if factor else None)", "INVALID_PATH, pyclipper.PT_CLIP, True) def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP,", "self.check_paths(paths, 2) def check_paths(self, paths, expected_nr): self.assertEqual(len(paths), expected_nr) self.assertTrue(all((len(path) >", "pyclipper.Area(PATH_SUBJ_1) area_pos = pyclipper.Area(PATH_SUBJ_1[::-1]) self.assertLess(area_neg, 0) self.assertGreater(area_pos, 0) 
self.assertEqual(abs(area_neg), area_pos)", "def test_clean_polygon(self): solution = pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1)) def test_clean_polygons(self): solution", "test_minkowski_diff(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def test_add_path(self): with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1,", "self.scale) assert isinstance(res, float) # Convert to float to get", "def test_paths_scale_to(self): res = pyclipper.scale_to_clipper(self.paths) assert len(res) == len(self.paths) assert", "multiplier=multiplier), pyclipper.PT_CLIP) for subj_path in subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier), pyclipper.PT_SUBJECT)", "= False child2.Parent = child child2.Contour = [] child.Childs.append(child2) self.tree", "180) self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom, 210) def test_execute(self): solution", "'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper, method)) class TestNamespaceMethods(TestCase): def setUp(self): pyclipper.SCALING_FACTOR", "test_add_path(self): # should not raise an exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def", "add_paths(pc, clip_path, subj_paths, addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path, addend=addend, multiplier=multiplier), pyclipper.PT_CLIP) for", "self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self): with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1)", "pyclipper.ET_CLOSEDPOLYGON) def test_execute(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution =", "= pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution), 0) def test_reverse_path(self): solution = 
pyclipper.ReversePath(PATH_SUBJ_1)", "res == int(value * self.scale) def test_value_scale_from(self): value = 1000000000000", "square PATH_SIGMA = [[300, 400], [100, 400], [200, 300], [100,", "for v in [(0,0), (0,1)]] assert type(path[0].x) == Zero path", "def test_get_bounds(self): bounds = self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right,", "'MinkowskiSum2', 'MinkowskiDiff', 'PolyTreeToPaths', 'ClosedPathsFromPolyTree', 'OpenPathsFromPolyTree', 'ReversePath', 'ReversePaths'): self.assertTrue(hasattr(pyclipper, method)) class", "be an iterable (in that case # that means that", "wrapper library. \"\"\" from __future__ import print_function from unittest2 import", "pyclipper.PT_CLIP, True) def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP, True)", "pyclipper.PT_CLIP) except pyclipper.ClipperException: self.fail(\"add_paths raised ClipperException when not all paths", "paths were invalid\") class TestClassProperties(TestCase): def check_property_assignment(self, pc, prop_name, values):", "132.12, 12, -123]) class TestPyclipperExecute(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1", "float(value) / self.scale def test_path_scale_to(self): res = pyclipper.scale_to_clipper(self.path) assert len(res)", "multiplier=factor, converter=round if factor else None) for p in paths_2]", "= pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4) def test_closed_paths_from_polytree(self): paths = pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths,", "with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def test_add_path(self): with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP)", "setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() 
self.add_default_paths(self.pc) self.default_args =", "setUp(self): pyclipper.SCALING_FACTOR = 1 def test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self):", "from unittest2 import TestCase, main import sys if sys.version_info <", "solution = pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) self.assertGreater(len(solution), 0) def test_minkowski_diff(self): solution", "child child2.Contour = PATTERN child.Childs.append(child2) # empty contour should not", "False child2.Parent = child child2.Contour = [] child.Childs.append(child2) self.tree =", "def setUp(self): pyclipper.SCALING_FACTOR = 1 @staticmethod def add_path(pc, path): pc.AddPath(path,", "multiplier is not None: c *= multiplier if addend is", "in ('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc, prop_name, [2.912, 132.12, 12, -123]) class", "== len(self.path) assert all(isinstance(i, list) for i in res) assert", "[[190, 210], [240, 210], [240, 130], [190, 130]] # square", "prop_name, [2.912, 132.12, 12, -123]) class TestPyclipperExecute(TestCase): def setUp(self): pyclipper.SCALING_FACTOR", "[100, 400], [200, 300], [100, 200], [300, 200]] # greek", "def test_closed_paths_from_polytree(self): paths = pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def test_open_paths_from_polytree(self): paths", "# that means that node.Contour is a list of paths,", "should be path if node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for child in", "PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP) except pyclipper.ClipperException: self.fail(\"add_paths raised ClipperException", "[(0, 0), (1, 1)] paths = [path] * 3 def", "return all(((p_1 in paths_2) for p_1 in paths_1)) def _modify_vertices(path,", "in [(0,0), (0,1)]] assert type(path[0].x) == Zero path = pyclipper.scale_to_clipper(path)", "in path] def 
run_tests(): main() if __name__ == '__main__': run_tests()", "= pyclipper.scale_to_clipper(path) assert path == [[0, 0], [0, 2147483648]] def", "[[180, 200], [260, 200], [260, 150], [180, 150]] # square,", "PATH_CLIP_1) solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 1) def test_execute2(self):", "pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP) except pyclipper.ClipperException: self.fail(\"add_paths raised ClipperException when", "PATH_SUBJ_2], pyclipper.PT_SUBJECT) @staticmethod def add_paths(pc, clip_path, subj_paths, addend=None, multiplier=None): pc.AddPath(_modify_vertices(clip_path,", "integer_types = (int,) import pyclipper # Example polygons from http://www.angusj.com/delphi/clipper.php", "list) for i in res) assert all(isinstance(j, float) for i", "except pyclipper.ClipperException: self.fail(\"add_paths raised ClipperException when not all paths were", "1 self.pc = pyclipper.Pyclipper() def test_add_path(self): # should not raise", "pyclipper.Pyclipper() self.add_default_paths(self.pc) self.default_args = [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod def add_default_paths(pc):", "import pyclipper # Example polygons from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 = [[180,", "TestNonStandardNumbers(TestCase): def test_sympyzero(self): try: from sympy import Point2D from sympy.core.numbers", "= pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 1) def test_execute2(self): pc =", "of polygon self.assertEqual(pyclipper.PointInPolygon((500, 500), PATH_SUBJ_1), 0) def test_minkowski_sum(self): solution =", "in ('ReverseSolution', 'PreserveCollinear', 'StrictlySimple'): self.check_property_assignment(pc, prop_name, [True, False]) def test_pyclipperoffset_properties(self):", "pyclipper.Pyclipper() def test_add_path(self): # should not raise an exception self.pc.AddPath(PATH_CLIP_1,", "def 
setUp(self): pyclipper.SCALING_FACTOR = 2. self.pc = pyclipper.Pyclipper() def test_orientation(self):", "not raise an exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException,", "= [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod def add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP) pc.AddPaths([PATH_SUBJ_1,", "= pyclipper.PyPolyNode() child.IsOpen = False child.Parent = tree child.Contour =", "test_area(self): # area less than 0 because orientation is False", "def test_reverse_path(self): solution = pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed = PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed)", "_do_solutions_match(paths_1, paths_2, factor=None): if len(paths_1) != len(paths_2): return False paths_1", "= pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1)) def test_clean_polygons(self): solution = pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution),", "= [_modify_vertices(p, multiplier=factor, converter=round if factor else None) for p", "pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute(2.0) self.assertIsInstance(solution, list)", "empty contour should not # be included in filtered results", "check vertex coordinate, should not be an iterable (in that", "[[4, -6], [6, -6], [-4, 6], [-6, 6]] INVALID_PATH =", "PATTERN child.Childs.append(child2) # empty contour should not # be included", "path = [(0, 0), (1, 1)] paths = [path] *", "Point2D from sympy.core.numbers import Zero except ImportError: self.skipTest(\"Skipping, sympy not", "= PATH_SUBJ_2 tree.Childs.append(child) child2 = pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent", "1) def test_simplify_polygons(self): solution = pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single = 
pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution),", "solution = pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def", "test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def test_has_namespace_methods(self): for method in", "pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent = child child2.Contour = []", "for factor in range(6): pyclipper.SCALING_FACTOR = 10 ** factor pc", "pyclipper.SCALING_FACTOR = 1 @staticmethod def add_path(pc, path): pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON)", "in res for j in i) assert all(isinstance(k, float) for", "self.add_path(pc, PATH_CLIP_1) solution = pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)),", "= 1 self.pc = pyclipper.Pyclipper() self.add_default_paths(self.pc) self.default_args = [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD,", "test_path_scale_from(self): res = pyclipper.scale_from_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i,", "j in i) def test_path_scale_from(self): res = pyclipper.scale_from_clipper(self.path) assert len(res)", "sympy import Point2D from sympy.core.numbers import Zero except ImportError: self.skipTest(\"Skipping,", "for i in res for j in i for k", "on polygon self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) # in polygon self.assertEqual(pyclipper.PointInPolygon((200,", "self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1], path_2[i][1]) def test_simplify_polygon(self): solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution),", "300], [100, 200], [300, 200]] # 
greek letter sigma PATTERN", "should not raise an exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self):", "def test_pyclipperoffset_properties(self): for factor in range(6): pyclipper.SCALING_FACTOR = 10 **", "pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode)", "0 for path in paths))) class TestPyclipperAddPaths(TestCase): def setUp(self): pyclipper.SCALING_FACTOR", "path if node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for child in node.Childs: self.check_pypolynode(child)", "len(paths_1) != len(paths_2): return False paths_1 = [_modify_vertices(p, multiplier=factor, converter=round", "all(isinstance(j, float) for i in res for j in i)", "res == float(value) / self.scale def test_path_scale_to(self): res = pyclipper.scale_to_clipper(self.path)", "is not None: c += addend if converter: c =", "= 1000000000000 res = pyclipper.scale_from_clipper(value, self.scale) assert isinstance(res, float) #", "assert all(isinstance(i, list) for i in res) assert all(isinstance(j, integer_types)", "2 vertices class TestPyclipperModule(TestCase): def test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset'))", "pyclipper.PT_CLIP) for subj_path in subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier), pyclipper.PT_SUBJECT) def", "if factor else None) for p in paths_2] return all(((p_1", "pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def test_clear(self): self.pc.Clear() with", "= pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) pc.Clear() solution = pc.Execute(2.0) self.assertIsInstance(solution, list)", "child in node.Childs: self.check_pypolynode(child) class 
TestPyclipperOffset(TestCase): def setUp(self): pyclipper.SCALING_FACTOR =", "1 @staticmethod def add_path(pc, path): pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def test_execute(self):", "assert res == float(value) / self.scale def test_path_scale_to(self): res =", "return False paths_1 = [_modify_vertices(p, multiplier=factor, converter=round if factor else", "tree child.Contour = PATH_SUBJ_1 tree.Childs.append(child) child = pyclipper.PyPolyNode() child.IsOpen =", "child.IsOpen = False child.Parent = tree child.Contour = PATH_SUBJ_1 tree.Childs.append(child)", "len(path_1) is not len(path_2): return False for i in range(len(path_1)):", "+= addend if converter: c = converter(c) return c return", "setUp(self): pyclipper.SCALING_FACTOR = 2. self.pc = pyclipper.Pyclipper() def test_orientation(self): with", "= [(0,0), (0,1)] path = [Point2D(v) for v in [(0,0),", "the library are returned exactly, if they are not affected", "self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) def test_add_path(self): with self.assertWarns(DeprecationWarning): self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def", "p in paths_1] paths_2 = [_modify_vertices(p, multiplier=factor, converter=round if factor", "# square PATH_SIGMA = [[300, 400], [100, 400], [200, 300],", "> 0 for path in paths))) class TestPyclipperAddPaths(TestCase): def setUp(self):", "190]] # triangle PATH_CLIP_1 = [[190, 210], [240, 210], [240,", "[[1, 1], ] # less than 2 vertices class TestPyclipperModule(TestCase):", "child.Parent = tree child.Contour = PATH_SUBJ_1 tree.Childs.append(child) child = pyclipper.PyPolyNode()", "p in paths_2] return all(((p_1 in paths_2) for p_1 in", "solution_single = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution, solution_single) def", "i in res) assert all(isinstance(j, list) for i in res", "# should not raise 
an exception self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def", "= [path] * 3 def test_value_scale_to(self): value = 0.5 res", "== 0 or len(node.Contour) > 2) # check vertex coordinate,", "test_closed_paths_from_polytree(self): paths = pyclipper.ClosedPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def test_open_paths_from_polytree(self): paths =", "0), (1, 1)] paths = [path] * 3 def test_value_scale_to(self):", "res) assert all(isinstance(j, float) for i in res for j", "def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPaths, [INVALID_PATH, INVALID_PATH], pyclipper.PT_CLIP, True) try: self.pc.AddPaths([INVALID_PATH,", "len(res) == len(self.path) assert all(isinstance(i, list) for i in res)", "path in paths))) class TestPyclipperAddPaths(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1", "area less than 0 because orientation is False area_neg =", "solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 0) class TestScalingFactorWarning(TestCase): def", "= False child.Parent = tree child.Contour = PATH_SUBJ_1 tree.Childs.append(child) child", "= child child2.Contour = PATTERN child.Childs.append(child2) # empty contour should", "in filtered results child2 = pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent", "class TestScalingFactorWarning(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 2. 
self.pc = pyclipper.Pyclipper()", "@staticmethod def add_path(pc, path): pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def test_execute(self): pc", "= pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) self.assertEqual(len(solution), len(solution_single)) _do_solutions_match(solution,", "= path[:] def convert_coordinate(c): if multiplier is not None: c", "if len(paths_1) != len(paths_2): return False paths_1 = [_modify_vertices(p, multiplier=factor,", "PATH_SUBJ_1), 0) def test_minkowski_sum(self): solution = pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) self.assertGreater(len(solution),", "= pyclipper.Pyclipper() def test_orientation(self): with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def test_area(self): with", "190], [200, 190]] # triangle PATH_CLIP_1 = [[190, 210], [240,", "poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP, True) def test_add_paths_invalid_path(self):", "not all paths were invalid\") class TestClassProperties(TestCase): def check_property_assignment(self, pc,", "** 15, 0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT, True) result = pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD,", "solution = self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2) def test_execute2(self): solution = self.pc.Execute2(*self.default_args)", "multiplier=1.0, converter=None): path = path[:] def convert_coordinate(c): if multiplier is", "prop_name in ('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc, prop_name, [2.912, 132.12, 12, -123])", "path_1, path_2): if len(path_1) is not len(path_2): return False for", "pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], pyclipper.PT_SUBJECT) @staticmethod def add_paths(pc, clip_path, subj_paths, addend=None, multiplier=None):", "= 
pyclipper.MinkowskiSum(PATTERN, PATH_SIGMA, False) self.assertGreater(len(solution), 0) def test_minkowski_sum2(self): solution =", "self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): # should not raise an exception", "2) # check vertex coordinate, should not be an iterable", "converter=round if factor else None) for p in paths_2] return", "converter(c) return c return [[convert_coordinate(c) for c in v] for", "in res) assert all(isinstance(j, integer_types) for i in res for", "= pyclipper.ReversePath(PATH_SUBJ_1) manualy_reversed = PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed) def test_reverse_paths(self): solution", "(int, long) else: integer_types = (int,) import pyclipper # Example", "operation. \"\"\" pc = pyclipper.Pyclipper() # Some large triangle. path", "be path if node.Contour: self.assertFalse(hasattr(node.Contour[0][0], '__iter__')) for child in node.Childs:", "# greek letter sigma PATTERN = [[4, -6], [6, -6],", "an exception self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): # should not raise", "bounds = self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top,", "res for j in i) def test_path_scale_from(self): res = pyclipper.scale_from_clipper(self.path)", "list) for i in res) assert all(isinstance(j, integer_types) for i", "assert res == int(value * self.scale) def test_value_scale_from(self): value =", "self.assertEqual(abs(area_neg), area_pos) def test_point_in_polygon(self): # on polygon self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1),", "child2.IsOpen = False child2.Parent = child child2.Contour = PATTERN child.Childs.append(child2)", "== float(value) / self.scale def test_path_scale_to(self): res = pyclipper.scale_to_clipper(self.path) assert", "c return [[convert_coordinate(c) for c in v] for v in", "results child2 = 
pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent = child", "\"\"\" Tests for Pyclipper wrapper library. \"\"\" from __future__ import", "= PATTERN child.Childs.append(child2) # empty contour should not # be", "= pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase): def setUp(self):", "test_execute2(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute2(2.0) self.assertIsInstance(solution,", "PATH_CLIP_1 = [[190, 210], [240, 210], [240, 130], [190, 130]]", "len(self.paths) assert all(isinstance(i, list) for i in res) assert all(isinstance(j,", "[[0, 0], [0, 2147483648]] def _do_solutions_match(paths_1, paths_2, factor=None): if len(paths_1)", "pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self): with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) def", "self.pc.AddPath(PATH_CLIP_1, poly_type=pyclipper.PT_CLIP) def test_add_paths(self): with self.assertWarns(DeprecationWarning): self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class", "c = converter(c) return c return [[convert_coordinate(c) for c in", "def _do_solutions_match(paths_1, paths_2, factor=None): if len(paths_1) != len(paths_2): return False", "not len(path_2): return False for i in range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0])", "= pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) self.assertGreater(len(solution), 0) def test_minkowski_diff(self): solution =", "assert isinstance(res, float) # Convert to float to get \"normal\"", "solution = pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1)) def test_clean_polygons(self): solution = pyclipper.CleanPolygons([PATH_CLIP_1])", "return [[convert_coordinate(c) for c in v] for v in path]", "len(paths_2): return False paths_1 = [_modify_vertices(p, 
multiplier=factor, converter=round if factor", "for i in res for j in i) assert all(isinstance(k,", "for v in path] def run_tests(): main() if __name__ ==", "add_path(pc, path): pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def test_execute(self): pc = pyclipper.PyclipperOffset()", "self.assertEqual(bounds.bottom, 210) def test_execute(self): solution = self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2) def", "with self.assertRaises(pyclipper.ClipperException): pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO) def test_clear(self): self.pc.Clear() with self.assertRaises(pyclipper.ClipperException):", "test_minkowski_diff(self): solution = pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2) self.assertGreater(len(solution), 0) def test_reverse_path(self): solution", "self.assertTrue(all((len(path) > 0 for path in paths))) class TestPyclipperAddPaths(TestCase): def", "in subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier), pyclipper.PT_SUBJECT) def test_get_bounds(self): bounds =", "[260, 150], [180, 150]] # square, orientation is False PATH_SUBJ_2", "self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom,", "self.check_reversed_path(solution[0], manualy_reversed[0]) def check_reversed_path(self, path_1, path_2): if len(path_1) is not", "PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP, True) def", "'Pyclipper')) self.assertTrue(hasattr(pyclipper, 'PyclipperOffset')) def test_has_namespace_methods(self): for method in ('Orientation', 'Area',", "tree.Contour.append(PATH_CLIP_1) tree.IsOpen = True child = pyclipper.PyPolyNode() child.IsOpen = False", "vertex coordinate, should not be an 
iterable (in that case", "solution_single) def test_clean_polygon(self): solution = pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1)) def test_clean_polygons(self):", "prop_name, values): for val in values: setattr(pc, prop_name, val) self.assertEqual(getattr(pc,", "200], [300, 200]] # greek letter sigma PATTERN = [[4,", "pyclipper.PyclipperOffset() for prop_name in ('MiterLimit', 'ArcTolerance'): self.check_property_assignment(pc, prop_name, [2.912, 132.12,", "p_1 in paths_1)) def _modify_vertices(path, addend=0.0, multiplier=1.0, converter=None): path =", "def test_simplify_polygon(self): solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) def test_simplify_polygons(self): solution", "self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top, 130) self.assertEqual(bounds.bottom, 210) def test_execute(self):", "in paths_2] return all(((p_1 in paths_2) for p_1 in paths_1))", "class TestScalingFunctions(TestCase): scale = 2 ** 31 path = [(0,", "200], [260, 200], [260, 150], [180, 150]] # square, orientation", "= pyclipper.OpenPathsFromPolyTree(self.tree) self.check_paths(paths, 2) def check_paths(self, paths, expected_nr): self.assertEqual(len(paths), expected_nr)", "converter: c = converter(c) return c return [[convert_coordinate(c) for c", "pc.Execute2(2.0) self.assertIsInstance(solution, pyclipper.PyPolyNode) self.assertEqual(len(pyclipper.OpenPathsFromPolyTree(solution)), 0) self.assertEqual(len(pyclipper.ClosedPathsFromPolyTree(solution)), 1) def test_clear(self): pc", "False PATH_SUBJ_2 = [[215, 160], [230, 190], [200, 190]] #", "True) result = pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert result == path", "import print_function from unittest2 import TestCase, main import sys if", "than 2 vertices class TestPyclipperModule(TestCase): def test_has_classes(self): self.assertTrue(hasattr(pyclipper, 'Pyclipper')) 
self.assertTrue(hasattr(pyclipper,", "= pyclipper.scale_from_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i, list) for", "orientation is False PATH_SUBJ_2 = [[215, 160], [230, 190], [200,", "than 0 because orientation is False area_neg = pyclipper.Area(PATH_SUBJ_1) area_pos", "test_paths_scale_to(self): res = pyclipper.scale_to_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i,", "** factor pc = pyclipper.PyclipperOffset() for prop_name in ('MiterLimit', 'ArcTolerance'):", "addend=addend, multiplier=multiplier), pyclipper.PT_CLIP) for subj_path in subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier),", "# empty contour should not # be included in filtered", "paths_1 = [_modify_vertices(p, multiplier=factor, converter=round if factor else None) for", "self.assertEqual(len(solution[0]), len(PATH_CLIP_1)) class TestFilterPyPolyNode(TestCase): def setUp(self): tree = pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1)", "None: c += addend if converter: c = converter(c) return", "val) def test_pyclipper_properties(self): pc = pyclipper.Pyclipper() for prop_name in ('ReverseSolution',", "to float to get \"normal\" division in Python < 3.", "i in res for j in i) assert all(isinstance(k, float)", "31 path = [(0, 0), (1, 1)] paths = [path]", "[200, 300], [100, 200], [300, 200]] # greek letter sigma", "= tree child.Contour = PATH_SUBJ_2 tree.Childs.append(child) child2 = pyclipper.PyPolyNode() child2.IsOpen", "not # be included in filtered results child2 = pyclipper.PyPolyNode()", "prop_name), val) def test_pyclipper_properties(self): pc = pyclipper.Pyclipper() for prop_name in", "expected_nr) self.assertTrue(all((len(path) > 0 for path in paths))) class TestPyclipperAddPaths(TestCase):", "subj_path in subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier), pyclipper.PT_SUBJECT) def test_get_bounds(self): bounds", "Some large triangle. 
path = [[[0, 1], [0, 0], [15", "[15 ** 15, 0]]] pc.AddPaths(path, pyclipper.PT_SUBJECT, True) result = pc.Execute(pyclipper.PT_CLIP,", "in i for k in j) class TestNonStandardNumbers(TestCase): def test_sympyzero(self):", "try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP) except pyclipper.ClipperException: self.fail(\"add_paths", "[230, 190], [200, 190]] # triangle PATH_CLIP_1 = [[190, 210],", "0], [0, 2147483648]] def _do_solutions_match(paths_1, paths_2, factor=None): if len(paths_1) !=", "res = pyclipper.scale_to_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i, list)", "factor else None) for p in paths_2] return all(((p_1 in", "pyclipper.PT_SUBJECT, True) result = pc.Execute(pyclipper.PT_CLIP, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert result ==", "TestFilterPyPolyNode(TestCase): def setUp(self): tree = pyclipper.PyPolyNode() tree.Contour.append(PATH_CLIP_1) tree.IsOpen = True", "pyclipper # Example polygons from http://www.angusj.com/delphi/clipper.php PATH_SUBJ_1 = [[180, 200],", "res) assert all(isinstance(j, list) for i in res for j", "res) assert all(isinstance(j, integer_types) for i in res for j", "assert type(path[0].x) == Zero path = pyclipper.scale_to_clipper(path) assert path ==", "pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD) assert result == path def check_pypolynode(self, node): self.assertTrue(len(node.Contour)", "def test_value_scale_from(self): value = 1000000000000 res = pyclipper.scale_from_clipper(value, self.scale) assert", "class TestPyclipperAddPaths(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper()", "pyclipper.PFT_NONZERO) def test_clear(self): self.pc.Clear() with self.assertRaises(pyclipper.ClipperException): self.pc.Execute(*self.default_args) def test_exact_results(self): \"\"\"", "paths_1)) def _modify_vertices(path, addend=0.0, multiplier=1.0, converter=None): path = 
path[:] def", "# check vertex coordinate, should not be an iterable (in", "self.assertTrue(len(node.Contour) == 0 or len(node.Contour) > 2) # check vertex", "[(0,0), (0,1)] path = [Point2D(v) for v in [(0,0), (0,1)]]", "[[215, 160], [230, 190], [200, 190]] # triangle PATH_CLIP_1 =", "self.assertWarns(DeprecationWarning): pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) def test_minkowski_diff(self): with self.assertWarns(DeprecationWarning): pyclipper.MinkowskiDiff(PATH_SUBJ_1, PATH_SUBJ_2)", "2147483648]] def _do_solutions_match(paths_1, paths_2, factor=None): if len(paths_1) != len(paths_2): return", "1], ] # less than 2 vertices class TestPyclipperModule(TestCase): def", "res for j in i for k in j) def", "i for k in j) class TestNonStandardNumbers(TestCase): def test_sympyzero(self): try:", "= pyclipper.Pyclipper() self.add_default_paths(self.pc) self.default_args = [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod def", "def test_execute(self): solution = self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2) def test_execute2(self): solution", "path = [[[0, 1], [0, 0], [15 ** 15, 0]]]", "len(path_2): return False for i in range(len(path_1)): self.assertEqual(path_1[i][0], path_2[i][0]) self.assertEqual(path_1[i][1],", "test_orientation(self): self.assertFalse(pyclipper.Orientation(PATH_SUBJ_1)) self.assertTrue(pyclipper.Orientation(PATH_SUBJ_1[::-1])) def test_area(self): # area less than 0", "len(PATH_CLIP_1)) def test_clean_polygons(self): solution = pyclipper.CleanPolygons([PATH_CLIP_1]) self.assertEqual(len(solution), 1) self.assertEqual(len(solution[0]), len(PATH_CLIP_1))", "pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution = pc.Execute(2.0) self.assertIsInstance(solution, list) self.assertEqual(len(solution), 1)", "path_2[i][1]) def test_simplify_polygon(self): solution = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) def 
test_simplify_polygons(self):", "pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) def test_simplify_polygons(self): solution = pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single =", "are returned exactly, if they are not affected by the", "means that node.Contour is a list of paths, should be", "(1, 1)] paths = [path] * 3 def test_value_scale_to(self): value", "test_point_in_polygon(self): # on polygon self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) # in", "val) self.assertEqual(getattr(pc, prop_name), val) def test_pyclipper_properties(self): pc = pyclipper.Pyclipper() for", "j in i) def test_paths_scale_to(self): res = pyclipper.scale_to_clipper(self.paths) assert len(res)", "res = pyclipper.scale_to_clipper(self.paths) assert len(res) == len(self.paths) assert all(isinstance(i, list)", "= pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent = child child2.Contour =", "for i in res) assert all(isinstance(j, list) for i in", "in i) assert all(isinstance(k, float) for i in res for", "** 31 path = [(0, 0), (1, 1)] paths =", "[_modify_vertices(p, multiplier=factor, converter=round if factor else None) for p in", "paths_2) for p_1 in paths_1)) def _modify_vertices(path, addend=0.0, multiplier=1.0, converter=None):", "res = pyclipper.scale_from_clipper(value, self.scale) assert isinstance(res, float) # Convert to", "pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier), pyclipper.PT_SUBJECT) def test_get_bounds(self): bounds = self.pc.GetBounds() self.assertIsInstance(bounds,", "node.Contour is a list of paths, should be path if", "[-6, 6]] INVALID_PATH = [[1, 1], ] # less than", "def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP, True) def test_add_paths_invalid_path(self): self.assertRaises(pyclipper.ClipperException,", "from sympy.core.numbers import Zero except ImportError: self.skipTest(\"Skipping, 
sympy not available\")", "def test_polytree_to_paths(self): paths = pyclipper.PolyTreeToPaths(self.tree) self.check_paths(paths, 4) def test_closed_paths_from_polytree(self): paths", "integer_types) for i in res for j in i) def", "self.assertEqual(len(solution), 0) class TestScalingFactorWarning(TestCase): def setUp(self): pyclipper.SCALING_FACTOR = 2. self.pc", "for subj_path in subj_paths: pc.AddPath(_modify_vertices(subj_path, addend=addend, multiplier=multiplier), pyclipper.PT_SUBJECT) def test_get_bounds(self):", "in j) def test_paths_scale_from(self): res = pyclipper.scale_from_clipper(self.paths) assert len(res) ==", "self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) class TestScalingFunctions(TestCase): scale = 2 ** 31", "pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def test_execute(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) solution", "solution = pyclipper.SimplifyPolygons([PATH_SUBJ_1]) solution_single = pyclipper.SimplifyPolygon(PATH_SUBJ_1) self.assertEqual(len(solution), 1) self.assertEqual(len(solution), len(solution_single))", "Convert to float to get \"normal\" division in Python <", "self.assertGreater(len(solution), 0) def test_minkowski_sum2(self): solution = pyclipper.MinkowskiSum2(PATTERN, [PATH_SIGMA], False) self.assertGreater(len(solution),", "self.pc = pyclipper.Pyclipper() def test_add_path(self): # should not raise an", "True) try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP) except pyclipper.ClipperException:", "self.default_args = [pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD] @staticmethod def add_default_paths(pc): pc.AddPath(PATH_CLIP_1, pyclipper.PT_CLIP)", "scale = 2 ** 31 path = [(0, 0), (1,", "PATH_SUBJ_2 tree.Childs.append(child) child2 = pyclipper.PyPolyNode() child2.IsOpen = False child2.Parent =", "i) def test_paths_scale_to(self): res = 
pyclipper.scale_to_clipper(self.paths) assert len(res) == len(self.paths)", "with self.assertWarns(DeprecationWarning): self.assertEqual(pyclipper.PointInPolygon((180, 200), PATH_SUBJ_1), -1) def test_minkowski_sum(self): with self.assertWarns(DeprecationWarning):", "self.skipTest(\"Skipping, sympy not available\") path = [(0,0), (0,1)] path =", "pyclipper.SCALING_FACTOR = 1 self.pc = pyclipper.Pyclipper() def test_add_path(self): # should", "0) self.assertEqual(abs(area_neg), area_pos) def test_point_in_polygon(self): # on polygon self.assertEqual(pyclipper.PointInPolygon((180, 200),", "self.pc.Execute(*self.default_args) self.assertEqual(len(solution), 2) def test_execute2(self): solution = self.pc.Execute2(*self.default_args) self.assertIsInstance(solution, pyclipper.PyPolyNode)", "triangle PATH_CLIP_1 = [[190, 210], [240, 210], [240, 130], [190,", "self.assertIsInstance(solution, pyclipper.PyPolyNode) self.check_pypolynode(solution) def test_execute_empty(self): pc = pyclipper.Pyclipper() with self.assertRaises(pyclipper.ClipperException):", "that case # that means that node.Contour is a list", "with self.assertWarns(DeprecationWarning): pyclipper.Orientation(PATH_SUBJ_1) def test_area(self): with self.assertWarns(DeprecationWarning): pyclipper.Area(PATH_SUBJ_1) def test_point_in_polygon(self):", "isinstance(res, float) # Convert to float to get \"normal\" division", "[-4, 6], [-6, 6]] INVALID_PATH = [[1, 1], ] #", "exactly, if they are not affected by the operation. 
\"\"\"", "factor else None) for p in paths_1] paths_2 = [_modify_vertices(p,", "pyclipper.PT_CLIP, True) try: self.pc.AddPaths([INVALID_PATH, PATH_CLIP_1], pyclipper.PT_CLIP) self.pc.AddPaths([PATH_CLIP_1, INVALID_PATH], pyclipper.PT_CLIP) except", "child = pyclipper.PyPolyNode() child.IsOpen = False child.Parent = tree child.Contour", "available\") path = [(0,0), (0,1)] path = [Point2D(v) for v", "c *= multiplier if addend is not None: c +=", "value = 1000000000000 res = pyclipper.scale_from_clipper(value, self.scale) assert isinstance(res, float)", "= pyclipper.scale_to_clipper(self.path) assert len(res) == len(self.path) assert all(isinstance(i, list) for", "def add_path(pc, path): pc.AddPath(path, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON) def test_execute(self): pc =", "from sympy import Point2D from sympy.core.numbers import Zero except ImportError:", "an iterable (in that case # that means that node.Contour", "manualy_reversed = PATH_SUBJ_1[::-1] self.check_reversed_path(solution, manualy_reversed) def test_reverse_paths(self): solution = pyclipper.ReversePaths([PATH_SUBJ_1])", "self.pc.AddPaths([PATH_SUBJ_1, PATH_SUBJ_2], poly_type=pyclipper.PT_SUBJECT) def test_add_path_invalid_path(self): self.assertRaises(pyclipper.ClipperException, self.pc.AddPath, INVALID_PATH, pyclipper.PT_CLIP, True)", "check_pypolynode(self, node): self.assertTrue(len(node.Contour) == 0 or len(node.Contour) > 2) #", "len(solution_single)) _do_solutions_match(solution, solution_single) def test_clean_polygon(self): solution = pyclipper.CleanPolygon(PATH_CLIP_1) self.assertEqual(len(solution), len(PATH_CLIP_1))", "= self.pc.GetBounds() self.assertIsInstance(bounds, pyclipper.PyIntRect) self.assertEqual(bounds.left, 180) self.assertEqual(bounds.right, 260) self.assertEqual(bounds.top, 130)", "paths = [path] * 3 def test_value_scale_to(self): value = 0.5", "None: c *= multiplier if addend is not None: c", "values): for val in values: setattr(pc, prop_name, val) 
self.assertEqual(getattr(pc, prop_name),", "= PATH_SUBJ_1 tree.Childs.append(child) child = pyclipper.PyPolyNode() child.IsOpen = True child.Parent", "tree child.Contour = PATH_SUBJ_2 tree.Childs.append(child) child2 = pyclipper.PyPolyNode() child2.IsOpen =", "1) def test_clear(self): pc = pyclipper.PyclipperOffset() self.add_path(pc, PATH_CLIP_1) pc.Clear() solution", "def test_paths_scale_from(self): res = pyclipper.scale_from_clipper(self.paths) assert len(res) == len(self.paths) assert", "= 2 ** 31 path = [(0, 0), (1, 1)]", "coordinates passed into the library are returned exactly, if they", "self.assertEqual(getattr(pc, prop_name), val) def test_pyclipper_properties(self): pc = pyclipper.Pyclipper() for prop_name" ]
[ "import ImporterOptions from loci.plugins import BF from ij.plugin import ImagesToStack", "file on disk Values: (dict). Keys 'x_cal', 'y_cal' = (float)", "== 1: fullres_image = files_found[0] else: print \"Could not find", "'/' + str(len(xml_files)) #Find the orig .nd2 file, copied from", "'.nd2' search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found = [os.path.join(root, f) for (root,", "import BF from ij.plugin import ImagesToStack from ij import io", "= [] for marker_ in type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text))) # cells_dict[type_.find('Type').text]", "else: print \"Could not find fullres image.\" raise ValueError('Found 0", "import io #Records metadata (x,y location) for cells that were", "'/path/to/annotation/out.tsv' with open(f_out_path,'w') as fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_ in enumerate(xml_files):", "cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text) rt = tree.find('Marker_Data') #re-root", ">1 matching file') #Generate the original inputs that were passed", "find fullres image.\" raise ValueError('Found 0 or >1 matching file')", "as used in find_extract_cells xml_files = [os.path.join(base_, f) for base_", "= ET.parse(fpath) cells_dict = {} cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] =", "'1'-'8' = (lists) of tuples containing cell positions in the", "+ os.path.split(xml_)[1] + '...' + str(e+1) + '/' + str(len(xml_files))", "through each xml file f_out_path = '/path/to/annotation/out.tsv' with open(f_out_path,'w') as", "(str) path to xml file on disk Values: (dict). 
Keys", "local, analysis def parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter Xml file to Dictionary", "!= '.'] #Work through each xml file f_out_path = '/path/to/annotation/out.tsv'", "f[0] != '.'] #Work through each xml file f_out_path =", "um y_full_px = vals['counter'][cell_type][i][1] * vals['counter']['y_cal'] #in um #Write Information", "+ cell_label for i in range(len(vals['counter'][cell_type])): print 'Iteration ' +", "Xml file to Dictionary Inputs: fpath (str) path to xml", "tree for type_ in rt.iter('Marker_Type'): cells = [] for marker_", "extract_cells input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict = input_item types_of_interest={'7':'tdtom','8':'gfp'}", "similar locations on the section -> semi-quantiative, local, analysis def", "print 'Working on cell_type ' + cell_label for i in", "+ '...' + str(e+1) + '/' + str(len(xml_files)) #Find the", "'Working on cell_type ' + cell_label for i in range(len(vals['counter'][cell_type])):", "loci.plugins import BF from ij.plugin import ImagesToStack from ij import", "f == orig_f_name] if len(files_found) == 1: fullres_image = files_found[0]", "i in range(len(vals['counter'][cell_type])): print 'Iteration ' + str(i+1) + '/'", "files) in os.walk(search_dir) for f in files if f ==", "= (lists) of tuples containing cell positions in the form", "io #Records metadata (x,y location) for cells that were extracted", "os.path.split(xml_)[1] + '...' + str(e+1) + '/' + str(len(xml_files)) #Find", "print 'Working on file: ' + os.path.split(xml_)[1] + '...' 
+", "the form (x,y) ''' tree = ET.parse(fpath) cells_dict = {}", "xml_files = [os.path.join(base_, f) for base_ in xml_locs for f", "or >1 matching file') #Generate the original inputs that were", "= vals['counter'][cell_type][i][1] * vals['counter']['y_cal'] #in um #Write Information out_title =", "(root, dirs, files) in os.walk(search_dir) for f in files if", "the \"Extract Cells\", recovering positional info and writing to disk", "base_ in xml_locs for f in os.listdir(base_) if f[-3:] ==", "tuples containing cell positions in the form (x,y) ''' tree", "if f[-3:] == 'xml' and f[0] != '.'] #Work through", "for cell_type, cell_label in types_of_interest.iteritems(): print 'Working on cell_type '", "= '/path/to/annotation/out.tsv' with open(f_out_path,'w') as fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_ in", "import os import re from ij import IJ from loci.plugins.in", "Cell-Counter Xml file to Dictionary Inputs: fpath (str) path to", "used in subsequent analysis to cluster cells from similar locations", "str(len(xml_files)) #Find the orig .nd2 file, copied from find_extract_cells.py, see", "in enumerate(xml_files): print 'Working on file: ' + os.path.split(xml_)[1] +", "orig_f_name] if len(files_found) == 1: fullres_image = files_found[0] else: print", "float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text) rt = tree.find('Marker_Data') #re-root the tree", "not find fullres image.\" raise ValueError('Found 0 or >1 matching", "on file: ' + os.path.split(xml_)[1] + '...' 
+ str(e+1) +", "cells = [] for marker_ in type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text))) #", "[] for marker_ in type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text))) # cells_dict[type_.find('Type').text] =", "= [os.path.join(root, f) for (root, dirs, files) in os.walk(search_dir) for", "* vals['counter']['x_cal'] #in um y_full_px = vals['counter'][cell_type][i][1] * vals['counter']['y_cal'] #in", "orig .nd2 file, copied from find_extract_cells.py, see that code for", "tree = ET.parse(fpath) cells_dict = {} cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal']", "from ij.plugin import ImagesToStack from ij import io #Records metadata", "ij.plugin import ImagesToStack from ij import io #Records metadata (x,y", "for (root, dirs, files) in os.walk(search_dir) for f in files", "anim, vals = input_dict #Loop through Cells and Annotate. for", "ValueError('Found 0 or >1 matching file') #Generate the original inputs", "str(i+1) + '/' + str(len(vals['counter'][cell_type])) #Convert Px Downsampled -> Px", "from loci.plugins.in import ImporterOptions from loci.plugins import BF from ij.plugin", "BF from ij.plugin import ImagesToStack from ij import io #Records", "in rt.iter('Marker_Type'): cells = [] for marker_ in type_.iter('Marker'): cells.append((int(marker_[0].text),", "'/' + str(len(vals['counter'][cell_type])) #Convert Px Downsampled -> Px Full Res", "# cells_dict[type_.find('Type').text] = cells return cells_dict #Load Xml Files xml_locs", "f) for (root, dirs, files) in os.walk(search_dir) for f in", "xml_locs = ['/path/to/xml/files'] #same as used in find_extract_cells xml_files =", "input_dict #Loop through Cells and Annotate. 
for cell_type, cell_label in", "'_'.join([anim, cell_label, str(i)]) fout.write('\\n' + '\\t'.join([out_title, str(x_full_px), str(y_full_px)])) #Final tsv", "cell_label, str(i)]) fout.write('\\n' + '\\t'.join([out_title, str(x_full_px), str(y_full_px)])) #Final tsv of", "fout.write('\\n' + '\\t'.join([out_title, str(x_full_px), str(y_full_px)])) #Final tsv of form cell_label,x,y.", "will be used in subsequent analysis to cluster cells from", "'y_cal' = (float) calibrations in each axis. Keys '1'-'8' =", "+ '/' + str(len(xml_files)) #Find the orig .nd2 file, copied", "from ij import io #Records metadata (x,y location) for cells", ".nd2 file, copied from find_extract_cells.py, see that code for more", "dirs, files) in os.walk(search_dir) for f in files if f", "file') #Generate the original inputs that were passed to extract_cells", "on cell_type ' + cell_label for i in range(len(vals['counter'][cell_type])): print", "-> small image. anim, vals = input_dict #Loop through Cells", "inputs that were passed to extract_cells input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image,", "files_found[0] else: print \"Could not find fullres image.\" raise ValueError('Found", "+ '/' + str(len(vals['counter'][cell_type])) #Convert Px Downsampled -> Px Full", "each xml file f_out_path = '/path/to/annotation/out.tsv' with open(f_out_path,'w') as fout:", "to Dictionary Inputs: fpath (str) path to xml file on", "y_full_px = vals['counter'][cell_type][i][1] * vals['counter']['y_cal'] #in um #Write Information out_title", "files_found = [os.path.join(root, f) for (root, dirs, files) in os.walk(search_dir)", "= tree.find('Marker_Data') #re-root the tree for type_ in rt.iter('Marker_Type'): cells", "= float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text) rt = tree.find('Marker_Data') #re-root the", "def parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter Xml file to 
Dictionary Inputs: fpath", "used in find_extract_cells xml_files = [os.path.join(base_, f) for base_ in", "enumerate(xml_files): print 'Working on file: ' + os.path.split(xml_)[1] + '...'", "in os.listdir(base_) if f[-3:] == 'xml' and f[0] != '.']", "in types_of_interest.iteritems(): print 'Working on cell_type ' + cell_label for", "(dict). Keys 'x_cal', 'y_cal' = (float) calibrations in each axis.", "#Records metadata (x,y location) for cells that were extracted with", "from loci.plugins import BF from ij.plugin import ImagesToStack from ij", "type_ in rt.iter('Marker_Type'): cells = [] for marker_ in type_.iter('Marker'):", "str(len(vals['counter'][cell_type])) #Convert Px Downsampled -> Px Full Res x_full_px =", "xml.etree.ElementTree as ET import csv import os import re from", "\"Extract Cells\", recovering positional info and writing to disk instead", "search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found = [os.path.join(root, f) for (root, dirs,", "= {} cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text) rt =", "+ str(e+1) + '/' + str(len(xml_files)) #Find the orig .nd2", "x_full_px = vals['counter'][cell_type][i][0] * vals['counter']['x_cal'] #in um y_full_px = vals['counter'][cell_type][i][1]", "positions in the form (x,y) ''' tree = ET.parse(fpath) cells_dict", "axis. Keys '1'-'8' = (lists) of tuples containing cell positions", "-> semi-quantiative, local, analysis def parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter Xml file", "== 'xml' and f[0] != '.'] #Work through each xml", "input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied from the \"Extract Cells\", recovering positional info", "form (x,y) ''' tree = ET.parse(fpath) cells_dict = {} cells_dict['x_cal']", "calibrations in each axis. 
Keys '1'-'8' = (lists) of tuples", "ET.parse(fpath) cells_dict = {} cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text)", "f in files if f == orig_f_name] if len(files_found) ==", "in files if f == orig_f_name] if len(files_found) == 1:", "(float) calibrations in each axis. Keys '1'-'8' = (lists) of", "um #Write Information out_title = '_'.join([anim, cell_label, str(i)]) fout.write('\\n' +", "to xml file on disk Values: (dict). Keys 'x_cal', 'y_cal'", "image. anim, vals = input_dict #Loop through Cells and Annotate.", "'Working on file: ' + os.path.split(xml_)[1] + '...' + str(e+1)", "disk Values: (dict). Keys 'x_cal', 'y_cal' = (float) calibrations in", "with 1_find_extract_cells.py #metadata will be used in subsequent analysis to", "= input_dict #Loop through Cells and Annotate. for cell_type, cell_label", "from ij import IJ from loci.plugins.in import ImporterOptions from loci.plugins", "see that code for more details. 
orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group()", "'/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found = [os.path.join(root, f) for (root, dirs, files) in", "with open(f_out_path,'w') as fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_ in enumerate(xml_files): print", "fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_ in enumerate(xml_files): print 'Working on file: '", "the section -> semi-quantiative, local, analysis def parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter", "passed to extract_cells input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict =", "cell positions in the form (x,y) ''' tree = ET.parse(fpath)", "print \"Could not find fullres image.\" raise ValueError('Found 0 or", "[os.path.join(root, f) for (root, dirs, files) in os.walk(search_dir) for f", "'xml' and f[0] != '.'] #Work through each xml file", "vals['counter']['x_cal'] #in um y_full_px = vals['counter'][cell_type][i][1] * vals['counter']['y_cal'] #in um", "= float(tree.find('./Image_Properties/Y_Calibration').text) rt = tree.find('Marker_Data') #re-root the tree for type_", "(x,y) ''' tree = ET.parse(fpath) cells_dict = {} cells_dict['x_cal'] =", "vals['counter'][cell_type][i][0] * vals['counter']['x_cal'] #in um y_full_px = vals['counter'][cell_type][i][1] * vals['counter']['y_cal']", "+ str(i+1) + '/' + str(len(vals['counter'][cell_type])) #Convert Px Downsampled ->", "Dictionary Inputs: fpath (str) path to xml file on disk", "ImporterOptions from loci.plugins import BF from ij.plugin import ImagesToStack from", "= '_'.join([anim, cell_label, str(i)]) fout.write('\\n' + '\\t'.join([out_title, str(x_full_px), str(y_full_px)])) #Final", "xml file on disk Values: (dict). 
Keys 'x_cal', 'y_cal' =", "raise ValueError('Found 0 or >1 matching file') #Generate the original", "-> Px Full Res x_full_px = vals['counter'][cell_type][i][0] * vals['counter']['x_cal'] #in", "Px Downsampled -> Px Full Res x_full_px = vals['counter'][cell_type][i][0] *", "#Convert Px Downsampled -> Px Full Res x_full_px = vals['counter'][cell_type][i][0]", "range(len(vals['counter'][cell_type])): print 'Iteration ' + str(i+1) + '/' + str(len(vals['counter'][cell_type]))", "'''Parse Cell-Counter Xml file to Dictionary Inputs: fpath (str) path", "to disk instead of extracting cell -> small image. anim,", "cell_type ' + cell_label for i in range(len(vals['counter'][cell_type])): print 'Iteration", "1: fullres_image = files_found[0] else: print \"Could not find fullres", "f_out_path = '/path/to/annotation/out.tsv' with open(f_out_path,'w') as fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_", "for f in files if f == orig_f_name] if len(files_found)", "as ET import csv import os import re from ij", "os.path.split(xml_)[1]).group() + '.nd2' search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found = [os.path.join(root, f)", "in find_extract_cells xml_files = [os.path.join(base_, f) for base_ in xml_locs", "os import re from ij import IJ from loci.plugins.in import", "'x_cal', 'y_cal' = (float) calibrations in each axis. Keys '1'-'8'", "cells_dict #Load Xml Files xml_locs = ['/path/to/xml/files'] #same as used", "containing cell positions in the form (x,y) ''' tree =", "vals = input_dict #Loop through Cells and Annotate. for cell_type,", "cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text) rt = tree.find('Marker_Data') #re-root the tree for", "ET import csv import os import re from ij import", "semi-quantiative, local, analysis def parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter Xml file to", "small image. 
anim, vals = input_dict #Loop through Cells and", "int(marker_[1].text))) # cells_dict[type_.find('Type').text] = cells return cells_dict #Load Xml Files", "import re from ij import IJ from loci.plugins.in import ImporterOptions", "more details. orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() + '.nd2' search_dir =", "through Cells and Annotate. for cell_type, cell_label in types_of_interest.iteritems(): print", "IJ from loci.plugins.in import ImporterOptions from loci.plugins import BF from", "for cells that were extracted with 1_find_extract_cells.py #metadata will be", "in each axis. Keys '1'-'8' = (lists) of tuples containing", "fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_ in enumerate(xml_files): print 'Working on file:", "xml_locs for f in os.listdir(base_) if f[-3:] == 'xml' and", "* vals['counter']['y_cal'] #in um #Write Information out_title = '_'.join([anim, cell_label,", "location) for cells that were extracted with 1_find_extract_cells.py #metadata will", "cell_type, cell_label in types_of_interest.iteritems(): print 'Working on cell_type ' +", "#Work through each xml file f_out_path = '/path/to/annotation/out.tsv' with open(f_out_path,'w')", "str(e+1) + '/' + str(len(xml_files)) #Find the orig .nd2 file,", "#Generate the original inputs that were passed to extract_cells input_item", "#Loop through Cells and Annotate. for cell_type, cell_label in types_of_interest.iteritems():", "find_extract_cells.py, see that code for more details. orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)',", "{} cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text) rt = tree.find('Marker_Data')", "cell_label in types_of_interest.iteritems(): print 'Working on cell_type ' + cell_label", "csv import os import re from ij import IJ from", "'...' 
+ str(e+1) + '/' + str(len(xml_files)) #Find the orig", "were extracted with 1_find_extract_cells.py #metadata will be used in subsequent", "fpath (str) path to xml file on disk Values: (dict).", "loci.plugins.in import ImporterOptions from loci.plugins import BF from ij.plugin import", "from the \"Extract Cells\", recovering positional info and writing to", "subsequent analysis to cluster cells from similar locations on the", "open(f_out_path,'w') as fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_ in enumerate(xml_files): print 'Working", "from similar locations on the section -> semi-quantiative, local, analysis", "ij import IJ from loci.plugins.in import ImporterOptions from loci.plugins import", "'counter':parse_cellcounter_to_dict(xml_)}) input_dict = input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied from the \"Extract Cells\",", "['/path/to/xml/files'] #same as used in find_extract_cells xml_files = [os.path.join(base_, f)", "input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict = input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied", "[os.path.join(base_, f) for base_ in xml_locs for f in os.listdir(base_)", "cells that were extracted with 1_find_extract_cells.py #metadata will be used", "the tree for type_ in rt.iter('Marker_Type'): cells = [] for", "#Write Information out_title = '_'.join([anim, cell_label, str(i)]) fout.write('\\n' + '\\t'.join([out_title,", "in os.walk(search_dir) for f in files if f == orig_f_name]", "to extract_cells input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict = input_item", "that code for more details. 
orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() +", "cells_dict[type_.find('Type').text] = cells return cells_dict #Load Xml Files xml_locs =", "#same as used in find_extract_cells xml_files = [os.path.join(base_, f) for", "recovering positional info and writing to disk instead of extracting", "{'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict = input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied from the \"Extract", "#Find the orig .nd2 file, copied from find_extract_cells.py, see that", "Cells and Annotate. for cell_type, cell_label in types_of_interest.iteritems(): print 'Working", "in subsequent analysis to cluster cells from similar locations on", "= '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found = [os.path.join(root, f) for (root, dirs, files)", "were passed to extract_cells input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict", "files if f == orig_f_name] if len(files_found) == 1: fullres_image", "= files_found[0] else: print \"Could not find fullres image.\" raise", "f in os.listdir(base_) if f[-3:] == 'xml' and f[0] !=", "type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text))) # cells_dict[type_.find('Type').text] = cells return cells_dict #Load", "#metadata will be used in subsequent analysis to cluster cells", "= ['/path/to/xml/files'] #same as used in find_extract_cells xml_files = [os.path.join(base_,", "the original inputs that were passed to extract_cells input_item =", "' + cell_label for i in range(len(vals['counter'][cell_type])): print 'Iteration '", "Annotate. for cell_type, cell_label in types_of_interest.iteritems(): print 'Working on cell_type", "== orig_f_name] if len(files_found) == 1: fullres_image = files_found[0] else:", "be used in subsequent analysis to cluster cells from similar", "code for more details. 
orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() + '.nd2'", "' + os.path.split(xml_)[1] + '...' + str(e+1) + '/' +", "input_dict = input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied from the \"Extract Cells\", recovering", "= (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict = input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied from", "file to Dictionary Inputs: fpath (str) path to xml file", "path to xml file on disk Values: (dict). Keys 'x_cal',", "import ImagesToStack from ij import io #Records metadata (x,y location)", "locations on the section -> semi-quantiative, local, analysis def parse_cellcounter_to_dict(fpath):", "matching file') #Generate the original inputs that were passed to", "in the form (x,y) ''' tree = ET.parse(fpath) cells_dict =", "Res x_full_px = vals['counter'][cell_type][i][0] * vals['counter']['x_cal'] #in um y_full_px =", "vals['counter'][cell_type][i][1] * vals['counter']['y_cal'] #in um #Write Information out_title = '_'.join([anim,", "for base_ in xml_locs for f in os.listdir(base_) if f[-3:]", "info and writing to disk instead of extracting cell ->", "\"Could not find fullres image.\" raise ValueError('Found 0 or >1", "'Iteration ' + str(i+1) + '/' + str(len(vals['counter'][cell_type])) #Convert Px", "and Annotate. 
for cell_type, cell_label in types_of_interest.iteritems(): print 'Working on", "cell_label for i in range(len(vals['counter'][cell_type])): print 'Iteration ' + str(i+1)", "cluster cells from similar locations on the section -> semi-quantiative,", "positional info and writing to disk instead of extracting cell", "Information out_title = '_'.join([anim, cell_label, str(i)]) fout.write('\\n' + '\\t'.join([out_title, str(x_full_px),", "cells_dict = {} cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text) cells_dict['y_cal'] = float(tree.find('./Image_Properties/Y_Calibration').text) rt", "#Copied from the \"Extract Cells\", recovering positional info and writing", "if f == orig_f_name] if len(files_found) == 1: fullres_image =", "import IJ from loci.plugins.in import ImporterOptions from loci.plugins import BF", "rt = tree.find('Marker_Data') #re-root the tree for type_ in rt.iter('Marker_Type'):", "instead of extracting cell -> small image. anim, vals =", "Inputs: fpath (str) path to xml file on disk Values:", "that were passed to extract_cells input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)})", "' + str(i+1) + '/' + str(len(vals['counter'][cell_type])) #Convert Px Downsampled", "if len(files_found) == 1: fullres_image = files_found[0] else: print \"Could", "fullres_image = files_found[0] else: print \"Could not find fullres image.\"", "in range(len(vals['counter'][cell_type])): print 'Iteration ' + str(i+1) + '/' +", "ij import io #Records metadata (x,y location) for cells that", "for e,xml_ in enumerate(xml_files): print 'Working on file: ' +", "import xml.etree.ElementTree as ET import csv import os import re", "Keys '1'-'8' = (lists) of tuples containing cell positions in", "Px Full Res x_full_px = vals['counter'][cell_type][i][0] * vals['counter']['x_cal'] #in um", "file, copied from find_extract_cells.py, see that code for more details.", "cells return 
cells_dict #Load Xml Files xml_locs = ['/path/to/xml/files'] #same", "find_extract_cells xml_files = [os.path.join(base_, f) for base_ in xml_locs for", "cell -> small image. anim, vals = input_dict #Loop through", "#in um y_full_px = vals['counter'][cell_type][i][1] * vals['counter']['y_cal'] #in um #Write", "f) for base_ in xml_locs for f in os.listdir(base_) if", "import csv import os import re from ij import IJ", "(x,y location) for cells that were extracted with 1_find_extract_cells.py #metadata", "= vals['counter'][cell_type][i][0] * vals['counter']['x_cal'] #in um y_full_px = vals['counter'][cell_type][i][1] *", "+ str(len(vals['counter'][cell_type])) #Convert Px Downsampled -> Px Full Res x_full_px", "on disk Values: (dict). Keys 'x_cal', 'y_cal' = (float) calibrations", "#in um #Write Information out_title = '_'.join([anim, cell_label, str(i)]) fout.write('\\n'", "types_of_interest.iteritems(): print 'Working on cell_type ' + cell_label for i", "in type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text))) # cells_dict[type_.find('Type').text] = cells return cells_dict", "return cells_dict #Load Xml Files xml_locs = ['/path/to/xml/files'] #same as", "re from ij import IJ from loci.plugins.in import ImporterOptions from", "fullres image.\" raise ValueError('Found 0 or >1 matching file') #Generate", "= input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied from the \"Extract Cells\", recovering positional", "#re-root the tree for type_ in rt.iter('Marker_Type'): cells = []", "for marker_ in type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text))) # cells_dict[type_.find('Type').text] = cells", "orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() + '.nd2' search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found", "types_of_interest={'7':'tdtom','8':'gfp'} #Copied from the \"Extract Cells\", recovering positional info and", 
"vals['counter']['y_cal'] #in um #Write Information out_title = '_'.join([anim, cell_label, str(i)])", "of extracting cell -> small image. anim, vals = input_dict", "and writing to disk instead of extracting cell -> small", "''' tree = ET.parse(fpath) cells_dict = {} cells_dict['x_cal'] = float(tree.find('./Image_Properties/X_Calibration').text)", "that were extracted with 1_find_extract_cells.py #metadata will be used in", "image.\" raise ValueError('Found 0 or >1 matching file') #Generate the", "parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter Xml file to Dictionary Inputs: fpath (str)", "file f_out_path = '/path/to/annotation/out.tsv' with open(f_out_path,'w') as fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for", "= re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() + '.nd2' search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found =", "+ '.nd2' search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found = [os.path.join(root, f) for", "Values: (dict). Keys 'x_cal', 'y_cal' = (float) calibrations in each", "the orig .nd2 file, copied from find_extract_cells.py, see that code", "0 or >1 matching file') #Generate the original inputs that", "original inputs that were passed to extract_cells input_item = (re.search('(?<=_).*',orig_f_name[:-4]).group(),", "float(tree.find('./Image_Properties/Y_Calibration').text) rt = tree.find('Marker_Data') #re-root the tree for type_ in", "from find_extract_cells.py, see that code for more details. 
orig_f_name =", "analysis to cluster cells from similar locations on the section", "marker_ in type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text))) # cells_dict[type_.find('Type').text] = cells return", "ImagesToStack from ij import io #Records metadata (x,y location) for", "= [os.path.join(base_, f) for base_ in xml_locs for f in", "#Load Xml Files xml_locs = ['/path/to/xml/files'] #same as used in", "<filename>imageproc_OE_IF_quant/2_annotate_extracted_cells.py import xml.etree.ElementTree as ET import csv import os import", "Downsampled -> Px Full Res x_full_px = vals['counter'][cell_type][i][0] * vals['counter']['x_cal']", "re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() + '.nd2' search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1]) files_found = [os.path.join(root,", "to cluster cells from similar locations on the section ->", "= (float) calibrations in each axis. Keys '1'-'8' = (lists)", "out_title = '_'.join([anim, cell_label, str(i)]) fout.write('\\n' + '\\t'.join([out_title, str(x_full_px), str(y_full_px)]))", "cells.append((int(marker_[0].text), int(marker_[1].text))) # cells_dict[type_.find('Type').text] = cells return cells_dict #Load Xml", "disk instead of extracting cell -> small image. anim, vals", "rt.iter('Marker_Type'): cells = [] for marker_ in type_.iter('Marker'): cells.append((int(marker_[0].text), int(marker_[1].text)))", "Files xml_locs = ['/path/to/xml/files'] #same as used in find_extract_cells xml_files", "Xml Files xml_locs = ['/path/to/xml/files'] #same as used in find_extract_cells", "for i in range(len(vals['counter'][cell_type])): print 'Iteration ' + str(i+1) +", "analysis def parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter Xml file to Dictionary Inputs:", "file: ' + os.path.split(xml_)[1] + '...' + str(e+1) + '/'", "each axis. 
Keys '1'-'8' = (lists) of tuples containing cell", "writing to disk instead of extracting cell -> small image.", "for more details. orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() + '.nd2' search_dir", "print 'Iteration ' + str(i+1) + '/' + str(len(vals['counter'][cell_type])) #Convert", "Cells\", recovering positional info and writing to disk instead of", "as fout: fout.write('\\t'.join(['cell','x_um','y_um'])) for e,xml_ in enumerate(xml_files): print 'Working on", "1_find_extract_cells.py #metadata will be used in subsequent analysis to cluster", "Full Res x_full_px = vals['counter'][cell_type][i][0] * vals['counter']['x_cal'] #in um y_full_px", "metadata (x,y location) for cells that were extracted with 1_find_extract_cells.py", "for type_ in rt.iter('Marker_Type'): cells = [] for marker_ in", "cells from similar locations on the section -> semi-quantiative, local,", "f[-3:] == 'xml' and f[0] != '.'] #Work through each", "os.listdir(base_) if f[-3:] == 'xml' and f[0] != '.'] #Work", "os.walk(search_dir) for f in files if f == orig_f_name] if", "for f in os.listdir(base_) if f[-3:] == 'xml' and f[0]", "in xml_locs for f in os.listdir(base_) if f[-3:] == 'xml'", "on the section -> semi-quantiative, local, analysis def parse_cellcounter_to_dict(fpath): '''Parse", "'.'] #Work through each xml file f_out_path = '/path/to/annotation/out.tsv' with", "str(i)]) fout.write('\\n' + '\\t'.join([out_title, str(x_full_px), str(y_full_px)])) #Final tsv of form", "= cells return cells_dict #Load Xml Files xml_locs = ['/path/to/xml/files']", "of tuples containing cell positions in the form (x,y) '''", "section -> semi-quantiative, local, analysis def parse_cellcounter_to_dict(fpath): '''Parse Cell-Counter Xml", "and f[0] != '.'] #Work through each xml file f_out_path", "tree.find('Marker_Data') #re-root the tree for type_ in rt.iter('Marker_Type'): cells =", "extracted with 1_find_extract_cells.py #metadata will be used in 
subsequent analysis", "len(files_found) == 1: fullres_image = files_found[0] else: print \"Could not", "(lists) of tuples containing cell positions in the form (x,y)", "details. orig_f_name = re.search('(?<=CellCounter_).*(?=\\\\-Downsampled)', os.path.split(xml_)[1]).group() + '.nd2' search_dir = '/'.join(os.path.split(xml_)[0].split('/')[:-1])", "+ str(len(xml_files)) #Find the orig .nd2 file, copied from find_extract_cells.py,", "e,xml_ in enumerate(xml_files): print 'Working on file: ' + os.path.split(xml_)[1]", "(re.search('(?<=_).*',orig_f_name[:-4]).group(), {'fullres':fullres_image, 'counter':parse_cellcounter_to_dict(xml_)}) input_dict = input_item types_of_interest={'7':'tdtom','8':'gfp'} #Copied from the", "Keys 'x_cal', 'y_cal' = (float) calibrations in each axis. Keys", "copied from find_extract_cells.py, see that code for more details. orig_f_name", "xml file f_out_path = '/path/to/annotation/out.tsv' with open(f_out_path,'w') as fout: fout.write('\\t'.join(['cell','x_um','y_um']))", "extracting cell -> small image. anim, vals = input_dict #Loop" ]
[ "done = env.step(action) print('reward', reward, 'done', done) if reward >", "10000 env.set_agent_at([2, 2], 0) env.set_pig_at([4, 4], 0) for i in", "\", i) env.render() action = random.randint(0, 4) print('action is', action)", "0) for i in range(max_iter): print(\"iter= \", i) env.render() action", "'done', done) if reward > 0: print('catch the pig', reward,", "reward, 'done', done) if reward > 0: print('catch the pig',", "reward, done = env.step(action) print('reward', reward, 'done', done) if reward", "EnvSingleCatchPigs import random env = EnvSingleCatchPigs(7) max_iter = 10000 env.set_agent_at([2,", "env.step(action) print('reward', reward, 'done', done) if reward > 0: print('catch", "print('action is', action) reward, done = env.step(action) print('reward', reward, 'done',", "import EnvSingleCatchPigs import random env = EnvSingleCatchPigs(7) max_iter = 10000", "= random.randint(0, 4) print('action is', action) reward, done = env.step(action)", "= env.step(action) print('reward', reward, 'done', done) if reward > 0:", "EnvSingleCatchPigs(7) max_iter = 10000 env.set_agent_at([2, 2], 0) env.set_pig_at([4, 4], 0)", "action) reward, done = env.step(action) print('reward', reward, 'done', done) if", "random env = EnvSingleCatchPigs(7) max_iter = 10000 env.set_agent_at([2, 2], 0)", "max_iter = 10000 env.set_agent_at([2, 2], 0) env.set_pig_at([4, 4], 0) for", "<reponame>Abluceli/Multi-agent-Reinforcement-Learning-Algorithms from env_SingleCatchPigs import EnvSingleCatchPigs import random env = EnvSingleCatchPigs(7)", "i) env.render() action = random.randint(0, 4) print('action is', action) reward,", "env_SingleCatchPigs import EnvSingleCatchPigs import random env = EnvSingleCatchPigs(7) max_iter =", "env.set_pig_at([4, 4], 0) for i in range(max_iter): print(\"iter= \", i)", "action = random.randint(0, 4) print('action is', action) reward, done =", "in range(max_iter): print(\"iter= \", i) env.render() action = random.randint(0, 4)", "for i in range(max_iter): 
print(\"iter= \", i) env.render() action =", "print(\"iter= \", i) env.render() action = random.randint(0, 4) print('action is',", "done) if reward > 0: print('catch the pig', reward, done)", "env.render() action = random.randint(0, 4) print('action is', action) reward, done", "import random env = EnvSingleCatchPigs(7) max_iter = 10000 env.set_agent_at([2, 2],", "2], 0) env.set_pig_at([4, 4], 0) for i in range(max_iter): print(\"iter=", "i in range(max_iter): print(\"iter= \", i) env.render() action = random.randint(0,", "random.randint(0, 4) print('action is', action) reward, done = env.step(action) print('reward',", "range(max_iter): print(\"iter= \", i) env.render() action = random.randint(0, 4) print('action", "4) print('action is', action) reward, done = env.step(action) print('reward', reward,", "= 10000 env.set_agent_at([2, 2], 0) env.set_pig_at([4, 4], 0) for i", "0) env.set_pig_at([4, 4], 0) for i in range(max_iter): print(\"iter= \",", "env = EnvSingleCatchPigs(7) max_iter = 10000 env.set_agent_at([2, 2], 0) env.set_pig_at([4,", "env.set_agent_at([2, 2], 0) env.set_pig_at([4, 4], 0) for i in range(max_iter):", "= EnvSingleCatchPigs(7) max_iter = 10000 env.set_agent_at([2, 2], 0) env.set_pig_at([4, 4],", "is', action) reward, done = env.step(action) print('reward', reward, 'done', done)", "print('reward', reward, 'done', done) if reward > 0: print('catch the", "4], 0) for i in range(max_iter): print(\"iter= \", i) env.render()", "from env_SingleCatchPigs import EnvSingleCatchPigs import random env = EnvSingleCatchPigs(7) max_iter" ]
[ "l: l[1] if l and len(l) > 1 else None),", "return _read_tsv(f) def _download_tsv_gz(url, dst_dir): path = dst_dir / _TSV_GZ_FILENAME", "def _set_multiindex_dtype(index, level, type_): index_df = index.to_frame() index_df[level] = index_df[level].astype(type_)", "directly to c-types # [inferred_type->mixed,key->block0_values] [items->['flag']]\" data[\"flag\"] = data[\"flag\"].replace({\"\": None})", "pd.DataFrame( { \"value\": split.apply(lambda l: l[0] if l else None),", "pd import numpy as np from eust.core import _download_file, conf", "saved in the HDF # file as a string, for", "HDF # file as a string, for performance reasons. #", "will pickle object types # that it cannot map directly", "d.index = pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names, ) # cannot handle multidimensional", "split = series.str.split(\" \") df = pd.DataFrame( { \"value\": split.apply(lambda", "len(set(index_data)) == len(index_data) # no duplicates assert len(row_dimension_names) >= 1", "index_df[level].astype(type_) new_index = index_df.set_index(index.names).index return new_index def _read_tsv(path_or_buffer) -> pd.DataFrame:", "_HDF_TABLE_PATH = \"eurostat_table\" def _read_tsv_gz(path_or_buffer) -> pd.DataFrame: with gzip.open(path_or_buffer, \"rb\")", "column labels d = d.stack() assert set(d.apply(type)) == {str} assert", "path) def _read(the_dir): hdf_path = the_dir / _HDF_FILENAME tsv_gz_path =", "as a string, for performance reasons. 
# This is a", "d.stack() assert set(d.apply(type)) == {str} assert isinstance(d, pd.Series), d.columns assert", "pd.DataFrame: d = pd.read_csv(path_or_buffer, sep=\"\\t\", header=0, dtype=str) top_left_cell = d.columns[0]", "\"value\": split.apply(lambda l: l[0] if l else None), \"flag\": split.apply(lambda", "time_strings = d.index.unique(\"time\") matches_year = (_YEAR_RE.match(s) for s in time_strings)", "_read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], ) # Replace empty", "\"value\"] = np.nan d[\"value\"] = d[\"value\"].astype(float) if \"time\" in d.index.names:", "bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series: pd.Series) -> pd.DataFrame: split = series.str.split(\" \")", "_split_values_flags(d) d.loc[d[\"value\"] == \":\", \"value\"] = np.nan d[\"value\"] = d[\"value\"].astype(float)", "may suffer as PyTables will pickle object types # that", "a pandas PerformanceWarning: # \"your performance may suffer as PyTables", "_set_multiindex_dtype(index, level, type_): index_df = index.to_frame() index_df[level] = index_df[level].astype(type_) new_index", "_set_multiindex_dtype(d.index, \"time\", int) d = d.sort_index() return d _TSV_GZ_FILENAME =", "performance reasons. 
# This is a pandas PerformanceWarning: # \"your", "\"data.h5\" _HDF_TABLE_PATH = \"eurostat_table\" def _read_tsv_gz(path_or_buffer) -> pd.DataFrame: with gzip.open(path_or_buffer,", "= d[\"value\"].astype(float) if \"time\" in d.index.names: time_strings = d.index.unique(\"time\") matches_year", "labels d = d.stack() assert set(d.apply(type)) == {str} assert isinstance(d,", "index_df[level] = index_df[level].astype(type_) new_index = index_df.set_index(index.names).index return new_index def _read_tsv(path_or_buffer)", "= index_data.apply(lambda s: s.split(\",\")) d.index = pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names, )", "== \":\", \"value\"] = np.nan d[\"value\"] = d[\"value\"].astype(float) if \"time\"", "data = _read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], ) #", "_is_valid_dimension_name(s: str) -> bool: return bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series: pd.Series) ->", "-> pd.DataFrame: with gzip.open(path_or_buffer, \"rb\") as f: return _read_tsv(f) def", "utf-8 -*- import re import gzip import pandas as pd", "a string, for performance reasons. 
# This is a pandas", "pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except FileNotFoundError: data = _read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path, _HDF_TABLE_PATH,", "bool: return bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series: pd.Series) -> pd.DataFrame: split =", "pd.Series) -> pd.DataFrame: split = series.str.split(\" \") df = pd.DataFrame(", "= the_dir / _HDF_FILENAME tsv_gz_path = the_dir / _TSV_GZ_FILENAME try:", "= d.stack() assert set(d.apply(type)) == {str} assert isinstance(d, pd.Series), d.columns", "_download_tsv_gz(url, dst_dir): path = dst_dir / _TSV_GZ_FILENAME _download_file(url, path) def", "l[0] if l else None), \"flag\": split.apply(lambda l: l[1] if", "at this point so that the null flag is saved", "list(zip(*index_data)), names=row_dimension_names, ) # cannot handle multidimensional column labels d", "the null flag is saved in the HDF # file", "re import gzip import pandas as pd import numpy as", "> 1 else None), } ) return df def _set_multiindex_dtype(index,", "this point so that the null flag is saved in", "else None), } ) return df def _set_multiindex_dtype(index, level, type_):", "= d.sort_index() return d _TSV_GZ_FILENAME = \"data.tsv.gz\" _HDF_FILENAME = \"data.h5\"", "data.to_hdf( hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], ) # Replace empty flags", "if all(matches_year): d.index = _set_multiindex_dtype(d.index, \"time\", int) d = d.sort_index()", "s.split(\",\")) d.index = pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names, ) # cannot handle", "_HDF_TABLE_PATH) except FileNotFoundError: data = _read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"],", "= pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names, ) # cannot handle multidimensional column", "row_dimension_names = row_dimension_names.split(\",\") index_data = d[top_left_cell] del d[top_left_cell] assert 
len(set(index_data))", "d = d.stack() assert set(d.apply(type)) == {str} assert isinstance(d, pd.Series),", "= _set_multiindex_dtype(d.index, \"time\", int) d = d.sort_index() return d _TSV_GZ_FILENAME", "import _download_file, conf _DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\") def", "d[\"value\"] = d[\"value\"].astype(float) if \"time\" in d.index.names: time_strings = d.index.unique(\"time\")", "This is a pandas PerformanceWarning: # \"your performance may suffer", "conf _DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s: str)", "# Doing it at this point so that the null", "it at this point so that the null flag is", "pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names, ) # cannot handle multidimensional column labels", "header_dimension_name index_data = index_data.apply(lambda s: s.split(\",\")) d.index = pd.MultiIndex.from_arrays( list(zip(*index_data)),", "\"time\" in d.index.names: time_strings = d.index.unique(\"time\") matches_year = (_YEAR_RE.match(s) for", "= \"data.tsv.gz\" _HDF_FILENAME = \"data.h5\" _HDF_TABLE_PATH = \"eurostat_table\" def _read_tsv_gz(path_or_buffer)", "= d.index.unique(\"time\") matches_year = (_YEAR_RE.match(s) for s in time_strings) if", "= index_df.set_index(index.names).index return new_index def _read_tsv(path_or_buffer) -> pd.DataFrame: d =", "\"flag\": split.apply(lambda l: l[1] if l and len(l) > 1", "assert set(d.apply(type)) == {str} assert isinstance(d, pd.Series), d.columns assert all(map(_is_valid_dimension_name,", "None), } ) return df def _set_multiindex_dtype(index, level, type_): index_df", "return d _TSV_GZ_FILENAME = \"data.tsv.gz\" _HDF_FILENAME = \"data.h5\" _HDF_TABLE_PATH =", "-> bool: return bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series: pd.Series) -> pd.DataFrame: split", "-*- import re import gzip import pandas as pd import", "{str} assert 
isinstance(d, pd.Series), d.columns assert all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels( [level.str.strip()", "None (issue #3) # # Doing it at this point", "d _TSV_GZ_FILENAME = \"data.tsv.gz\" _HDF_FILENAME = \"data.h5\" _HDF_TABLE_PATH = \"eurostat_table\"", "int) d = d.sort_index() return d _TSV_GZ_FILENAME = \"data.tsv.gz\" _HDF_FILENAME", "reasons. # This is a pandas PerformanceWarning: # \"your performance", "from eust.core import _download_file, conf _DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE =", "row_dimension_names, header_dimension_name = top_left_cell.split(\"\\\\\") row_dimension_names = row_dimension_names.split(\",\") index_data = d[top_left_cell]", "len(row_dimension_names) >= 1 d.columns.name = header_dimension_name index_data = index_data.apply(lambda s:", "np from eust.core import _download_file, conf _DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE", "split.apply(lambda l: l[1] if l and len(l) > 1 else", "assert len(set(index_data)) == len(index_data) # no duplicates assert len(row_dimension_names) >=", "_download_file(url, path) def _read(the_dir): hdf_path = the_dir / _HDF_FILENAME tsv_gz_path", "that the null flag is saved in the HDF #", "so that the null flag is saved in the HDF", "_HDF_FILENAME = \"data.h5\" _HDF_TABLE_PATH = \"eurostat_table\" def _read_tsv_gz(path_or_buffer) -> pd.DataFrame:", "== len(index_data) # no duplicates assert len(row_dimension_names) >= 1 d.columns.name", "empty flags by None (issue #3) # # Doing it", "if l and len(l) > 1 else None), } )", "data = pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except FileNotFoundError: data = _read_tsv_gz(tsv_gz_path) data.to_hdf(", "= re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s: str) -> bool: return bool(_DIMENSION_NAME_RE.match(s)) def", "l and len(l) > 1 else None), } ) return", "pandas PerformanceWarning: # \"your performance may suffer as PyTables will", "# cannot handle multidimensional column labels d = 
d.stack() assert", "suffer as PyTables will pickle object types # that it", "all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels( [level.str.strip() for level in d.index.levels], inplace=True )", "map directly to c-types # [inferred_type->mixed,key->block0_values] [items->['flag']]\" data[\"flag\"] = data[\"flag\"].replace({\"\":", "df = pd.DataFrame( { \"value\": split.apply(lambda l: l[0] if l", "d.sort_index() return d _TSV_GZ_FILENAME = \"data.tsv.gz\" _HDF_FILENAME = \"data.h5\" _HDF_TABLE_PATH", "d = d.sort_index() return d _TSV_GZ_FILENAME = \"data.tsv.gz\" _HDF_FILENAME =", "import re import gzip import pandas as pd import numpy", "isinstance(d, pd.Series), d.columns assert all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels( [level.str.strip() for level", "del d[top_left_cell] assert len(set(index_data)) == len(index_data) # no duplicates assert", "= np.nan d[\"value\"] = d[\"value\"].astype(float) if \"time\" in d.index.names: time_strings", "= top_left_cell.split(\"\\\\\") row_dimension_names = row_dimension_names.split(\",\") index_data = d[top_left_cell] del d[top_left_cell]", "types # that it cannot map directly to c-types #", "by None (issue #3) # # Doing it at this", "the HDF # file as a string, for performance reasons.", "assert len(row_dimension_names) >= 1 d.columns.name = header_dimension_name index_data = index_data.apply(lambda", "d[top_left_cell] assert len(set(index_data)) == len(index_data) # no duplicates assert len(row_dimension_names)", "is a pandas PerformanceWarning: # \"your performance may suffer as", "d.index = _set_multiindex_dtype(d.index, \"time\", int) d = d.sort_index() return d", "PyTables will pickle object types # that it cannot map", "inplace=True ) d = _split_values_flags(d) d.loc[d[\"value\"] == \":\", \"value\"] =", "set(d.apply(type)) == {str} assert isinstance(d, pd.Series), d.columns assert all(map(_is_valid_dimension_name, d.index.names))", "= the_dir / _TSV_GZ_FILENAME try: data = 
pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except", ") # Replace empty flags by None (issue #3) #", "row_dimension_names.split(\",\") index_data = d[top_left_cell] del d[top_left_cell] assert len(set(index_data)) == len(index_data)", "<filename>eust/tables/data.py # -*- coding: utf-8 -*- import re import gzip", "import gzip import pandas as pd import numpy as np", "_HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], ) # Replace empty flags by None", "= pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except FileNotFoundError: data = _read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path,", "coding: utf-8 -*- import re import gzip import pandas as", "import pandas as pd import numpy as np from eust.core", "-> pd.DataFrame: d = pd.read_csv(path_or_buffer, sep=\"\\t\", header=0, dtype=str) top_left_cell =", "with gzip.open(path_or_buffer, \"rb\") as f: return _read_tsv(f) def _download_tsv_gz(url, dst_dir):", "len(index_data) # no duplicates assert len(row_dimension_names) >= 1 d.columns.name =", "d.columns.name = header_dimension_name index_data = index_data.apply(lambda s: s.split(\",\")) d.index =", "assert isinstance(d, pd.Series), d.columns assert all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels( [level.str.strip() for", "as np from eust.core import _download_file, conf _DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\")", "\"data.tsv.gz\" _HDF_FILENAME = \"data.h5\" _HDF_TABLE_PATH = \"eurostat_table\" def _read_tsv_gz(path_or_buffer) ->", "\") df = pd.DataFrame( { \"value\": split.apply(lambda l: l[0] if", "# \"your performance may suffer as PyTables will pickle object", "_YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s: str) -> bool: return bool(_DIMENSION_NAME_RE.match(s))", "def _read_tsv(path_or_buffer) -> pd.DataFrame: d = pd.read_csv(path_or_buffer, sep=\"\\t\", header=0, dtype=str)", "\"rb\") as f: return _read_tsv(f) def _download_tsv_gz(url, dst_dir): path =", "d = pd.read_csv(path_or_buffer, 
sep=\"\\t\", header=0, dtype=str) top_left_cell = d.columns[0] row_dimension_names,", "= pd.DataFrame( { \"value\": split.apply(lambda l: l[0] if l else", "handle multidimensional column labels d = d.stack() assert set(d.apply(type)) ==", "re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s: str) -> bool: return", "= pd.read_csv(path_or_buffer, sep=\"\\t\", header=0, dtype=str) top_left_cell = d.columns[0] row_dimension_names, header_dimension_name", "len(l) > 1 else None), } ) return df def", "Doing it at this point so that the null flag", "= dst_dir / _TSV_GZ_FILENAME _download_file(url, path) def _read(the_dir): hdf_path =", "pd.DataFrame: with gzip.open(path_or_buffer, \"rb\") as f: return _read_tsv(f) def _download_tsv_gz(url,", "#3) # # Doing it at this point so that", "for level in d.index.levels], inplace=True ) d = _split_values_flags(d) d.loc[d[\"value\"]", "d[top_left_cell] del d[top_left_cell] assert len(set(index_data)) == len(index_data) # no duplicates", "pd.read_csv(path_or_buffer, sep=\"\\t\", header=0, dtype=str) top_left_cell = d.columns[0] row_dimension_names, header_dimension_name =", "/ _HDF_FILENAME tsv_gz_path = the_dir / _TSV_GZ_FILENAME try: data =", "it cannot map directly to c-types # [inferred_type->mixed,key->block0_values] [items->['flag']]\" data[\"flag\"]", "_read_tsv(path_or_buffer) -> pd.DataFrame: d = pd.read_csv(path_or_buffer, sep=\"\\t\", header=0, dtype=str) top_left_cell", "_split_values_flags(series: pd.Series) -> pd.DataFrame: split = series.str.split(\" \") df =", "= _read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], ) # Replace", "new_index = index_df.set_index(index.names).index return new_index def _read_tsv(path_or_buffer) -> pd.DataFrame: d", "header=0, dtype=str) top_left_cell = d.columns[0] row_dimension_names, header_dimension_name = top_left_cell.split(\"\\\\\") 
row_dimension_names", "(issue #3) # # Doing it at this point so", "d.columns assert all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels( [level.str.strip() for level in d.index.levels],", "return df def _set_multiindex_dtype(index, level, type_): index_df = index.to_frame() index_df[level]", "index_data.apply(lambda s: s.split(\",\")) d.index = pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names, ) #", "d.index.names)) d.index.set_levels( [level.str.strip() for level in d.index.levels], inplace=True ) d", "the_dir / _TSV_GZ_FILENAME try: data = pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except FileNotFoundError:", "l: l[0] if l else None), \"flag\": split.apply(lambda l: l[1]", "index.to_frame() index_df[level] = index_df[level].astype(type_) new_index = index_df.set_index(index.names).index return new_index def", "d.columns[0] row_dimension_names, header_dimension_name = top_left_cell.split(\"\\\\\") row_dimension_names = row_dimension_names.split(\",\") index_data =", "np.nan d[\"value\"] = d[\"value\"].astype(float) if \"time\" in d.index.names: time_strings =", "f: return _read_tsv(f) def _download_tsv_gz(url, dst_dir): path = dst_dir /", "path = dst_dir / _TSV_GZ_FILENAME _download_file(url, path) def _read(the_dir): hdf_path", "_read(the_dir): hdf_path = the_dir / _HDF_FILENAME tsv_gz_path = the_dir /", "for s in time_strings) if all(matches_year): d.index = _set_multiindex_dtype(d.index, \"time\",", "is saved in the HDF # file as a string,", "try: data = pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except FileNotFoundError: data = _read_tsv_gz(tsv_gz_path)", "= _split_values_flags(d) d.loc[d[\"value\"] == \":\", \"value\"] = np.nan d[\"value\"] =", "as PyTables will pickle object types # that it cannot", "cannot handle multidimensional column labels d = d.stack() assert set(d.apply(type))", "null flag is saved in the HDF # file as", "eust.core import _download_file, conf _DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\") 
_YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\")", "= series.str.split(\" \") df = pd.DataFrame( { \"value\": split.apply(lambda l:", "d = _split_values_flags(d) d.loc[d[\"value\"] == \":\", \"value\"] = np.nan d[\"value\"]", "= d[top_left_cell] del d[top_left_cell] assert len(set(index_data)) == len(index_data) # no", "= d.columns[0] row_dimension_names, header_dimension_name = top_left_cell.split(\"\\\\\") row_dimension_names = row_dimension_names.split(\",\") index_data", "duplicates assert len(row_dimension_names) >= 1 d.columns.name = header_dimension_name index_data =", "as pd import numpy as np from eust.core import _download_file,", "performance may suffer as PyTables will pickle object types #", "if \"time\" in d.index.names: time_strings = d.index.unique(\"time\") matches_year = (_YEAR_RE.match(s)", "= index.to_frame() index_df[level] = index_df[level].astype(type_) new_index = index_df.set_index(index.names).index return new_index", ") d = _split_values_flags(d) d.loc[d[\"value\"] == \":\", \"value\"] = np.nan", "matches_year = (_YEAR_RE.match(s) for s in time_strings) if all(matches_year): d.index", "# Replace empty flags by None (issue #3) # #", "index_data = d[top_left_cell] del d[top_left_cell] assert len(set(index_data)) == len(index_data) #", "new_index def _read_tsv(path_or_buffer) -> pd.DataFrame: d = pd.read_csv(path_or_buffer, sep=\"\\t\", header=0,", "if l else None), \"flag\": split.apply(lambda l: l[1] if l", "[level.str.strip() for level in d.index.levels], inplace=True ) d = _split_values_flags(d)", "flag is saved in the HDF # file as a", "re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s: str) -> bool: return bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series:", "def _split_values_flags(series: pd.Series) -> pd.DataFrame: split = series.str.split(\" \") df", "file as a string, for performance reasons. 
# This is", "d[\"value\"].astype(float) if \"time\" in d.index.names: time_strings = d.index.unique(\"time\") matches_year =", "time_strings) if all(matches_year): d.index = _set_multiindex_dtype(d.index, \"time\", int) d =", "= index_df[level].astype(type_) new_index = index_df.set_index(index.names).index return new_index def _read_tsv(path_or_buffer) ->", "\"time\", int) d = d.sort_index() return d _TSV_GZ_FILENAME = \"data.tsv.gz\"", "dst_dir / _TSV_GZ_FILENAME _download_file(url, path) def _read(the_dir): hdf_path = the_dir", "s: s.split(\",\")) d.index = pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names, ) # cannot", "in the HDF # file as a string, for performance", "series.str.split(\" \") df = pd.DataFrame( { \"value\": split.apply(lambda l: l[0]", "= header_dimension_name index_data = index_data.apply(lambda s: s.split(\",\")) d.index = pd.MultiIndex.from_arrays(", "d.index.unique(\"time\") matches_year = (_YEAR_RE.match(s) for s in time_strings) if all(matches_year):", "top_left_cell = d.columns[0] row_dimension_names, header_dimension_name = top_left_cell.split(\"\\\\\") row_dimension_names = row_dimension_names.split(\",\")", "df def _set_multiindex_dtype(index, level, type_): index_df = index.to_frame() index_df[level] =", "PerformanceWarning: # \"your performance may suffer as PyTables will pickle", "except FileNotFoundError: data = _read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"],", "d.index.names: time_strings = d.index.unique(\"time\") matches_year = (_YEAR_RE.match(s) for s in", "== {str} assert isinstance(d, pd.Series), d.columns assert all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels(", "def _read(the_dir): hdf_path = the_dir / _HDF_FILENAME tsv_gz_path = the_dir", "index_df = index.to_frame() index_df[level] = index_df[level].astype(type_) new_index = index_df.set_index(index.names).index return", "_TSV_GZ_FILENAME 
_download_file(url, path) def _read(the_dir): hdf_path = the_dir / _HDF_FILENAME", "l[1] if l and len(l) > 1 else None), }", "import numpy as np from eust.core import _download_file, conf _DIMENSION_NAME_RE", "to c-types # [inferred_type->mixed,key->block0_values] [items->['flag']]\" data[\"flag\"] = data[\"flag\"].replace({\"\": None}) return", "type_): index_df = index.to_frame() index_df[level] = index_df[level].astype(type_) new_index = index_df.set_index(index.names).index", "l else None), \"flag\": split.apply(lambda l: l[1] if l and", "all(matches_year): d.index = _set_multiindex_dtype(d.index, \"time\", int) d = d.sort_index() return", "point so that the null flag is saved in the", "= \"eurostat_table\" def _read_tsv_gz(path_or_buffer) -> pd.DataFrame: with gzip.open(path_or_buffer, \"rb\") as", "in time_strings) if all(matches_year): d.index = _set_multiindex_dtype(d.index, \"time\", int) d", ") # cannot handle multidimensional column labels d = d.stack()", "s in time_strings) if all(matches_year): d.index = _set_multiindex_dtype(d.index, \"time\", int)", "dst_dir): path = dst_dir / _TSV_GZ_FILENAME _download_file(url, path) def _read(the_dir):", "gzip.open(path_or_buffer, \"rb\") as f: return _read_tsv(f) def _download_tsv_gz(url, dst_dir): path", ">= 1 d.columns.name = header_dimension_name index_data = index_data.apply(lambda s: s.split(\",\"))", "top_left_cell.split(\"\\\\\") row_dimension_names = row_dimension_names.split(\",\") index_data = d[top_left_cell] del d[top_left_cell] assert", "the_dir / _HDF_FILENAME tsv_gz_path = the_dir / _TSV_GZ_FILENAME try: data", "\":\", \"value\"] = np.nan d[\"value\"] = d[\"value\"].astype(float) if \"time\" in", "# # Doing it at this point so that the", "def _read_tsv_gz(path_or_buffer) -> pd.DataFrame: with gzip.open(path_or_buffer, \"rb\") as f: return", "cannot map directly to c-types # [inferred_type->mixed,key->block0_values] [items->['flag']]\" data[\"flag\"] =", "that it cannot map directly to c-types # 
[inferred_type->mixed,key->block0_values] [items->['flag']]\"", "_read_tsv(f) def _download_tsv_gz(url, dst_dir): path = dst_dir / _TSV_GZ_FILENAME _download_file(url,", "d.loc[d[\"value\"] == \":\", \"value\"] = np.nan d[\"value\"] = d[\"value\"].astype(float) if", "Replace empty flags by None (issue #3) # # Doing", "tsv_gz_path = the_dir / _TSV_GZ_FILENAME try: data = pd.read_hdf(hdf_path, _HDF_TABLE_PATH)", "flags by None (issue #3) # # Doing it at", "/ _TSV_GZ_FILENAME _download_file(url, path) def _read(the_dir): hdf_path = the_dir /", "_read_tsv_gz(path_or_buffer) -> pd.DataFrame: with gzip.open(path_or_buffer, \"rb\") as f: return _read_tsv(f)", "str) -> bool: return bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series: pd.Series) -> pd.DataFrame:", "d.index.set_levels( [level.str.strip() for level in d.index.levels], inplace=True ) d =", "and len(l) > 1 else None), } ) return df", "-> pd.DataFrame: split = series.str.split(\" \") df = pd.DataFrame( {", "index_df.set_index(index.names).index return new_index def _read_tsv(path_or_buffer) -> pd.DataFrame: d = pd.read_csv(path_or_buffer,", "assert all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels( [level.str.strip() for level in d.index.levels], inplace=True", "in d.index.names: time_strings = d.index.unique(\"time\") matches_year = (_YEAR_RE.match(s) for s", "gzip import pandas as pd import numpy as np from", "# that it cannot map directly to c-types # [inferred_type->mixed,key->block0_values]", "multidimensional column labels d = d.stack() assert set(d.apply(type)) == {str}", "hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], ) # Replace empty flags by", "} ) return df def _set_multiindex_dtype(index, level, type_): index_df =", "level, type_): index_df = index.to_frame() index_df[level] = index_df[level].astype(type_) new_index =", "(_YEAR_RE.match(s) for s in time_strings) if all(matches_year): d.index = 
_set_multiindex_dtype(d.index,", "= \"data.h5\" _HDF_TABLE_PATH = \"eurostat_table\" def _read_tsv_gz(path_or_buffer) -> pd.DataFrame: with", "= (_YEAR_RE.match(s) for s in time_strings) if all(matches_year): d.index =", "string, for performance reasons. # This is a pandas PerformanceWarning:", "_TSV_GZ_FILENAME try: data = pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except FileNotFoundError: data =", "split.apply(lambda l: l[0] if l else None), \"flag\": split.apply(lambda l:", "pickle object types # that it cannot map directly to", "# This is a pandas PerformanceWarning: # \"your performance may", "pandas as pd import numpy as np from eust.core import", "= row_dimension_names.split(\",\") index_data = d[top_left_cell] del d[top_left_cell] assert len(set(index_data)) ==", "dtype=str) top_left_cell = d.columns[0] row_dimension_names, header_dimension_name = top_left_cell.split(\"\\\\\") row_dimension_names =", "pd.DataFrame: split = series.str.split(\" \") df = pd.DataFrame( { \"value\":", "# no duplicates assert len(row_dimension_names) >= 1 d.columns.name = header_dimension_name", "def _download_tsv_gz(url, dst_dir): path = dst_dir / _TSV_GZ_FILENAME _download_file(url, path)", "\"eurostat_table\" def _read_tsv_gz(path_or_buffer) -> pd.DataFrame: with gzip.open(path_or_buffer, \"rb\") as f:", "d.index.levels], inplace=True ) d = _split_values_flags(d) d.loc[d[\"value\"] == \":\", \"value\"]", "= re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s: str) -> bool:", "no duplicates assert len(row_dimension_names) >= 1 d.columns.name = header_dimension_name index_data", "_DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s: str) ->", "for performance reasons. # This is a pandas PerformanceWarning: #", "# file as a string, for performance reasons. 
# This", "-*- coding: utf-8 -*- import re import gzip import pandas", "1 d.columns.name = header_dimension_name index_data = index_data.apply(lambda s: s.split(\",\")) d.index", "complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], ) # Replace empty flags by None (issue", "complib=conf[\"hdf_complib\"], ) # Replace empty flags by None (issue #3)", "object types # that it cannot map directly to c-types", "_download_file, conf _DIMENSION_NAME_RE = re.compile(r\"^[a-z_0-9]+$\") _YEAR_RE = re.compile(r\"^(1|2)[0-9]{3}$\") def _is_valid_dimension_name(s:", "def _is_valid_dimension_name(s: str) -> bool: return bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series: pd.Series)", "\"your performance may suffer as PyTables will pickle object types", "_TSV_GZ_FILENAME = \"data.tsv.gz\" _HDF_FILENAME = \"data.h5\" _HDF_TABLE_PATH = \"eurostat_table\" def", "return bool(_DIMENSION_NAME_RE.match(s)) def _split_values_flags(series: pd.Series) -> pd.DataFrame: split = series.str.split(\"", "header_dimension_name = top_left_cell.split(\"\\\\\") row_dimension_names = row_dimension_names.split(\",\") index_data = d[top_left_cell] del", "c-types # [inferred_type->mixed,key->block0_values] [items->['flag']]\" data[\"flag\"] = data[\"flag\"].replace({\"\": None}) return data", "names=row_dimension_names, ) # cannot handle multidimensional column labels d =", "{ \"value\": split.apply(lambda l: l[0] if l else None), \"flag\":", "None), \"flag\": split.apply(lambda l: l[1] if l and len(l) >", "/ _TSV_GZ_FILENAME try: data = pd.read_hdf(hdf_path, _HDF_TABLE_PATH) except FileNotFoundError: data", "hdf_path = the_dir / _HDF_FILENAME tsv_gz_path = the_dir / _TSV_GZ_FILENAME", "_HDF_FILENAME tsv_gz_path = the_dir / _TSV_GZ_FILENAME try: data = pd.read_hdf(hdf_path,", "level in d.index.levels], inplace=True ) d = _split_values_flags(d) d.loc[d[\"value\"] ==", "sep=\"\\t\", header=0, dtype=str) top_left_cell = d.columns[0] row_dimension_names, header_dimension_name = 
top_left_cell.split(\"\\\\\")", "as f: return _read_tsv(f) def _download_tsv_gz(url, dst_dir): path = dst_dir", "1 else None), } ) return df def _set_multiindex_dtype(index, level,", "# -*- coding: utf-8 -*- import re import gzip import", "index_data = index_data.apply(lambda s: s.split(\",\")) d.index = pd.MultiIndex.from_arrays( list(zip(*index_data)), names=row_dimension_names,", "FileNotFoundError: data = _read_tsv_gz(tsv_gz_path) data.to_hdf( hdf_path, _HDF_TABLE_PATH, complevel=conf[\"hdf_complevel\"], complib=conf[\"hdf_complib\"], )", "else None), \"flag\": split.apply(lambda l: l[1] if l and len(l)", "pd.Series), d.columns assert all(map(_is_valid_dimension_name, d.index.names)) d.index.set_levels( [level.str.strip() for level in", "return new_index def _read_tsv(path_or_buffer) -> pd.DataFrame: d = pd.read_csv(path_or_buffer, sep=\"\\t\",", "numpy as np from eust.core import _download_file, conf _DIMENSION_NAME_RE =", "in d.index.levels], inplace=True ) d = _split_values_flags(d) d.loc[d[\"value\"] == \":\",", ") return df def _set_multiindex_dtype(index, level, type_): index_df = index.to_frame()" ]
[ "--force_train while launching script.') tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir, config_timeout_seconds=1): \"\"\"Setups directory", "opt def get_lr_opt_global_step(): \"\"\"Intializes learning rate, optimizer and global step.\"\"\"", "the config. \"\"\" lr_params = optimizer_config.LR # pylint: disable=g-long-lambda if", "lr_fn(lr, global_step): return manual_stepping( global_step, lr_step_boundaries, learning_rate_sequence) elif lr_params.DECAY_TYPE ==", "random_choice_noreplace( num_cycles, batch_size)[:, :cycle_len] return random_cycles def get_warmup_lr(lr, global_step, lr_params):", "ValueError('You might be overwriting a directory that already ' 'has", "[] # Reset GRU states for each video. if CONFIG.MODEL.EMBEDDER_TYPE", "absolute_import from __future__ import division from __future__ import print_function from", "across GPUs for gpu in GPUS: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus =", "keep_data: frames_list.append(frames) if optical_flow: frame_original_list.append(frame_original) n += 1 except tf.errors.OutOfRangeError:", "tf.stack( tf.split(emb_feats, 2 * num_steps, axis=0)[::2], axis=1) else: cnn_feats =", "batch_summaries = tf.concat(batch_list, axis=1) tf.summary.image('train_batch', batch_summaries, step=global_step) def visualize_nearest_neighbours(model, data,", "launching script.') tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir, config_timeout_seconds=1): \"\"\"Setups directory for evaluation.\"\"\"", "= np.arange(step - (num_steps - 1) * stride, step +", "config_file: config_dict = json.load(config_file) if config_dict is None: time.sleep(config_timeout_seconds) else:", "cnn = model['cnn'] emb = model['emb'] embs_list = [] labels_list", "num_steps is None: if training: num_steps = CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS", "def get_cnn_feats(cnn, data, training, num_steps=None): \"\"\"Passes data through base CNN.\"\"\"", "def setup_eval_dir(logdir, 
config_timeout_seconds=1): \"\"\"Setups directory for evaluation.\"\"\" tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs'))", "< warmup_steps_int, tf.float32) lr = (1.0 - is_warmup) * lr", "the correct learning phase before calling function f.\"\"\" def wrapper(*args,", "'MomentumOptimizer': opt = tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9) else: raise ValueError('Optimizer %s", "= tf.unstack(image_list[i], num=num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step] nn_img_list =", "\"\"\"Simple timer for measuring elapsed time.\"\"\" def __init__(self): self.reset() def", "with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file) CONFIG.update(config_dict) train_logs_dir", "emb_feats[i] if 'tcn' in CONFIG.TRAINING_ALGO: img_list = tf.unstack(image_list[i], num=2 *", "else: break CONFIG.update(config_dict) def get_data(iterator): \"\"\"Return a data dict which", "or overwrite: logging.info( 'Using the existing passed in config as", "frame_original_list if keep_labels: dataset['labels'] = labels_list # Reset statefulness to", "not have training argument.') training = kwargs['training'] if training: #", "CONFIG.OPTICALFLOW: frames = data['video_frames'] else: frames = data['frames'] image_list =", "im_list], axis=0), axis=0) tf.summary.image('%s/nn' % split, summary_im, step=global_step) # Convert", "for alignment.\"\"\" random_cycles = random_choice_noreplace( num_cycles, batch_size)[:, :cycle_len] return random_cycles", "lr # Minimally adapted from Tensorflow object_detection code. 
def manual_stepping(global_step,", "is a dict we can insert multiple modular networks in", "= model['emb'] embs_list = [] labels_list = [] steps_list =", "elapsed(self): return time.time() - self.time def done(self, target_interval): return self.elapsed()", "image_list = tf.unstack(frames, num=batch_size, axis=0) if 'tcn' in CONFIG.TRAINING_ALGO: im_list", "data, training, num_steps=None): \"\"\"Passes data through base CNN.\"\"\" if num_steps", "seq_labels[~np.isnan(embs).any(axis=1)] names = names[~np.isnan(embs).any(axis=1)] seq_lens = seq_lens[~np.isnan(embs).any(axis=1)] steps = steps[~np.isnan(embs).any(axis=1)]", "warmup_lr = lr_params.INITIAL_LR * warmup_percent_done is_warmup = tf.cast(global_steps_int < warmup_steps_int,", "config = None if os.path.exists(config_path): with open(config_path) as f: config", "if optimizer_config.TYPE == 'AdamOptimizer': opt = tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE ==", "we are embedding the whole sequence and that can take", "if restore else -1 return ckpt_manager, status, checkpoint def restore_ckpt(logdir,", "for gru_layer in emb.gru_layers: gru_layer.stateful = False return dataset def", "Only support' 'the following decay types: fixed, exp_decay, manual,' 'and", "= CONFIG.DATA.FRAME_STRIDE # We don't want to see the future.", "if optical_flow: frame_original = frame_original[~np.isnan(embs).any(axis=1)] embs = embs[~np.isnan(embs).any(axis=1)] assert len(embs)", "from config import CONFIG import json import tensorflow as tf", "frames = data['frames'] frames_list = tf.unstack(frames, num=num_steps, axis=1) frames_summaries =", "gen_plot(x, y): \"\"\"Create a pyplot, save to buffer and return", "= tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore(ckpt_manager.latest_checkpoint) return", "# We don't want to see the future. steps =", "Make Recurrent Layers stateful, set batch size. 
# We do", "following' 'optimizers: AdamOptimizer, MomentumOptimizer .') return opt def get_lr_opt_global_step(): \"\"\"Intializes", "frame_original_list.append(frame_original) n += 1 except tf.errors.OutOfRangeError: logging.info('Finished embedding the dataset.')", "len(logical_gpus), \"Logical GPUs\"]) except RuntimeError as e: # Memory growth", "= np.exp(np.array(w) / t) dist = e / np.sum(e) return", ".') return opt def get_lr_opt_global_step(): \"\"\"Intializes learning rate, optimizer and", "None: time.sleep(config_timeout_seconds) else: break CONFIG.update(config_dict) def get_data(iterator): \"\"\"Return a data", "chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len * [seq_len]) all_labels = data['frame_labels'].numpy()[0] name", "to False to use models in inference mode. tf.keras.backend.set_learning_phase(0) cnn", "os.path.join(logdir, 'train.logs') if os.path.exists(train_logs_dir) and not force_train: raise ValueError('You might", "tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE == 'MomentumOptimizer': opt = tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9)", "visualize_nearest_neighbours(model, data, global_step, batch_size, num_steps, num_frames_per_step, split): \"\"\"Visualize nearest neighbours", "sim_matrix = sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' % split, np.expand_dims(sim_matrix, axis=3), step=global_step) def", "num_steps))))) single_steps = np.maximum(0, single_steps) single_steps = np.minimum(seq_len, single_steps) return", "global_step, lr_params): \"\"\"Returns learning rate during warm up phase.\"\"\" if", "batch to be passed and we don't want to automatically", "training: # Set learning_phase to True to use models in", "in CONFIG.TRAINING_ALGO: im_list = [image_list[0] [num_frames_per_step - 1::num_frames_per_step][::2]] else: im_list", "create_ckpt(logdir, restore=False, **ckpt_objects): # Since model is a dict we", 
"get_data(iterator): \"\"\"Return a data dict which contains all the requested", "- i * frames_per_batch else: num_steps = frames_per_batch curr_idx =", "step + stride, stride) return steps def get_indices(curr_idx, num_steps, seq_len):", "steps = np.concatenate(steps, axis=0) seq_lens = np.concatenate(seq_lens, axis=0) names =", "one batch to be passed and we don't want to", "np.zeros( (batch_size-1, num_steps, num_steps), dtype=np.float32) for i in range(1, batch_size):", "depth=num_boundaries)) def get_lr_fn(optimizer_config): \"\"\"Returns function that provides current learning rate", "names.append(seq_len * [name]) seq_label = data['seq_labels'].numpy()[0] seq_labels.append(seq_len * [seq_label]) labels.append(all_labels)", "done(self, target_interval): return self.elapsed() >= target_interval def reset(self): self.time =", "in config as no config.json file exists in ' '%s',", "**ckpt_objects): \"\"\"Create and restore checkpoint (if one exists on the", "len(steps) embs_list.append(embs) if keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels) steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names) if", "math import os import time from absl import flags from", "seq_len = seq_len.numpy()[0] num_batches = int(math.ceil(float(seq_len)/frames_per_batch)) for i in range(num_batches):", "mean_squared_distance = tf.reduce_mean( tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1) sim_matrix[i-1, j] = softmax(-1.0", "= [] names_list = [] seq_labels_list = [] if keep_data:", "from config.json that exists in %s.', logdir) with tf.io.gfile.GFile(config_path, 'r')", "def get_context_steps(step): num_steps = CONFIG.DATA.NUM_STEPS stride = CONFIG.DATA.FRAME_STRIDE # We", "type %s not supported. 
Only support' 'the following decay types:", "Set learning_phase to True to use models in training mode.", "seq_lens_list, 'steps': steps_list, 'names': names_list, 'seq_labels': seq_labels_list} if keep_data: dataset['frames']", "# Instantiate checkpoint and restore from any pre-existing checkpoint. #", "frames = [] if optical_flow: frame_original = [] # Reset", "restore=False, **ckpt_objects): # Since model is a dict we can", "def get_lr_opt_global_step(): \"\"\"Intializes learning rate, optimizer and global step.\"\"\" optimizer", "tf.config.experimental.list_physical_devices('GPU') if GPUS: if ind > -1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try:", "tf.float32) warmup_percent_done = global_steps_float / warmup_steps_float warmup_lr = lr_params.INITIAL_LR *", "(0, n) # NumPy version: np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis),", "\"\"\"Generate cycles for alignment.\"\"\" random_cycles = random_choice_noreplace( num_cycles, batch_size)[:, :cycle_len]", "im_list.append(nn_img) def vstack(im): return tf.concat(tf.unstack(im, num=num_steps), axis=1) summary_im = tf.expand_dims(tf.concat([vstack(im)", "in im_list], axis=0), axis=0) tf.summary.image('%s/nn' % split, summary_im, step=global_step) #", "== 'convgru': for gru_layer in emb.gru_layers: gru_layer.reset_states() data, chosen_steps, seq_len", "logging.debug('On sequence number %d, frames embedded %d', n, curr_idx +", "to call assign to update the learning rate. Args: optimizer_config:", "and restore from any pre-existing checkpoint. 
# Since model is", "prepare_gpu(ind=-1): ind = int(ind) GPUS = tf.config.experimental.list_physical_devices('GPU') if GPUS: if", "opt = tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9) else: raise ValueError('Optimizer %s not", "absl import logging from easydict import EasyDict import matplotlib matplotlib.use('Agg')", "axis=0) im_list.append(nn_img) def vstack(im): return tf.concat(tf.unstack(im, num=num_steps), axis=1) summary_im =", "if training: num_steps = CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS else: num_steps =", "curr_idx + num_steps))))) single_steps = np.maximum(0, single_steps) single_steps = np.minimum(seq_len,", "checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager, status,", "up phase.\"\"\" if lr_params.NUM_WARMUP_STEPS > 0: global_steps_int = tf.cast(global_step, tf.int32)", "cnn_feats = get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step * num_steps, training=False) emb_feats =", "load_config(config_path): config = None if os.path.exists(config_path): with open(config_path) as f:", "def prepare_gpu(ind=-1): ind = int(ind) GPUS = tf.config.experimental.list_physical_devices('GPU') if GPUS:", "wrapper def load_config(config_path): config = None if os.path.exists(config_path): with open(config_path)", "- (num_steps - 1) * stride, step + stride, stride)", "keep_labels and CONFIG.DATA.FRAME_LABELS num_frames_per_step = CONFIG.DATA.NUM_STEPS cnn = model['cnn'] emb", "\"\"\"Create a pyplot, save to buffer and return TB compatible", "raise ValueError('Function called with set_learning_phase decorator which' ' does not", "TF image image = tf.image.decode_png(buf.getvalue(), channels=4) # Add the batch", "target_interval def reset(self): self.time = time.time() def set_learning_phase(f): \"\"\"Sets the", "] while cond(n): try: print(n) embs = [] labels =", "= [] if keep_data: frames_list = [] if optical_flow: 
frame_original_list", "the batch dimension image = tf.expand_dims(image, 0) return image class", "frames = data['frames'] image_list = tf.unstack(frames, num=batch_size, axis=0) if 'tcn'", "= frames_per_batch curr_idx = i * frames_per_batch curr_data = {}", "config is not None, \"config file is not provided or", "f.\"\"\" def wrapper(*args, **kwargs): \"\"\"Calls the function f after setting", "batch_size, num_steps, num_frames_per_step, split): \"\"\"Visualize nearest neighbours in embedding space.\"\"\"", "provide a new logdir name in ' 'config or pass", "with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file) if config_dict", "= emb(cnn_feats, num_steps) emb_feats = tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1) query_feats", "if keep_data: frames_list.append(frames) if optical_flow: frame_original_list.append(frame_original) n += 1 except", "data, training=False, num_steps=2 * num_steps) emb_feats = emb(cnn_feats, 2 *", "tf.where( tf.greater_equal(global_step, boundaries), list(range(num_boundaries)), [0] * num_boundaries)) return tf.reduce_sum(rates *", "steps = range(curr_idx, curr_idx + num_steps) single_steps = np.concatenate([get_context_steps(step) for", "= global_steps_float / warmup_steps_float warmup_lr = lr_params.INITIAL_LR * warmup_percent_done is_warmup", "' ' %s for secs.', config_timeout_seconds) time.sleep(config_timeout_seconds) while True: with", "= [] for j in range(num_steps): curr_query_feats = tf.tile(query_feats[j:j+1], [num_steps,", "lr + is_warmup * warmup_lr return lr # Minimally adapted", "is_warmup * warmup_lr return lr # Minimally adapted from Tensorflow", "not None, \"config file is not provided or is corrupted\"", "def restore_ckpt(logdir, **ckpt_objects): \"\"\"Create and restore checkpoint (if one exists", "= checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager, status, checkpoint def to_dict(config): if isinstance(config,", "len(steps) assert len(names) == 
len(steps) embs_list.append(embs) if keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels)", "+ stride, stride) return steps def get_indices(curr_idx, num_steps, seq_len): steps", "return n < max_embs # Make Recurrent Layers stateful, set", "initialize the learning rate and the learning rate decay function.", "len(embs) == len(steps) assert len(names) == len(steps) embs_list.append(embs) if keep_labels:", "the provided config. Raises: ValueError: in case invalid params have", "idxes = get_indices(curr_idx, num_steps, seq_len) curr_data[k] = tf.gather(v, idxes, axis=1)", "'convgru': for gru_layer in emb.gru_layers: gru_layer.reset_states() data, chosen_steps, seq_len =", "stateful, set batch size. # We do this as we", "channels=4) # Add the batch dimension image = tf.expand_dims(image, 0)", "for each video. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in", "returns a function as in Eager we need to call", "= checkpoint.restore( ckpt_manager.latest_checkpoint) if restore else -1 return ckpt_manager, status,", "os import time from absl import flags from absl import", "the future. steps = np.arange(step - (num_steps - 1) *", "global_step, batch_size, num_steps, num_frames_per_step, split): \"\"\"Visualize nearest neighbours in embedding", "as config_file: config = dict([(k, to_dict(v)) for k, v in", "= lr_params.INITIAL_LR * warmup_percent_done is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32)", "not in kwargs: raise ValueError('Function called with set_learning_phase decorator which'", "learning_rate=learning_rate, momentum=0.9) else: raise ValueError('Optimizer %s not supported. 
Only support", "batch_size, cycle_len): \"\"\"Generate cycles for alignment.\"\"\" random_cycles = random_choice_noreplace( num_cycles,", "return tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries)) def get_lr_fn(optimizer_config): \"\"\"Returns function that", "in emb.gru_layers: gru_layer.reset_states() data, chosen_steps, seq_len = get_data(iterator) seq_len =", "restore else -1 return ckpt_manager, status, checkpoint def restore_ckpt(logdir, **ckpt_objects):", "split): \"\"\"Visualize nearest neighbours in embedding space.\"\"\" # Set learning_phase", "warmup_steps_int, tf.float32) lr = (1.0 - is_warmup) * lr +", "num_steps=num_frames_per_step * num_steps, training=False) emb_feats = emb(cnn_feats, num_steps) logging.debug('On sequence", "to_dict(v)) for k, v in CONFIG.items()]) json.dump(config, config_file, sort_keys=True, indent=4)", "1 except tf.errors.OutOfRangeError: logging.info('Finished embedding the dataset.') break dataset =", "names_list.append(names) if keep_data: frames_list.append(frames) if optical_flow: frame_original_list.append(frame_original) n += 1", "Need to do this as some modalities might not exist.", "learning rate based on the provided config. Raises: ValueError: in", "update the learning rate. 
Args: optimizer_config: EasyDict, contains params required", "f = lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence = [(lr_params.INITIAL_LR) * f**p for p", "global_step def create_ckpt(logdir, restore=False, **ckpt_objects): # Since model is a", "# np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy()) embs.append(emb_feats.numpy()) for", "compatible image.\"\"\" plt.figure() plt.plot(x, y) plt.title('Val Accuracy') plt.ylim(0, 1) plt.tight_layout()", "warmup_steps_int = tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float = tf.cast(global_steps_int, tf.float32) warmup_steps_float", "= optimizer.iterations learning_rate = optimizer.learning_rate return learning_rate, optimizer, global_step def", "if CONFIG.OPTICALFLOW: frames = data['video_frames'] else: frames = data['frames'] image_list", "curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy()) embs.append(emb_feats.numpy())", "axis=0) if keep_labels: labels = labels[~np.isnan(embs).any(axis=1)] assert len(embs) == len(labels)", "lr_fn: function, this can be called to return the current", "else: num_steps = frames_per_batch curr_idx = i * frames_per_batch curr_data", "tf.unstack(frames, num=num_steps, axis=1) frames_summaries = tf.concat(frames_list, axis=2) batch_list = tf.split(frames_summaries,", "'%s', logdir) with tf.io.gfile.GFile(config_path, 'w') as config_file: config = dict([(k,", "get_warmup_lr(lr, global_step, lr_params): \"\"\"Returns learning rate during warm up phase.\"\"\"", "'the following decay types: fixed, exp_decay, manual,' 'and poly.') return", "[(lr_params.INITIAL_LR) * f**p for p in range(len(lr_step_boundaries) + 1)] def", "num_steps) logging.debug('On sequence number %d, frames embedded %d', n, 
curr_idx", "emb(cnn_feats, 2 * num_steps) emb_feats = tf.stack( tf.split(emb_feats, 2 *", "i * frames_per_batch curr_data = {} for k, v in", "* [name]) seq_label = data['seq_labels'].numpy()[0] seq_labels.append(seq_len * [seq_label]) labels.append(all_labels) embs", "1) plt.tight_layout() buf = io.BytesIO() plt.savefig(buf, format='png') buf.seek(0) # Convert", "Generate m random permuations of range (0, n) # NumPy", "as e: # Memory growth must be set before GPUs", "rates): boundaries = [0] + boundaries num_boundaries = len(boundaries) rate_index", "labels_list = [] steps_list = [] seq_lens_list = [] names_list", "indent=4) else: logging.info( 'Using config from config.json that exists in", "= iterator.get_next() return data, data['chosen_steps'], data['seq_lens'] @tf.function def get_cnn_feats(cnn, data,", "to TF image image = tf.image.decode_png(buf.getvalue(), channels=4) # Add the", "setting proper learning phase.\"\"\" if 'training' not in kwargs: raise", "None: return True else: return n < max_embs # Make", "import numpy as np import matplotlib.pyplot as plt # pylint:", "return learning_rate, optimizer, global_step def create_ckpt(logdir, restore=False, **ckpt_objects): # Since", "directory that already ' 'has train_logs. Please provide a new", "ckpt_manager, status, checkpoint def restore_ckpt(logdir, **ckpt_objects): \"\"\"Create and restore checkpoint", "to automatically # reset hidden states after each batch. 
if", "def load_config(config_path): config = None if os.path.exists(config_path): with open(config_path) as", "= [] labels_list = [] steps_list = [] seq_lens_list =", "to sleep ' ' %s for secs.', config_timeout_seconds) time.sleep(config_timeout_seconds) while", "'and poly.') return (lambda lr, global_step: get_warmup_lr(lr_fn(lr, global_step), global_step, lr_params))", "if 'training' not in kwargs: raise ValueError('Function called with set_learning_phase", "data['frames'] frames_list = tf.unstack(frames, num=num_steps, axis=1) frames_summaries = tf.concat(frames_list, axis=2)", "= np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx + num_steps))))) single_steps = np.maximum(0, single_steps)", "= tf.cast(warmup_steps_int, tf.float32) warmup_percent_done = global_steps_float / warmup_steps_float warmup_lr =", "n < max_embs # Make Recurrent Layers stateful, set batch", "emb = model['emb'] embs_list = [] labels_list = [] steps_list", "lr_params.INITIAL_LR * warmup_percent_done is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32) lr", "division from __future__ import print_function from config import CONFIG import", "'manual': lr_step_boundaries = [int(x) for x in lr_params.MANUAL_LR_STEP_BOUNDARIES] f =", "step=global_step) def softmax(w, t=1.0): e = np.exp(np.array(w) / t) dist", "if GPUS: if ind > -1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try: #", "fixed, exp_decay, manual,' 'and poly.') return (lambda lr, global_step: get_warmup_lr(lr_fn(lr,", "tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path = os.path.join(logdir, 'config.json') while not tf.io.gfile.exists(config_path):", "import absolute_import from __future__ import division from __future__ import print_function", "\"\"\"Passes data through base CNN.\"\"\" if num_steps is None: if", "as no config.json file exists in ' '%s', logdir) with", "Going to sleep ' ' %s for secs.', 
config_timeout_seconds) time.sleep(config_timeout_seconds)", "GPUs for gpu in GPUS: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus = tf.config.experimental.list_logical_devices('GPU')", "labels_list # Reset statefulness to recurrent layers for other evaluation", "def __init__(self): self.reset() def elapsed(self): return time.time() - self.time def", "Minimally adapted from Tensorflow object_detection code. def manual_stepping(global_step, boundaries, rates):", "Since model is a dict we can insert multiple modular", "= [] names = [] seq_labels = [] if keep_data:", "step.\"\"\" optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step = optimizer.iterations learning_rate =", "m random permuations of range (0, n) # NumPy version:", "= CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS cnn.num_steps = num_steps cnn_feats = cnn(data['frames'])", "= CONFIG.DATA.NUM_STEPS stride = CONFIG.DATA.FRAME_STRIDE # We don't want to", "models in inference mode. 
tf.keras.backend.set_learning_phase(0) return f(*args, **kwargs) return wrapper", "json import tensorflow as tf import numpy as np import", "ind > -1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try: # Currently, memory growth", "lr_step_boundaries, learning_rate_sequence) elif lr_params.DECAY_TYPE == 'fixed': def lr_fn(lr, global_step): return", "for training.\"\"\" tf.io.gfile.makedirs(logdir) config_path = os.path.join(logdir, 'config.json') if not os.path.exists(config_path)", "tf.int32) warmup_steps_int = tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float = tf.cast(global_steps_int, tf.float32)", "np.concatenate(names, axis=0) seq_labels = np.concatenate(seq_labels, axis=0) if keep_data: frames.append(data['frames'].numpy()[0]) frames", "k, v in config.items()]) else: return config def setup_train_dir(logdir, overwrite=False,", "contains all the requested sequences.\"\"\" data = iterator.get_next() return data,", "= sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' % split, np.expand_dims(sim_matrix, axis=3), step=global_step) def softmax(w,", "return lr_params.INITIAL_LR elif lr_params.DECAY_TYPE == 'poly': def lr_fn(lr, global_step): return", "train_logs. Please provide a new logdir name in ' 'config", "== 'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful = True gru_layer.input_spec[0].shape", "dataset = {'embs': embs_list, 'seq_lens': seq_lens_list, 'steps': steps_list, 'names': names_list,", "curr_data[k] = v cnn_feats = get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step * num_steps,", "disable=g-long-lambda if lr_params.DECAY_TYPE == 'exp_decay': def lr_fn(lr, global_step): return tf.train.exponential_decay(", "if training: # Set learning_phase to True to use models", "= None if os.path.exists(config_path): with open(config_path) as f: config =", "phase.\"\"\" if 'training' not in kwargs: raise ValueError('Function called with", "to False to use models in inference mode. 
tf.keras.backend.set_learning_phase(0) return", "+ 1) * frames_per_batch > seq_len: num_steps = seq_len -", "ckpt_manager.latest_checkpoint) if restore else -1 return ckpt_manager, status, checkpoint def", "MomentumOptimizer .') return opt def get_lr_opt_global_step(): \"\"\"Intializes learning rate, optimizer", "overwrite=False, force_train=True): \"\"\"Setups directory for training.\"\"\" tf.io.gfile.makedirs(logdir) config_path = os.path.join(logdir,", "'w'), curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy())", "params required to initialize the learning rate and the learning", "'train.logs') if os.path.exists(train_logs_dir) and not force_train: raise ValueError('You might be", "if keep_labels: labels = labels[~np.isnan(embs).any(axis=1)] assert len(embs) == len(labels) seq_labels", "np.sum(e) return dist def random_choice_noreplace(m, n, axis=-1): # Generate m", "ValueError: in case invalid params have been passed in the", "tf.reduce_mean( tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1) sim_matrix[i-1, j] = softmax(-1.0 * mean_squared_distance)", "if keep_labels: dataset['labels'] = labels_list # Reset statefulness to recurrent", "in CONFIG.TRAINING_ALGO: cnn_feats = get_cnn_feats( cnn, data, training=False, num_steps=2 *", "manual_stepping( global_step, lr_step_boundaries, learning_rate_sequence) elif lr_params.DECAY_TYPE == 'fixed': def lr_fn(lr,", "use models in training mode. tf.keras.backend.set_learning_phase(1) else: # Set learning_phase", "while cond(n): try: print(n) embs = [] labels = []", "rate and the learning rate decay function. 
Returns: lr_fn: function,", "**ckpt_objects): # Since model is a dict we can insert", "if os.path.exists(config_path): with open(config_path) as f: config = json.load(f) assert", "candidate_feats), axis=1) sim_matrix[i-1, j] = softmax(-1.0 * mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img", "self.time = time.time() def set_learning_phase(f): \"\"\"Sets the correct learning phase", "learning_rate = optimizer.learning_rate return learning_rate, optimizer, global_step def create_ckpt(logdir, restore=False,", "else: frames = data['frames'] image_list = tf.unstack(frames, num=batch_size, axis=0) if", "= [] seq_labels = [] if keep_data: frames = []", "in steps]) single_steps = np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx + num_steps))))) single_steps", "data, data['chosen_steps'], data['seq_lens'] @tf.function def get_cnn_feats(cnn, data, training, num_steps=None): \"\"\"Passes", "tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path = os.path.join(logdir, 'config.json') while not tf.io.gfile.exists(config_path): logging.info('Waiting", "called with set_learning_phase decorator which' ' does not have training", "**kwargs) return wrapper def load_config(config_path): config = None if os.path.exists(config_path):", "rate decay function. 
Returns: lr_fn: function, this can be called", "Eager we need to call assign to update the learning", "json.load(config_file) CONFIG.update(config_dict) train_logs_dir = os.path.join(logdir, 'train.logs') if os.path.exists(train_logs_dir) and not", "list(range(num_boundaries)), [0] * num_boundaries)) return tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries)) def", "import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top import io import", "tf.train.polynomial_decay( lr, global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0, cycle=False) else: raise ValueError('Learning", "a dict we can insert multiple modular networks in this", "np import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top import io", "support the following' 'optimizers: AdamOptimizer, MomentumOptimizer .') return opt def", "seq_labels.append(seq_len * [seq_label]) labels.append(all_labels) embs = np.concatenate(embs, axis=0) labels =", "contains params required to initialize the learning rate and the", "Reset GRU states for each video. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru':", "training.\"\"\" tf.io.gfile.makedirs(logdir) config_path = os.path.join(logdir, 'config.json') if not os.path.exists(config_path) or", "CONFIG.DATA.NUM_STEPS stride = CONFIG.DATA.FRAME_STRIDE # We don't want to see", "= cnn(data['frames']) return cnn_feats def get_context_steps(step): num_steps = CONFIG.DATA.NUM_STEPS stride", "summary_image doesn't take float64 sim_matrix = sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' % split,", "= [] steps = [] seq_lens = [] names =", "cnn(data['frames']) return cnn_feats def get_context_steps(step): num_steps = CONFIG.DATA.NUM_STEPS stride =", "/ t) dist = e / np.sum(e) return dist def", "def elapsed(self): return time.time() - self.time def done(self, target_interval): return", "use models in inference mode. 
tf.keras.backend.set_learning_phase(0) return f(*args, **kwargs) return", "num_steps = frames_per_batch curr_idx = i * frames_per_batch curr_data =", "- 1::num_frames_per_step] nn_img_list = [] for j in range(num_steps): curr_query_feats", "modalities might not exist. if len(v.shape) > 1 and v.shape[1]", "%s for secs.', config_timeout_seconds) time.sleep(config_timeout_seconds) while True: with tf.io.gfile.GFile(config_path, 'r')", "\"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\"]) except RuntimeError as e: #", "while True: with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file)", "get_optimizer(optimizer_config, learning_rate): \"\"\"Returns optimizer based on config and learning rate.\"\"\"", "# np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy',", "growth needs to be the same across GPUs for gpu", "= os.path.join(logdir, 'config.json') if not os.path.exists(config_path) or overwrite: logging.info( 'Using", "= [] seq_lens_list = [] names_list = [] seq_labels_list =", "dataset.') break dataset = {'embs': embs_list, 'seq_lens': seq_lens_list, 'steps': steps_list,", "not exist. if len(v.shape) > 1 and v.shape[1] != 0:", "other evaluation tasks. 
if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in", "axis=1) else: cnn_feats = get_cnn_feats(cnn, data, training=False) emb_feats = emb(cnn_feats,", "frames_list if optical_flow: dataset['frames_original'] = frame_original_list if keep_labels: dataset['labels'] =", "num_steps) emb_feats = tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1) query_feats = emb_feats[0]", "return the current learning rate based on the provided config.", "CONFIG.TRAINING_ALGO: im_list = [image_list[0] [num_frames_per_step - 1::num_frames_per_step][::2]] else: im_list =", "for f in callbacks: f(np.concatenate(embs), data, chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len", "softmax(w, t=1.0): e = np.exp(np.array(w) / t) dist = e", "True) logical_gpus = tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\"])", "Please provide a new logdir name in ' 'config or", "* f**p for p in range(len(lr_step_boundaries) + 1)] def lr_fn(lr,", "time.\"\"\" def __init__(self): self.reset() def elapsed(self): return time.time() - self.time", "\"\"\"Get embeddings from a one epoch iterator.\"\"\" keep_labels = keep_labels", "batch_summaries, step=global_step) def visualize_nearest_neighbours(model, data, global_step, batch_size, num_steps, num_frames_per_step, split):", "have training argument.') training = kwargs['training'] if training: # Set", "io.BytesIO() plt.savefig(buf, format='png') buf.seek(0) # Convert PNG buffer to TF", "axis=0) steps = np.concatenate(steps, axis=0) seq_lens = np.concatenate(seq_lens, axis=0) names", "save to buffer and return TB compatible image.\"\"\" plt.figure() plt.plot(x,", "ckpt_manager, status, checkpoint def to_dict(config): if isinstance(config, list): return [to_dict(c)", "%s.', logdir) with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file)", "== 'exp_decay': def lr_fn(lr, global_step): return 
tf.train.exponential_decay( lr, global_step, lr_params.EXP_DECAY_STEPS,", "= tf.unstack(image_list[i], num=2 * num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step][::2]", "+ boundaries num_boundaries = len(boundaries) rate_index = tf.reduce_max( tf.where( tf.greater_equal(global_step,", "import print_function from config import CONFIG import json import tensorflow", "called to return the current learning rate based on the", "checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore( ckpt_manager.latest_checkpoint) if restore", "= num_steps cnn_feats = cnn(data['frames']) return cnn_feats def get_context_steps(step): num_steps", "'poly': def lr_fn(lr, global_step): return tf.train.polynomial_decay( lr, global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0,", "emb.gru_layers: gru_layer.stateful = True gru_layer.input_spec[0].shape = [1, ] while cond(n):", "sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' % split, np.expand_dims(sim_matrix, axis=3), step=global_step) def softmax(w, t=1.0):", "supported. 
Only support' 'the following decay types: fixed, exp_decay, manual,'", "tf.split(emb_feats, 2 * num_steps, axis=0)[::2], axis=1) else: cnn_feats = get_cnn_feats(cnn,", "return tf.concat(tf.unstack(im, num=num_steps), axis=1) summary_im = tf.expand_dims(tf.concat([vstack(im) for im in", "dict([(k, to_dict(v)) for k, v in config.items()]) else: return config", "return manual_stepping( global_step, lr_step_boundaries, learning_rate_sequence) elif lr_params.DECAY_TYPE == 'fixed': def", "tf.unstack(image_list[i], num=2 * num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step][::2] else:", "target_interval): return self.elapsed() >= target_interval def reset(self): self.time = time.time()", "* CONFIG.DATA.NUM_STEPS else: num_steps = CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS cnn.num_steps =", "emb_feats = tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1) query_feats = emb_feats[0] if", "checkpoint. # Since model is a dict we can insert", "rate. Args: optimizer_config: EasyDict, contains params required to initialize the", "one epoch iterator.\"\"\" keep_labels = keep_labels and CONFIG.DATA.FRAME_LABELS num_frames_per_step =", "Accuracy') plt.ylim(0, 1) plt.tight_layout() buf = io.BytesIO() plt.savefig(buf, format='png') buf.seek(0)", "config from config.json that exists in %s.', logdir) with tf.io.gfile.GFile(config_path,", "(num_steps - 1) * stride, step + stride, stride) return", "= tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE == 'MomentumOptimizer': opt = tf.keras.optimizers.SGD( learning_rate=learning_rate,", "= embs[~np.isnan(embs).any(axis=1)] assert len(embs) == len(seq_lens) assert len(embs) == len(steps)", "global_step: get_warmup_lr(lr_fn(lr, global_step), global_step, lr_params)) def get_optimizer(optimizer_config, learning_rate): \"\"\"Returns optimizer", "'w'), emb_feats.numpy()) embs.append(emb_feats.numpy()) for f in callbacks: f(np.concatenate(embs), data, chosen_steps,", 
"directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore( ckpt_manager.latest_checkpoint) if restore else", "that can take # more than one batch to be", "gru_layer in emb.gru_layers: gru_layer.reset_states() data, chosen_steps, seq_len = get_data(iterator) seq_len", "c in config] elif isinstance(config, EasyDict): return dict([(k, to_dict(v)) for", "and we don't want to automatically # reset hidden states", "* num_steps) emb_feats = tf.stack( tf.split(emb_feats, 2 * num_steps, axis=0)[::2],", "return single_steps def get_embeddings_dataset(model, iterator, frames_per_batch, keep_data=False, optical_flow=False, keep_labels=True, max_embs=None,", "be passed and we don't want to automatically # reset", "checkpoint and restore from any pre-existing checkpoint. # Since model", "name in ' 'config or pass --force_train while launching script.')", "training=False) emb_feats = emb(cnn_feats, num_steps) logging.debug('On sequence number %d, frames", "decay types: fixed, exp_decay, manual,' 'and poly.') return (lambda lr,", "(i + 1) * frames_per_batch > seq_len: num_steps = seq_len", "[to_dict(c) for c in config] elif isinstance(config, EasyDict): return dict([(k,", "tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1) sim_matrix[i-1, j] = softmax(-1.0 * mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)])", "for evaluation.\"\"\" tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path = os.path.join(logdir, 'config.json') while", "n) # NumPy version: np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64)", "seq_len) curr_data[k] = tf.gather(v, idxes, axis=1) else: curr_data[k] = v", "for step in steps]) single_steps = np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx +", "def to_dict(config): if isinstance(config, list): return [to_dict(c) for c in", 
"tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file) if config_dict is", "gru_layer in emb.gru_layers: gru_layer.stateful = False return dataset def gen_plot(x,", "can take # more than one batch to be passed", "# np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy()) embs.append(emb_feats.numpy()) for f in callbacks: f(np.concatenate(embs),", "- 1::num_frames_per_step][::2]] else: im_list = [image_list[0][num_frames_per_step - 1::num_frames_per_step]] sim_matrix =", "= tf.tile(query_feats[j:j+1], [num_steps, 1]) mean_squared_distance = tf.reduce_mean( tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1)", "= model['cnn'] emb = model['emb'] embs_list = [] labels_list =", "PNG buffer to TF image image = tf.image.decode_png(buf.getvalue(), channels=4) #", "single_steps = np.maximum(0, single_steps) single_steps = np.minimum(seq_len, single_steps) return single_steps", "print_function from config import CONFIG import json import tensorflow as", "'exp_decay': def lr_fn(lr, global_step): return tf.train.exponential_decay( lr, global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE,", "tf.cast(global_steps_int, tf.float32) warmup_steps_float = tf.cast(warmup_steps_int, tf.float32) warmup_percent_done = global_steps_float /", "= [] labels = [] steps = [] seq_lens =", "plt.savefig(buf, format='png') buf.seek(0) # Convert PNG buffer to TF image", "= data['video_frames'] else: frames = data['frames'] image_list = tf.unstack(frames, num=batch_size,", "def gen_cycles(num_cycles, batch_size, cycle_len): \"\"\"Generate cycles for alignment.\"\"\" random_cycles =", "and restore checkpoint (if one exists on the path).\"\"\" #", "from Tensorflow object_detection code. 
def manual_stepping(global_step, boundaries, rates): boundaries =", "def set_learning_phase(f): \"\"\"Sets the correct learning phase before calling function", "' '%s', logdir) with tf.io.gfile.GFile(config_path, 'w') as config_file: config =", "cond(n): if max_embs is None: return True else: return n", "take float64 sim_matrix = sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' % split, np.expand_dims(sim_matrix, axis=3),", "lr_params.NUM_WARMUP_STEPS > 0: global_steps_int = tf.cast(global_step, tf.int32) warmup_steps_int = tf.constant(", "nn_img = tf.stack(nn_img_list, axis=0) im_list.append(nn_img) def vstack(im): return tf.concat(tf.unstack(im, num=num_steps),", "dataset def gen_plot(x, y): \"\"\"Create a pyplot, save to buffer", "curr_data[k] = tf.gather(v, idxes, axis=1) else: curr_data[k] = v cnn_feats", "warm up phase.\"\"\" if lr_params.NUM_WARMUP_STEPS > 0: global_steps_int = tf.cast(global_step,", "space.\"\"\" # Set learning_phase to False to use models in", "This returns a function as in Eager we need to", "gru_layer.input_spec[0].shape = [1, ] while cond(n): try: print(n) embs =", "tf.io.gfile.GFile(config_path, 'w') as config_file: config = dict([(k, to_dict(v)) for k,", "buffer to TF image image = tf.image.decode_png(buf.getvalue(), channels=4) # Add", "config_file: config = dict([(k, to_dict(v)) for k, v in CONFIG.items()])", "embs_list = [] labels_list = [] steps_list = [] seq_lens_list", "lr_params.EXP_DECAY_RATE, staircase=True)() elif lr_params.DECAY_TYPE == 'manual': lr_step_boundaries = [int(x) for", "plt.figure() plt.plot(x, y) plt.title('Val Accuracy') plt.ylim(0, 1) plt.tight_layout() buf =", "i in range(num_batches): if (i + 1) * frames_per_batch >", "Returns: lr_fn: function, this can be called to return the", "num_steps = CONFIG.DATA.NUM_STEPS stride = CONFIG.DATA.FRAME_STRIDE # We don't want", "def visualize_nearest_neighbours(model, data, global_step, batch_size, num_steps, num_frames_per_step, split): 
\"\"\"Visualize nearest", "frames[~np.isnan(embs).any(axis=1)] if optical_flow: frame_original = frame_original[~np.isnan(embs).any(axis=1)] embs = embs[~np.isnan(embs).any(axis=1)] assert", "[] labels = [] steps = [] seq_lens = []", "= len(boundaries) rate_index = tf.reduce_max( tf.where( tf.greater_equal(global_step, boundaries), list(range(num_boundaries)), [0]", "emb(cnn_feats, num_steps) emb_feats = tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1) query_feats =", "to update the learning rate. Args: optimizer_config: EasyDict, contains params", "'AdamOptimizer': opt = tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE == 'MomentumOptimizer': opt =", "if keep_data: dataset['frames'] = frames_list if optical_flow: dataset['frames_original'] = frame_original_list", "0: global_steps_int = tf.cast(global_step, tf.int32) warmup_steps_int = tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32)", "tf.float32) lr = (1.0 - is_warmup) * lr + is_warmup", "query_feats = emb_feats[0] if CONFIG.OPTICALFLOW: frames = data['video_frames'] else: frames", "in CONFIG.items()]) json.dump(config, config_file, sort_keys=True, indent=4) else: logging.info( 'Using config", "tf.errors.OutOfRangeError: logging.info('Finished embedding the dataset.') break dataset = {'embs': embs_list,", "permuations of range (0, n) # NumPy version: np.random.rand(m,n).argsort(axis=axis) return", "each batch. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers:", "config to exist. 
Going to sleep ' ' %s for", "num_frames_per_step = CONFIG.DATA.NUM_STEPS cnn = model['cnn'] emb = model['emb'] embs_list", "get_cnn_feats(cnn, data, training=False) emb_feats = emb(cnn_feats, num_steps) emb_feats = tf.stack(tf.split(emb_feats,", "except RuntimeError as e: # Memory growth must be set", "return image class Stopwatch(object): \"\"\"Simple timer for measuring elapsed time.\"\"\"", "overwrite: logging.info( 'Using the existing passed in config as no", "states after each batch. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer", "for other evaluation tasks. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer", "= tf.cast(global_steps_int < warmup_steps_int, tf.float32) lr = (1.0 - is_warmup)", "open(config_path) as f: config = json.load(f) assert config is not", "* mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img = tf.stack(nn_img_list, axis=0) im_list.append(nn_img) def vstack(im):", "= {} for k, v in data.items(): # Need to", "1::num_frames_per_step][::2] else: img_list = tf.unstack(image_list[i], num=num_steps * num_frames_per_step, axis=0)[num_frames_per_step -", "= frame_original_list if keep_labels: dataset['labels'] = labels_list # Reset statefulness", "== 'fixed': def lr_fn(lr, global_step): return lr_params.INITIAL_LR elif lr_params.DECAY_TYPE ==", "lr_params = optimizer_config.LR # pylint: disable=g-long-lambda if lr_params.DECAY_TYPE == 'exp_decay':", "manual,' 'and poly.') return (lambda lr, global_step: get_warmup_lr(lr_fn(lr, global_step), global_step,", "lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence = [(lr_params.INITIAL_LR) * f**p for p in range(len(lr_step_boundaries)", "* num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step][::2] else: img_list = tf.unstack(image_list[i], num=num_steps", "get_lr_fn(optimizer_config): \"\"\"Returns function that provides current learning rate based on", "on the provided config. 
Raises: ValueError: in case invalid params", "__init__(self): self.reset() def elapsed(self): return time.time() - self.time def done(self,", "\"\"\"Sets the correct learning phase before calling function f.\"\"\" def", "= names[~np.isnan(embs).any(axis=1)] seq_lens = seq_lens[~np.isnan(embs).any(axis=1)] steps = steps[~np.isnan(embs).any(axis=1)] if keep_data:", "data dict which contains all the requested sequences.\"\"\" data =", "no config.json file exists in ' '%s', logdir) with tf.io.gfile.GFile(config_path,", "exists in %s.', logdir) with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict", "- 1) * stride, step + stride, stride) return steps", "= tf.expand_dims(image, 0) return image class Stopwatch(object): \"\"\"Simple timer for", "as tf import numpy as np import matplotlib.pyplot as plt", "\"\"\"Return a data dict which contains all the requested sequences.\"\"\"", "num_steps, axis=0)[::2], axis=1) else: cnn_feats = get_cnn_feats(cnn, data, training=False) emb_feats", "batch. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful", "num=batch_size, axis=0) if 'tcn' in CONFIG.TRAINING_ALGO: im_list = [image_list[0] [num_frames_per_step", "= data['frames'] frames_list = tf.unstack(frames, num=num_steps, axis=1) frames_summaries = tf.concat(frames_list,", "in the config. 
\"\"\" lr_params = optimizer_config.LR # pylint: disable=g-long-lambda", "embs = embs[~np.isnan(embs).any(axis=1)] assert len(embs) == len(seq_lens) assert len(embs) ==", "\"\"\"Intializes learning rate, optimizer and global step.\"\"\" optimizer = get_optimizer(CONFIG.OPTIMIZER,", "optimizer and global step.\"\"\" optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step =", "= range(curr_idx, curr_idx + num_steps) single_steps = np.concatenate([get_context_steps(step) for step", "[seq_len]) all_labels = data['frame_labels'].numpy()[0] name = data['name'].numpy()[0] names.append(seq_len * [name])", "as config_file: config_dict = json.load(config_file) CONFIG.update(config_dict) train_logs_dir = os.path.join(logdir, 'train.logs')", "get_indices(curr_idx, num_steps, seq_len): steps = range(curr_idx, curr_idx + num_steps) single_steps", "dtype=np.float32) for i in range(1, batch_size): candidate_feats = emb_feats[i] if", "vstack(im): return tf.concat(tf.unstack(im, num=num_steps), axis=1) summary_im = tf.expand_dims(tf.concat([vstack(im) for im", "= os.path.join(logdir, 'config.json') while not tf.io.gfile.exists(config_path): logging.info('Waiting for config to", "= emb(cnn_feats, num_steps) logging.debug('On sequence number %d, frames embedded %d',", "> -1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try: # Currently, memory growth needs", "num_steps) single_steps = np.concatenate([get_context_steps(step) for step in steps]) single_steps =", "proper learning phase.\"\"\" if 'training' not in kwargs: raise ValueError('Function", "momentum=0.9) else: raise ValueError('Optimizer %s not supported. 
Only support the", "GPUS: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus = tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical GPUs,\", len(logical_gpus),", "same across GPUs for gpu in GPUS: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus", "False to use models in inference mode. tf.keras.backend.set_learning_phase(0) cnn =", "sim_matrix = np.zeros( (batch_size-1, num_steps, num_steps), dtype=np.float32) for i in", "elif lr_params.DECAY_TYPE == 'manual': lr_step_boundaries = [int(x) for x in", "opt = tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE == 'MomentumOptimizer': opt = tf.keras.optimizers.SGD(", "axis=0) names = np.concatenate(names, axis=0) seq_labels = np.concatenate(seq_labels, axis=0) if", "secs.', config_timeout_seconds) time.sleep(config_timeout_seconds) while True: with tf.io.gfile.GFile(config_path, 'r') as config_file:", "self.reset() def elapsed(self): return time.time() - self.time def done(self, target_interval):", "if keep_data: frames = frames[~np.isnan(embs).any(axis=1)] if optical_flow: frame_original = frame_original[~np.isnan(embs).any(axis=1)]", "if lr_params.DECAY_TYPE == 'exp_decay': def lr_fn(lr, global_step): return tf.train.exponential_decay( lr,", "in CONFIG.TRAINING_ALGO: img_list = tf.unstack(image_list[i], num=2 * num_steps * num_frames_per_step,", "axis=axis), tf.int64) def gen_cycles(num_cycles, batch_size, cycle_len): \"\"\"Generate cycles for alignment.\"\"\"", "and v.shape[1] != 0: idxes = get_indices(curr_idx, num_steps, seq_len) curr_data[k]", "= tf.gather(v, idxes, axis=1) else: curr_data[k] = v cnn_feats =", "lr_fn(lr, global_step): return lr_params.INITIAL_LR elif lr_params.DECAY_TYPE == 'poly': def lr_fn(lr,", "tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore( ckpt_manager.latest_checkpoint) if", "tf.unstack(image_list[i], 
num=num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step] nn_img_list = []", "%s not supported. Only support' 'the following decay types: fixed,", "learning_rate_sequence = [(lr_params.INITIAL_LR) * f**p for p in range(len(lr_step_boundaries) +", "config_path = os.path.join(logdir, 'config.json') if not os.path.exists(config_path) or overwrite: logging.info(", "batch_size): candidate_feats = emb_feats[i] if 'tcn' in CONFIG.TRAINING_ALGO: img_list =", "return self.elapsed() >= target_interval def reset(self): self.time = time.time() def", "rate based on the provided config. Raises: ValueError: in case", "= tf.split(frames_summaries, batch_size, axis=0) batch_summaries = tf.concat(batch_list, axis=1) tf.summary.image('train_batch', batch_summaries,", "emb_feats.numpy()) embs.append(emb_feats.numpy()) for f in callbacks: f(np.concatenate(embs), data, chosen_steps, seq_len)", "num_steps = CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS else: num_steps = CONFIG.EVAL.NUM_FRAMES *", "+ num_steps) single_steps = np.concatenate([get_context_steps(step) for step in steps]) single_steps", "* frames_per_batch curr_data = {} for k, v in data.items():", "passed in the config. 
\"\"\" lr_params = optimizer_config.LR # pylint:", "dataset['frames'] = frames_list if optical_flow: dataset['frames_original'] = frame_original_list if keep_labels:", "'tcn' in CONFIG.TRAINING_ALGO: cnn_feats = get_cnn_feats( cnn, data, training=False, num_steps=2", "@tf.function def get_cnn_feats(cnn, data, training, num_steps=None): \"\"\"Passes data through base", "True gru_layer.input_spec[0].shape = [1, ] while cond(n): try: print(n) embs", "= get_data(iterator) seq_len = seq_len.numpy()[0] num_batches = int(math.ceil(float(seq_len)/frames_per_batch)) for i", "steps = steps[~np.isnan(embs).any(axis=1)] if keep_data: frames = frames[~np.isnan(embs).any(axis=1)] if optical_flow:", "v in CONFIG.items()]) json.dump(config, config_file, sort_keys=True, indent=4) else: logging.info( 'Using", "or pass --force_train while launching script.') tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir, config_timeout_seconds=1):", "recurrent layers for other evaluation tasks. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru':", "import time from absl import flags from absl import logging", "[] if optical_flow: frame_original = [] # Reset GRU states", "self.elapsed() >= target_interval def reset(self): self.time = time.time() def set_learning_phase(f):", "restore from any pre-existing checkpoint. # Since model is a", "visualize_batch(data, global_step, batch_size, num_steps): \"\"\"Visualizes a batch.\"\"\" frames = data['frames']", "CNN.\"\"\" if num_steps is None: if training: num_steps = CONFIG.TRAIN.NUM_FRAMES", "# We do this as we are embedding the whole", "exists in ' '%s', logdir) with tf.io.gfile.GFile(config_path, 'w') as config_file:", "dimension image = tf.expand_dims(image, 0) return image class Stopwatch(object): \"\"\"Simple", "= [] if optical_flow: frame_original_list = [] n = 0", "based on config and learning rate.\"\"\" if optimizer_config.TYPE == 'AdamOptimizer':", "be overwriting a directory that already ' 'has train_logs. 
Please", "that exists in %s.', logdir) with tf.io.gfile.GFile(config_path, 'r') as config_file:", "num_batches = int(math.ceil(float(seq_len)/frames_per_batch)) for i in range(num_batches): if (i +", "as summary_image doesn't take float64 sim_matrix = sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' %", "cnn_feats = cnn(data['frames']) return cnn_feats def get_context_steps(step): num_steps = CONFIG.DATA.NUM_STEPS", "stride) return steps def get_indices(curr_idx, num_steps, seq_len): steps = range(curr_idx,", "* frames_per_batch > seq_len: num_steps = seq_len - i *", "if (i + 1) * frames_per_batch > seq_len: num_steps =", "config.items()]) else: return config def setup_train_dir(logdir, overwrite=False, force_train=True): \"\"\"Setups directory", "memory growth needs to be the same across GPUs for", "cond(n): try: print(n) embs = [] labels = [] steps", "curr_query_feats = tf.tile(query_feats[j:j+1], [num_steps, 1]) mean_squared_distance = tf.reduce_mean( tf.math.squared_difference(curr_query_feats, candidate_feats),", "as we are embedding the whole sequence and that can", "to return the current learning rate based on the provided", "len(embs) == len(seq_lens) assert len(embs) == len(steps) assert len(names) ==", "sequence and that can take # more than one batch", "model['cnn'] emb = model['emb'] if 'tcn' in CONFIG.TRAINING_ALGO: cnn_feats =", "= [] # Reset GRU states for each video. if", "def softmax(w, t=1.0): e = np.exp(np.array(w) / t) dist =", "for gpu in GPUS: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus = tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS),", "[] steps = [] seq_lens = [] names = []", "to initialize the learning rate and the learning rate decay", "a function as in Eager we need to call assign", "summary_im = tf.expand_dims(tf.concat([vstack(im) for im in im_list], axis=0), axis=0) tf.summary.image('%s/nn'", "in this dict. 
checkpoint = tf.train.Checkpoint(**ckpt_objects) ckpt_manager = tf.train.CheckpointManager( checkpoint,", "np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx + num_steps))))) single_steps = np.maximum(0, single_steps) single_steps", "np.exp(np.array(w) / t) dist = e / np.sum(e) return dist", "in GPUS: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus = tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical GPUs,\",", "in embedding space.\"\"\" # Set learning_phase to False to use", "is not provided or is corrupted\" return config def prepare_gpu(ind=-1):", "1 and v.shape[1] != 0: idxes = get_indices(curr_idx, num_steps, seq_len)", "time from absl import flags from absl import logging from", "can insert multiple modular networks in this dict. checkpoint =", "to be passed and we don't want to automatically #", "learning rate based on config. NOTE: This returns a function", "import CONFIG import json import tensorflow as tf import numpy", "v cnn_feats = get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step * num_steps, training=False) emb_feats", "= get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step = optimizer.iterations learning_rate = optimizer.learning_rate return", "# more than one batch to be passed and we", "for x in lr_params.MANUAL_LR_STEP_BOUNDARIES] f = lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence = [(lr_params.INITIAL_LR)", "from absl import flags from absl import logging from easydict", "plt # pylint: disable=g-import-not-at-top import io import math import os", "= np.minimum(seq_len, single_steps) return single_steps def get_embeddings_dataset(model, iterator, frames_per_batch, keep_data=False,", "config_file, sort_keys=True, indent=4) else: logging.info( 'Using config from config.json that", "cnn = model['cnn'] emb = model['emb'] if 'tcn' in CONFIG.TRAINING_ALGO:", "* warmup_percent_done is_warmup = tf.cast(global_steps_int < 
warmup_steps_int, tf.float32) lr =", "def get_lr_fn(optimizer_config): \"\"\"Returns function that provides current learning rate based", "len(seq_lens) assert len(embs) == len(steps) assert len(names) == len(steps) embs_list.append(embs)", "before calling function f.\"\"\" def wrapper(*args, **kwargs): \"\"\"Calls the function", "tf.image.decode_png(buf.getvalue(), channels=4) # Add the batch dimension image = tf.expand_dims(image,", "return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64) def gen_cycles(num_cycles, batch_size, cycle_len): \"\"\"Generate", "warmup_percent_done is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32) lr = (1.0", "os.path.join(logdir, 'config.json') if not os.path.exists(config_path) or overwrite: logging.info( 'Using the", "names[~np.isnan(embs).any(axis=1)] seq_lens = seq_lens[~np.isnan(embs).any(axis=1)] steps = steps[~np.isnan(embs).any(axis=1)] if keep_data: frames", "steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names) if keep_data: frames_list.append(frames) if optical_flow: frame_original_list.append(frame_original) n", "= [] if optical_flow: frame_original = [] # Reset GRU", "range(num_batches): if (i + 1) * frames_per_batch > seq_len: num_steps", "this as we are embedding the whole sequence and that", "elapsed time.\"\"\" def __init__(self): self.reset() def elapsed(self): return time.time() -", "= softmax(-1.0 * mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img = tf.stack(nn_img_list, axis=0) im_list.append(nn_img)", "np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy()) embs.append(emb_feats.numpy()) for f in callbacks: f(np.concatenate(embs), data,", "= tf.reduce_max( tf.where( tf.greater_equal(global_step, boundaries), list(range(num_boundaries)), [0] * num_boundaries)) return", "keep_labels = keep_labels and CONFIG.DATA.FRAME_LABELS num_frames_per_step = CONFIG.DATA.NUM_STEPS cnn =", 
"= tf.train.Checkpoint(**ckpt_objects) ckpt_manager = tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status", "1::num_frames_per_step]] sim_matrix = np.zeros( (batch_size-1, num_steps, num_steps), dtype=np.float32) for i", "training, num_steps=None): \"\"\"Passes data through base CNN.\"\"\" if num_steps is", "assert config is not None, \"config file is not provided", "easydict import EasyDict import matplotlib matplotlib.use('Agg') FLAGS = flags.FLAGS def", "num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step][::2] else: img_list = tf.unstack(image_list[i], num=num_steps *", "format='png') buf.seek(0) # Convert PNG buffer to TF image image", "num_frames_per_step, split): \"\"\"Visualize nearest neighbours in embedding space.\"\"\" # Set", "nearest neighbours in embedding space.\"\"\" # Set learning_phase to False", "num_steps = seq_len - i * frames_per_batch else: num_steps =", "provided or is corrupted\" return config def prepare_gpu(ind=-1): ind =", "= tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore( ckpt_manager.latest_checkpoint)", "else: img_list = tf.unstack(image_list[i], num=num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step]", "future. 
steps = np.arange(step - (num_steps - 1) * stride,", "= np.concatenate(frame_original, axis=0) if keep_labels: labels = labels[~np.isnan(embs).any(axis=1)] assert len(embs)", "checkpoint (if one exists on the path).\"\"\" # Instantiate checkpoint", "np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64) def gen_cycles(num_cycles, batch_size, cycle_len):", "embedded %d', n, curr_idx + num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy())", "__future__ import division from __future__ import print_function from config import", "import tensorflow as tf import numpy as np import matplotlib.pyplot", "TB compatible image.\"\"\" plt.figure() plt.plot(x, y) plt.title('Val Accuracy') plt.ylim(0, 1)", "step in steps]) single_steps = np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx + num_steps)))))", "lr_params.DECAY_TYPE == 'fixed': def lr_fn(lr, global_step): return lr_params.INITIAL_LR elif lr_params.DECAY_TYPE", "= tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9) else: raise ValueError('Optimizer %s not supported.", "= tf.reduce_mean( tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1) sim_matrix[i-1, j] = softmax(-1.0 *", "a pyplot, save to buffer and return TB compatible image.\"\"\"", "[] for j in range(num_steps): curr_query_feats = tf.tile(query_feats[j:j+1], [num_steps, 1])", "not tf.io.gfile.exists(config_path): logging.info('Waiting for config to exist. 
Going to sleep", "True: with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file) if", "reset(self): self.time = time.time() def set_learning_phase(f): \"\"\"Sets the correct learning", "np.minimum(seq_len, single_steps) return single_steps def get_embeddings_dataset(model, iterator, frames_per_batch, keep_data=False, optical_flow=False,", "for i in range(num_batches): if (i + 1) * frames_per_batch", "\"\"\"Create and restore checkpoint (if one exists on the path).\"\"\"", "curr_idx + num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'),", "learning phase before calling function f.\"\"\" def wrapper(*args, **kwargs): \"\"\"Calls", "get_cnn_feats(cnn, data, training, num_steps=None): \"\"\"Passes data through base CNN.\"\"\" if", "frames = frames[~np.isnan(embs).any(axis=1)] if optical_flow: frame_original = frame_original[~np.isnan(embs).any(axis=1)] embs =", "= np.maximum(0, single_steps) single_steps = np.minimum(seq_len, single_steps) return single_steps def", "# Reset GRU states for each video. if CONFIG.MODEL.EMBEDDER_TYPE ==", "cnn_feats def get_context_steps(step): num_steps = CONFIG.DATA.NUM_STEPS stride = CONFIG.DATA.FRAME_STRIDE #", "provided config. 
Raises: ValueError: in case invalid params have been", "t) dist = e / np.sum(e) return dist def random_choice_noreplace(m,", "in config.items()]) else: return config def setup_train_dir(logdir, overwrite=False, force_train=True): \"\"\"Setups", "num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"]) #", "return f(*args, **kwargs) return wrapper def load_config(config_path): config = None", "+= 1 except tf.errors.OutOfRangeError: logging.info('Finished embedding the dataset.') break dataset", "and global step.\"\"\" optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step = optimizer.iterations", "'GPU') try: # Currently, memory growth needs to be the", "np.arange(step - (num_steps - 1) * stride, step + stride,", "labels = labels[~np.isnan(embs).any(axis=1)] assert len(embs) == len(labels) seq_labels = seq_labels[~np.isnan(embs).any(axis=1)]", "num_steps=None): \"\"\"Passes data through base CNN.\"\"\" if num_steps is None:", "= os.path.join(logdir, 'train.logs') if os.path.exists(train_logs_dir) and not force_train: raise ValueError('You", "logging.info([len(GPUS), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\"]) except RuntimeError as e:", "return TB compatible image.\"\"\" plt.figure() plt.plot(x, y) plt.title('Val Accuracy') plt.ylim(0,", "= CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS else: num_steps = CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS", "status, checkpoint def to_dict(config): if isinstance(config, list): return [to_dict(c) for", "steps = np.arange(step - (num_steps - 1) * stride, step", "def vstack(im): return tf.concat(tf.unstack(im, num=num_steps), axis=1) summary_im = tf.expand_dims(tf.concat([vstack(im) for", "else: num_steps = CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS cnn.num_steps = num_steps cnn_feats", 
"checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager, status, checkpoint def to_dict(config): if isinstance(config, list):", "tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9) else: raise ValueError('Optimizer %s not supported. Only", "[image_list[0] [num_frames_per_step - 1::num_frames_per_step][::2]] else: im_list = [image_list[0][num_frames_per_step - 1::num_frames_per_step]]", "ValueError('Function called with set_learning_phase decorator which' ' does not have", "\"\"\" lr_params = optimizer_config.LR # pylint: disable=g-long-lambda if lr_params.DECAY_TYPE ==", "< max_embs # Make Recurrent Layers stateful, set batch size.", "global_step): return tf.train.exponential_decay( lr, global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)() elif lr_params.DECAY_TYPE", "' 'config or pass --force_train while launching script.') tf.io.gfile.makedirs(train_logs_dir) def", "os.path.exists(train_logs_dir) and not force_train: raise ValueError('You might be overwriting a", "Convert sim_matrix to float32 as summary_image doesn't take float64 sim_matrix", "optimizer, global_step def create_ckpt(logdir, restore=False, **ckpt_objects): # Since model is", "if keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels) steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names) if keep_data: frames_list.append(frames)", "return cnn_feats def get_context_steps(step): num_steps = CONFIG.DATA.NUM_STEPS stride = CONFIG.DATA.FRAME_STRIDE", "optimizer.iterations learning_rate = optimizer.learning_rate return learning_rate, optimizer, global_step def create_ckpt(logdir,", "axis=0) if keep_data: frames.append(data['frames'].numpy()[0]) frames = np.concatenate(frames, axis=0) if optical_flow:", "training = kwargs['training'] if training: # Set learning_phase to True", "global_steps_float / warmup_steps_float warmup_lr = lr_params.INITIAL_LR * warmup_percent_done is_warmup =", "config_dict is 
None: time.sleep(config_timeout_seconds) else: break CONFIG.update(config_dict) def get_data(iterator): \"\"\"Return", "axis=0), axis=0) tf.summary.image('%s/nn' % split, summary_im, step=global_step) # Convert sim_matrix", "not force_train: raise ValueError('You might be overwriting a directory that", "= {'embs': embs_list, 'seq_lens': seq_lens_list, 'steps': steps_list, 'names': names_list, 'seq_labels':", "def create_ckpt(logdir, restore=False, **ckpt_objects): # Since model is a dict", "states for each video. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer", "a batch.\"\"\" frames = data['frames'] frames_list = tf.unstack(frames, num=num_steps, axis=1)", "range(1, batch_size): candidate_feats = emb_feats[i] if 'tcn' in CONFIG.TRAINING_ALGO: img_list", "and learning rate.\"\"\" if optimizer_config.TYPE == 'AdamOptimizer': opt = tf.keras.optimizers.Adam(learning_rate=learning_rate)", "function as in Eager we need to call assign to", "AdamOptimizer, MomentumOptimizer .') return opt def get_lr_opt_global_step(): \"\"\"Intializes learning rate,", "training argument.') training = kwargs['training'] if training: # Set learning_phase", "'w'), cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy())", "/ np.sum(e) return dist def random_choice_noreplace(m, n, axis=-1): # Generate", "dataset['frames_original'] = frame_original_list if keep_labels: dataset['labels'] = labels_list # Reset", "data['seq_lens'] @tf.function def get_cnn_feats(cnn, data, training, num_steps=None): \"\"\"Passes data through", "np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'),", "frames_list.append(frames) if optical_flow: 
frame_original_list.append(frame_original) n += 1 except tf.errors.OutOfRangeError: logging.info('Finished", "boundaries), list(range(num_boundaries)), [0] * num_boundaries)) return tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries))", "%d, frames embedded %d', n, curr_idx + num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy',", "emb_feats = emb(cnn_feats, num_steps) logging.debug('On sequence number %d, frames embedded", "None if os.path.exists(config_path): with open(config_path) as f: config = json.load(f)", "np.maximum(0, single_steps) single_steps = np.minimum(seq_len, single_steps) return single_steps def get_embeddings_dataset(model,", "the path).\"\"\" # Instantiate checkpoint and restore from any pre-existing", "corrupted\" return config def prepare_gpu(ind=-1): ind = int(ind) GPUS =", "'config or pass --force_train while launching script.') tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir,", "logging.info( 'Using the existing passed in config as no config.json", "needs to be the same across GPUs for gpu in", "axis=1) frames_summaries = tf.concat(frames_list, axis=2) batch_list = tf.split(frames_summaries, batch_size, axis=0)", "* tf.one_hot(rate_index, depth=num_boundaries)) def get_lr_fn(optimizer_config): \"\"\"Returns function that provides current", "buf = io.BytesIO() plt.savefig(buf, format='png') buf.seek(0) # Convert PNG buffer", "GPUs\"]) except RuntimeError as e: # Memory growth must be", "# np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy',", "warmup_lr return lr # Minimally adapted from Tensorflow object_detection code.", "assign to update the learning rate. 
Args: optimizer_config: EasyDict, contains", "pyplot, save to buffer and return TB compatible image.\"\"\" plt.figure()", "restore_ckpt(logdir, **ckpt_objects): \"\"\"Create and restore checkpoint (if one exists on", "seq_labels_list.append(seq_labels) steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names) if keep_data: frames_list.append(frames) if optical_flow: frame_original_list.append(frame_original)", "optimizer.learning_rate return learning_rate, optimizer, global_step def create_ckpt(logdir, restore=False, **ckpt_objects): #", "tf.config.experimental.set_memory_growth(gpu, True) logical_gpus = tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical GPUs,\", len(logical_gpus), \"Logical", "= [image_list[0][num_frames_per_step - 1::num_frames_per_step]] sim_matrix = np.zeros( (batch_size-1, num_steps, num_steps),", "model['emb'] embs_list = [] labels_list = [] steps_list = []", "isinstance(config, list): return [to_dict(c) for c in config] elif isinstance(config,", "raise ValueError('Learning rate decay type %s not supported. Only support'", "names_list = [] seq_labels_list = [] if keep_data: frames_list =", "supported. Only support the following' 'optimizers: AdamOptimizer, MomentumOptimizer .') return", "any pre-existing checkpoint. # Since model is a dict we", "= [1, ] while cond(n): try: print(n) embs = []", "num_steps=2 * num_steps) emb_feats = emb(cnn_feats, 2 * num_steps) emb_feats", "tf.keras.backend.set_learning_phase(1) else: # Set learning_phase to False to use models", "evaluation tasks. 
if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers:", "lr_fn(lr, global_step): return tf.train.exponential_decay( lr, global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)() elif", "FLAGS = flags.FLAGS def visualize_batch(data, global_step, batch_size, num_steps): \"\"\"Visualizes a", "tf.reduce_max( tf.where( tf.greater_equal(global_step, boundaries), list(range(num_boundaries)), [0] * num_boundaries)) return tf.reduce_sum(rates", "+ num_steps))))) single_steps = np.maximum(0, single_steps) single_steps = np.minimum(seq_len, single_steps)", "# Convert sim_matrix to float32 as summary_image doesn't take float64", "gru_layer in emb.gru_layers: gru_layer.stateful = True gru_layer.input_spec[0].shape = [1, ]", "hidden states after each batch. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for", "config.json file exists in ' '%s', logdir) with tf.io.gfile.GFile(config_path, 'w')", "frames_per_batch curr_data = {} for k, v in data.items(): #", "frames_per_batch, keep_data=False, optical_flow=False, keep_labels=True, max_embs=None, callbacks=[]): \"\"\"Get embeddings from a", "= [] steps_list = [] seq_lens_list = [] names_list =", "[name]) seq_label = data['seq_labels'].numpy()[0] seq_labels.append(seq_len * [seq_label]) labels.append(all_labels) embs =", "axis=1) tf.summary.image('train_batch', batch_summaries, step=global_step) def visualize_nearest_neighbours(model, data, global_step, batch_size, num_steps,", "# Convert PNG buffer to TF image image = tf.image.decode_png(buf.getvalue(),", "as np import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top import", "= seq_labels[~np.isnan(embs).any(axis=1)] names = names[~np.isnan(embs).any(axis=1)] seq_lens = seq_lens[~np.isnan(embs).any(axis=1)] steps =", "tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir, config_timeout_seconds=1): \"\"\"Setups directory for evaluation.\"\"\" tf.io.gfile.makedirs(logdir) 
tf.io.gfile.makedirs(os.path.join(logdir,", "names = np.concatenate(names, axis=0) seq_labels = np.concatenate(seq_labels, axis=0) if keep_data:", "if 'tcn' in CONFIG.TRAINING_ALGO: img_list = tf.unstack(image_list[i], num=2 * num_steps", "import os import time from absl import flags from absl", "steps def get_indices(curr_idx, num_steps, seq_len): steps = range(curr_idx, curr_idx +", "get_embeddings_dataset(model, iterator, frames_per_batch, keep_data=False, optical_flow=False, keep_labels=True, max_embs=None, callbacks=[]): \"\"\"Get embeddings", "steps_list = [] seq_lens_list = [] names_list = [] seq_labels_list", "import json import tensorflow as tf import numpy as np", "= tf.stack( tf.split(emb_feats, 2 * num_steps, axis=0)[::2], axis=1) else: cnn_feats", "optical_flow: frame_original_list.append(frame_original) n += 1 except tf.errors.OutOfRangeError: logging.info('Finished embedding the", "tf.tile(query_feats[j:j+1], [num_steps, 1]) mean_squared_distance = tf.reduce_mean( tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1) sim_matrix[i-1,", "pass --force_train while launching script.') tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir, config_timeout_seconds=1): \"\"\"Setups", "callbacks=[]): \"\"\"Get embeddings from a one epoch iterator.\"\"\" keep_labels =", "lr, global_step: get_warmup_lr(lr_fn(lr, global_step), global_step, lr_params)) def get_optimizer(optimizer_config, learning_rate): \"\"\"Returns", "cnn_feats = get_cnn_feats(cnn, data, training=False) emb_feats = emb(cnn_feats, num_steps) emb_feats", "return steps def get_indices(curr_idx, num_steps, seq_len): steps = range(curr_idx, curr_idx", "in emb.gru_layers: gru_layer.stateful = True gru_layer.input_spec[0].shape = [1, ] while", "CONFIG.DATA.FRAME_LABELS num_frames_per_step = CONFIG.DATA.NUM_STEPS cnn = model['cnn'] emb = model['emb']", "step=global_step) # Convert sim_matrix to float32 as summary_image doesn't take", "j in range(num_steps): curr_query_feats = 
tf.tile(query_feats[j:j+1], [num_steps, 1]) mean_squared_distance =", "im in im_list], axis=0), axis=0) tf.summary.image('%s/nn' % split, summary_im, step=global_step)", "learning_rate_sequence) elif lr_params.DECAY_TYPE == 'fixed': def lr_fn(lr, global_step): return lr_params.INITIAL_LR", "config as no config.json file exists in ' '%s', logdir)", "else: raise ValueError('Optimizer %s not supported. Only support the following'", "optical_flow: frame_original = [] # Reset GRU states for each", "axis=2) batch_list = tf.split(frames_summaries, batch_size, axis=0) batch_summaries = tf.concat(batch_list, axis=1)", "frames embedded %d', n, curr_idx + num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'),", "batch_size, num_steps): \"\"\"Visualizes a batch.\"\"\" frames = data['frames'] frames_list =", "= tf.image.decode_png(buf.getvalue(), channels=4) # Add the batch dimension image =", "= keep_labels and CONFIG.DATA.FRAME_LABELS num_frames_per_step = CONFIG.DATA.NUM_STEPS cnn = model['cnn']", "# Currently, memory growth needs to be the same across", "calling function f.\"\"\" def wrapper(*args, **kwargs): \"\"\"Calls the function f", "= get_cnn_feats(cnn, data, training=False) emb_feats = emb(cnn_feats, num_steps) emb_feats =", "are embedding the whole sequence and that can take #", "function that provides current learning rate based on config. NOTE:", "tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\"]) except RuntimeError as", "ValueError('Learning rate decay type %s not supported. Only support' 'the", "Reset statefulness to recurrent layers for other evaluation tasks. 
if", "gru_layer.reset_states() data, chosen_steps, seq_len = get_data(iterator) seq_len = seq_len.numpy()[0] num_batches", "logical_gpus = tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\"]) except", "= False return dataset def gen_plot(x, y): \"\"\"Create a pyplot,", "following decay types: fixed, exp_decay, manual,' 'and poly.') return (lambda", "'seq_lens': seq_lens_list, 'steps': steps_list, 'names': names_list, 'seq_labels': seq_labels_list} if keep_data:", "'seq_labels': seq_labels_list} if keep_data: dataset['frames'] = frames_list if optical_flow: dataset['frames_original']", "[0] + boundaries num_boundaries = len(boundaries) rate_index = tf.reduce_max( tf.where(", "one exists on the path).\"\"\" # Instantiate checkpoint and restore", "False to use models in inference mode. tf.keras.backend.set_learning_phase(0) return f(*args,", "stride, step + stride, stride) return steps def get_indices(curr_idx, num_steps,", "the following' 'optimizers: AdamOptimizer, MomentumOptimizer .') return opt def get_lr_opt_global_step():", "= tf.expand_dims(tf.concat([vstack(im) for im in im_list], axis=0), axis=0) tf.summary.image('%s/nn' %", "seq_lens = np.concatenate(seq_lens, axis=0) names = np.concatenate(names, axis=0) seq_labels =", "epoch iterator.\"\"\" keep_labels = keep_labels and CONFIG.DATA.FRAME_LABELS num_frames_per_step = CONFIG.DATA.NUM_STEPS", "on config and learning rate.\"\"\" if optimizer_config.TYPE == 'AdamOptimizer': opt", "kwargs['training'] if training: # Set learning_phase to True to use", "= i * frames_per_batch curr_data = {} for k, v", "to use models in inference mode. 
tf.keras.backend.set_learning_phase(0) return f(*args, **kwargs)", "logging.info('Finished embedding the dataset.') break dataset = {'embs': embs_list, 'seq_lens':", "* num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step][::2] else: img_list =", "config and learning rate.\"\"\" if optimizer_config.TYPE == 'AdamOptimizer': opt =", "embedding the whole sequence and that can take # more", "iterator, frames_per_batch, keep_data=False, optical_flow=False, keep_labels=True, max_embs=None, callbacks=[]): \"\"\"Get embeddings from", "in config] elif isinstance(config, EasyDict): return dict([(k, to_dict(v)) for k,", "'training' not in kwargs: raise ValueError('Function called with set_learning_phase decorator", "checkpoint.restore( ckpt_manager.latest_checkpoint) if restore else -1 return ckpt_manager, status, checkpoint", "img_list = tf.unstack(image_list[i], num=num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step] nn_img_list", "[] steps_list = [] seq_lens_list = [] names_list = []", "stride = CONFIG.DATA.FRAME_STRIDE # We don't want to see the", "= get_indices(curr_idx, num_steps, seq_len) curr_data[k] = tf.gather(v, idxes, axis=1) else:", "already ' 'has train_logs. 
Please provide a new logdir name", "== 'MomentumOptimizer': opt = tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9) else: raise ValueError('Optimizer", "global_step): return manual_stepping( global_step, lr_step_boundaries, learning_rate_sequence) elif lr_params.DECAY_TYPE == 'fixed':", "rate, optimizer and global step.\"\"\" optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step", "[] if keep_data: frames_list = [] if optical_flow: frame_original_list =", "[] if optical_flow: frame_original_list = [] n = 0 def", "= [] if keep_data: frames = [] if optical_flow: frame_original", "softmax(-1.0 * mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img = tf.stack(nn_img_list, axis=0) im_list.append(nn_img) def", "and that can take # more than one batch to", "models in inference mode. tf.keras.backend.set_learning_phase(0) cnn = model['cnn'] emb =", "get_warmup_lr(lr_fn(lr, global_step), global_step, lr_params)) def get_optimizer(optimizer_config, learning_rate): \"\"\"Returns optimizer based", "in Eager we need to call assign to update the", "keep_data: dataset['frames'] = frames_list if optical_flow: dataset['frames_original'] = frame_original_list if", "return (lambda lr, global_step: get_warmup_lr(lr_fn(lr, global_step), global_step, lr_params)) def get_optimizer(optimizer_config,", "def reset(self): self.time = time.time() def set_learning_phase(f): \"\"\"Sets the correct", "config = json.load(f) assert config is not None, \"config file", "class Stopwatch(object): \"\"\"Simple timer for measuring elapsed time.\"\"\" def __init__(self):", "im_list = [image_list[0][num_frames_per_step - 1::num_frames_per_step]] sim_matrix = np.zeros( (batch_size-1, num_steps,", "train_logs_dir = os.path.join(logdir, 'train.logs') if os.path.exists(train_logs_dir) and not force_train: raise", "staircase=True)() elif lr_params.DECAY_TYPE == 'manual': lr_step_boundaries = [int(x) for x", "seq_labels 
= seq_labels[~np.isnan(embs).any(axis=1)] names = names[~np.isnan(embs).any(axis=1)] seq_lens = seq_lens[~np.isnan(embs).any(axis=1)] steps", "exp_decay, manual,' 'and poly.') return (lambda lr, global_step: get_warmup_lr(lr_fn(lr, global_step),", "training mode. tf.keras.backend.set_learning_phase(1) else: # Set learning_phase to False to", "else: im_list = [image_list[0][num_frames_per_step - 1::num_frames_per_step]] sim_matrix = np.zeros( (batch_size-1,", "data['frames'] image_list = tf.unstack(frames, num=batch_size, axis=0) if 'tcn' in CONFIG.TRAINING_ALGO:", "script.') tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir, config_timeout_seconds=1): \"\"\"Setups directory for evaluation.\"\"\" tf.io.gfile.makedirs(logdir)", "# Make Recurrent Layers stateful, set batch size. # We", "tf.train.Checkpoint(**ckpt_objects) ckpt_manager = tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status =", "lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float = tf.cast(global_steps_int, tf.float32) warmup_steps_float = tf.cast(warmup_steps_int, tf.float32)", "im_list = [image_list[0] [num_frames_per_step - 1::num_frames_per_step][::2]] else: im_list = [image_list[0][num_frames_per_step", "frame_original.append(data['video_frames'].numpy()[0]) frame_original = np.concatenate(frame_original, axis=0) if keep_labels: labels = labels[~np.isnan(embs).any(axis=1)]", "single_steps) return single_steps def get_embeddings_dataset(model, iterator, frames_per_batch, keep_data=False, optical_flow=False, keep_labels=True,", "than one batch to be passed and we don't want", "float32 as summary_image doesn't take float64 sim_matrix = sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix'", "adapted from Tensorflow object_detection code. 
def manual_stepping(global_step, boundaries, rates): boundaries", "to float32 as summary_image doesn't take float64 sim_matrix = sim_matrix.astype(np.float32)", "tf.cast(global_step, tf.int32) warmup_steps_int = tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float = tf.cast(global_steps_int,", "keep_labels: labels = labels[~np.isnan(embs).any(axis=1)] assert len(embs) == len(labels) seq_labels =", "learning rate. Args: optimizer_config: EasyDict, contains params required to initialize", "candidate_feats = emb_feats[i] if 'tcn' in CONFIG.TRAINING_ALGO: img_list = tf.unstack(image_list[i],", "GPUs,\", len(logical_gpus), \"Logical GPUs\"]) except RuntimeError as e: # Memory", "'names': names_list, 'seq_labels': seq_labels_list} if keep_data: dataset['frames'] = frames_list if", "models in training mode. tf.keras.backend.set_learning_phase(1) else: # Set learning_phase to", "dtype=tf.int32) global_steps_float = tf.cast(global_steps_int, tf.float32) warmup_steps_float = tf.cast(warmup_steps_int, tf.float32) warmup_percent_done", "single_steps def get_embeddings_dataset(model, iterator, frames_per_batch, keep_data=False, optical_flow=False, keep_labels=True, max_embs=None, callbacks=[]):", "global_steps_float = tf.cast(global_steps_int, tf.float32) warmup_steps_float = tf.cast(warmup_steps_int, tf.float32) warmup_percent_done =", "cnn, data, training=False, num_steps=2 * num_steps) emb_feats = emb(cnn_feats, 2", "config] elif isinstance(config, EasyDict): return dict([(k, to_dict(v)) for k, v", "cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy()) embs.append(emb_feats.numpy()) for f in callbacks:", "if keep_data: frames.append(data['frames'].numpy()[0]) frames = np.concatenate(frames, axis=0) if optical_flow: frame_original.append(data['video_frames'].numpy()[0])", "EasyDict, contains params required to initialize the learning rate and", "'tcn' in CONFIG.TRAINING_ALGO: im_list = [image_list[0] 
[num_frames_per_step - 1::num_frames_per_step][::2]] else:", "whole sequence and that can take # more than one", "print(n) embs = [] labels = [] steps = []", "emb.gru_layers: gru_layer.stateful = False return dataset def gen_plot(x, y): \"\"\"Create", "np.concatenate(steps, axis=0) seq_lens = np.concatenate(seq_lens, axis=0) names = np.concatenate(names, axis=0)", "= np.concatenate(embs, axis=0) labels = np.concatenate(labels, axis=0) steps = np.concatenate(steps,", "= data['name'].numpy()[0] names.append(seq_len * [name]) seq_label = data['seq_labels'].numpy()[0] seq_labels.append(seq_len *", "automatically # reset hidden states after each batch. if CONFIG.MODEL.EMBEDDER_TYPE", "while not tf.io.gfile.exists(config_path): logging.info('Waiting for config to exist. Going to", "CONFIG.DATA.NUM_STEPS else: num_steps = CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS cnn.num_steps = num_steps", "learning rate during warm up phase.\"\"\" if lr_params.NUM_WARMUP_STEPS > 0:", "seq_labels = [] if keep_data: frames = [] if optical_flow:", "as f: config = json.load(f) assert config is not None,", "def get_warmup_lr(lr, global_step, lr_params): \"\"\"Returns learning rate during warm up", "n, axis=-1): # Generate m random permuations of range (0,", "p in range(len(lr_step_boundaries) + 1)] def lr_fn(lr, global_step): return manual_stepping(", "lr_params.INITIAL_LR elif lr_params.DECAY_TYPE == 'poly': def lr_fn(lr, global_step): return tf.train.polynomial_decay(", "stride, stride) return steps def get_indices(curr_idx, num_steps, seq_len): steps =", "seq_len - i * frames_per_batch else: num_steps = frames_per_batch curr_idx", "or is corrupted\" return config def prepare_gpu(ind=-1): ind = int(ind)", "import io import math import os import time from absl", "= frames[~np.isnan(embs).any(axis=1)] if optical_flow: frame_original = frame_original[~np.isnan(embs).any(axis=1)] embs = embs[~np.isnan(embs).any(axis=1)]", "not os.path.exists(config_path) or overwrite: logging.info( 'Using 
the existing passed in", "'tcn' in CONFIG.TRAINING_ALGO: img_list = tf.unstack(image_list[i], num=2 * num_steps *", "\"\"\"Returns function that provides current learning rate based on config.", "tensorflow as tf import numpy as np import matplotlib.pyplot as", "- is_warmup) * lr + is_warmup * warmup_lr return lr", "Set learning_phase to False to use models in inference mode.", "i in range(1, batch_size): candidate_feats = emb_feats[i] if 'tcn' in", "file exists in ' '%s', logdir) with tf.io.gfile.GFile(config_path, 'w') as", "optimizer_config.TYPE == 'AdamOptimizer': opt = tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE == 'MomentumOptimizer':", "= seq_len.numpy()[0] num_batches = int(math.ceil(float(seq_len)/frames_per_batch)) for i in range(num_batches): if", "- 1::num_frames_per_step]] sim_matrix = np.zeros( (batch_size-1, num_steps, num_steps), dtype=np.float32) for", "steps]) single_steps = np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx + num_steps))))) single_steps =", "setup_eval_dir(logdir, config_timeout_seconds=1): \"\"\"Setups directory for evaluation.\"\"\" tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path", "v.shape[1] != 0: idxes = get_indices(curr_idx, num_steps, seq_len) curr_data[k] =", "\"Logical GPUs\"]) except RuntimeError as e: # Memory growth must", "config_dict = json.load(config_file) if config_dict is None: time.sleep(config_timeout_seconds) else: break", "set_learning_phase(f): \"\"\"Sets the correct learning phase before calling function f.\"\"\"", "labels = np.concatenate(labels, axis=0) steps = np.concatenate(steps, axis=0) seq_lens =", "max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager, status, checkpoint def", "status = checkpoint.restore( ckpt_manager.latest_checkpoint) if restore else -1 return ckpt_manager,", "model is a dict we can insert 
multiple modular networks", "return dict([(k, to_dict(v)) for k, v in config.items()]) else: return", "* num_boundaries)) return tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries)) def get_lr_fn(optimizer_config): \"\"\"Returns", "np.arange(curr_idx, curr_idx + num_steps))))) single_steps = np.maximum(0, single_steps) single_steps =", "tf.float32) warmup_steps_float = tf.cast(warmup_steps_int, tf.float32) warmup_percent_done = global_steps_float / warmup_steps_float", "float64 sim_matrix = sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' % split, np.expand_dims(sim_matrix, axis=3), step=global_step)", "existing passed in config as no config.json file exists in", "keep_data=False, optical_flow=False, keep_labels=True, max_embs=None, callbacks=[]): \"\"\"Get embeddings from a one", "iterator.\"\"\" keep_labels = keep_labels and CONFIG.DATA.FRAME_LABELS num_frames_per_step = CONFIG.DATA.NUM_STEPS cnn", "tf.train.exponential_decay( lr, global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)() elif lr_params.DECAY_TYPE == 'manual':", "2 * num_steps) emb_feats = tf.stack( tf.split(emb_feats, 2 * num_steps,", "= [] seq_lens = [] names = [] seq_labels =", "= np.concatenate(steps, axis=0) seq_lens = np.concatenate(seq_lens, axis=0) names = np.concatenate(names,", "a one epoch iterator.\"\"\" keep_labels = keep_labels and CONFIG.DATA.FRAME_LABELS num_frames_per_step", "j] = softmax(-1.0 * mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img = tf.stack(nn_img_list, axis=0)", "Convert PNG buffer to TF image image = tf.image.decode_png(buf.getvalue(), channels=4)", "optimizer_config: EasyDict, contains params required to initialize the learning rate", "during warm up phase.\"\"\" if lr_params.NUM_WARMUP_STEPS > 0: global_steps_int =", "return config def setup_train_dir(logdir, overwrite=False, force_train=True): \"\"\"Setups directory for training.\"\"\"", "call assign to 
update the learning rate. Args: optimizer_config: EasyDict,", ">= target_interval def reset(self): self.time = time.time() def set_learning_phase(f): \"\"\"Sets", "the learning rate. Args: optimizer_config: EasyDict, contains params required to", "after each batch. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in", "CONFIG.DATA.NUM_STEPS cnn = model['cnn'] emb = model['emb'] embs_list = []", "the same across GPUs for gpu in GPUS: tf.config.experimental.set_memory_growth(gpu, True)", "image class Stopwatch(object): \"\"\"Simple timer for measuring elapsed time.\"\"\" def", "'config.json') if not os.path.exists(config_path) or overwrite: logging.info( 'Using the existing", "which' ' does not have training argument.') training = kwargs['training']", "axis=0) if optical_flow: frame_original.append(data['video_frames'].numpy()[0]) frame_original = np.concatenate(frame_original, axis=0) if keep_labels:", "len(embs) == len(labels) seq_labels = seq_labels[~np.isnan(embs).any(axis=1)] names = names[~np.isnan(embs).any(axis=1)] seq_lens", "GRU states for each video. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for", "GPUS: if ind > -1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try: # Currently,", "object_detection code. def manual_stepping(global_step, boundaries, rates): boundaries = [0] +", "tf.gather(v, idxes, axis=1) else: curr_data[k] = v cnn_feats = get_cnn_feats(cnn,", "# Generate m random permuations of range (0, n) #", "num_steps): \"\"\"Visualizes a batch.\"\"\" frames = data['frames'] frames_list = tf.unstack(frames,", "def get_optimizer(optimizer_config, learning_rate): \"\"\"Returns optimizer based on config and learning", "modular networks in this dict. 
checkpoint = tf.train.Checkpoint(**ckpt_objects) ckpt_manager =", "boundaries, rates): boundaries = [0] + boundaries num_boundaries = len(boundaries)", "num_boundaries)) return tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries)) def get_lr_fn(optimizer_config): \"\"\"Returns function", "in range(num_steps): curr_query_feats = tf.tile(query_feats[j:j+1], [num_steps, 1]) mean_squared_distance = tf.reduce_mean(", "steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len * [seq_len]) all_labels = data['frame_labels'].numpy()[0] name = data['name'].numpy()[0]", "plt.title('Val Accuracy') plt.ylim(0, 1) plt.tight_layout() buf = io.BytesIO() plt.savefig(buf, format='png')", "for measuring elapsed time.\"\"\" def __init__(self): self.reset() def elapsed(self): return", "GPUS = tf.config.experimental.list_physical_devices('GPU') if GPUS: if ind > -1: tf.config.experimental.set_visible_devices(GPUS[ind],", "passed and we don't want to automatically # reset hidden", "== 'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful = False return", "[] seq_lens_list = [] names_list = [] seq_labels_list = []", "seq_label = data['seq_labels'].numpy()[0] seq_labels.append(seq_len * [seq_label]) labels.append(all_labels) embs = np.concatenate(embs,", "embedding the dataset.') break dataset = {'embs': embs_list, 'seq_lens': seq_lens_list,", "cycles for alignment.\"\"\" random_cycles = random_choice_noreplace( num_cycles, batch_size)[:, :cycle_len] return", "= [] seq_labels_list = [] if keep_data: frames_list = []", "= time.time() def set_learning_phase(f): \"\"\"Sets the correct learning phase before", "-1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try: # Currently, memory growth needs to", "get_lr_opt_global_step(): \"\"\"Intializes learning rate, optimizer and global step.\"\"\" optimizer =", "code. 
def manual_stepping(global_step, boundaries, rates): boundaries = [0] + boundaries", "tf.one_hot(rate_index, depth=num_boundaries)) def get_lr_fn(optimizer_config): \"\"\"Returns function that provides current learning", "sequences.\"\"\" data = iterator.get_next() return data, data['chosen_steps'], data['seq_lens'] @tf.function def", "Only support the following' 'optimizers: AdamOptimizer, MomentumOptimizer .') return opt", "path).\"\"\" # Instantiate checkpoint and restore from any pre-existing checkpoint.", "checkpoint def restore_ckpt(logdir, **ckpt_objects): \"\"\"Create and restore checkpoint (if one", "= dict([(k, to_dict(v)) for k, v in CONFIG.items()]) json.dump(config, config_file,", "more than one batch to be passed and we don't", "elif optimizer_config.TYPE == 'MomentumOptimizer': opt = tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9) else:", "for gru_layer in emb.gru_layers: gru_layer.stateful = True gru_layer.input_spec[0].shape = [1,", "set batch size. # We do this as we are", "in %s.', logdir) with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict =", "from __future__ import absolute_import from __future__ import division from __future__", "optical_flow: frame_original = frame_original[~np.isnan(embs).any(axis=1)] embs = embs[~np.isnan(embs).any(axis=1)] assert len(embs) ==", "= tf.cast(global_steps_int, tf.float32) warmup_steps_float = tf.cast(warmup_steps_int, tf.float32) warmup_percent_done = global_steps_float", "decay function. 
Returns: lr_fn: function, this can be called to", "import math import os import time from absl import flags", "from __future__ import print_function from config import CONFIG import json", "nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img = tf.stack(nn_img_list, axis=0) im_list.append(nn_img) def vstack(im): return tf.concat(tf.unstack(im,", "# Need to do this as some modalities might not", "'r') as config_file: config_dict = json.load(config_file) CONFIG.update(config_dict) train_logs_dir = os.path.join(logdir,", "'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful = True gru_layer.input_spec[0].shape =", ":cycle_len] return random_cycles def get_warmup_lr(lr, global_step, lr_params): \"\"\"Returns learning rate", "seq_len.numpy()[0] num_batches = int(math.ceil(float(seq_len)/frames_per_batch)) for i in range(num_batches): if (i", "tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64) def gen_cycles(num_cycles, batch_size, cycle_len): \"\"\"Generate cycles", "phase.\"\"\" if lr_params.NUM_WARMUP_STEPS > 0: global_steps_int = tf.cast(global_step, tf.int32) warmup_steps_int", "frames.append(data['frames'].numpy()[0]) frames = np.concatenate(frames, axis=0) if optical_flow: frame_original.append(data['video_frames'].numpy()[0]) frame_original =", "don't want to see the future. steps = np.arange(step -", "logdir name in ' 'config or pass --force_train while launching", "disable=g-import-not-at-top import io import math import os import time from", "optical_flow: frame_original.append(data['video_frames'].numpy()[0]) frame_original = np.concatenate(frame_original, axis=0) if keep_labels: labels =", "not supported. 
Only support the following' 'optimizers: AdamOptimizer, MomentumOptimizer .')", "gru_layer.stateful = True gru_layer.input_spec[0].shape = [1, ] while cond(n): try:", "tf.stack(nn_img_list, axis=0) im_list.append(nn_img) def vstack(im): return tf.concat(tf.unstack(im, num=num_steps), axis=1) summary_im", "time.time() def set_learning_phase(f): \"\"\"Sets the correct learning phase before calling", "# Memory growth must be set before GPUs have been", "training=False, num_steps=2 * num_steps) emb_feats = emb(cnn_feats, 2 * num_steps)", "batch.\"\"\" frames = data['frames'] frames_list = tf.unstack(frames, num=num_steps, axis=1) frames_summaries", "EasyDict import matplotlib matplotlib.use('Agg') FLAGS = flags.FLAGS def visualize_batch(data, global_step,", "np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy()) embs.append(emb_feats.numpy()) for f", "tf.concat(batch_list, axis=1) tf.summary.image('train_batch', batch_summaries, step=global_step) def visualize_nearest_neighbours(model, data, global_step, batch_size,", "in range(num_batches): if (i + 1) * frames_per_batch > seq_len:", "[] seq_labels = [] if keep_data: frames = [] if", "get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step = optimizer.iterations learning_rate = optimizer.learning_rate return learning_rate,", "as some modalities might not exist. 
if len(v.shape) > 1", "def random_choice_noreplace(m, n, axis=-1): # Generate m random permuations of", "this can be called to return the current learning rate", "return ckpt_manager, status, checkpoint def restore_ckpt(logdir, **ckpt_objects): \"\"\"Create and restore", "data['chosen_steps'], data['seq_lens'] @tf.function def get_cnn_feats(cnn, data, training, num_steps=None): \"\"\"Passes data", "plt.tight_layout() buf = io.BytesIO() plt.savefig(buf, format='png') buf.seek(0) # Convert PNG", "tf.concat(tf.unstack(im, num=num_steps), axis=1) summary_im = tf.expand_dims(tf.concat([vstack(im) for im in im_list],", "config. NOTE: This returns a function as in Eager we", "%s not supported. Only support the following' 'optimizers: AdamOptimizer, MomentumOptimizer", "Instantiate checkpoint and restore from any pre-existing checkpoint. # Since", "frames_per_batch > seq_len: num_steps = seq_len - i * frames_per_batch", "def done(self, target_interval): return self.elapsed() >= target_interval def reset(self): self.time", "list): return [to_dict(c) for c in config] elif isinstance(config, EasyDict):", "the dataset.') break dataset = {'embs': embs_list, 'seq_lens': seq_lens_list, 'steps':", "= int(math.ceil(float(seq_len)/frames_per_batch)) for i in range(num_batches): if (i + 1)", "optimizer_config.TYPE == 'MomentumOptimizer': opt = tf.keras.optimizers.SGD( learning_rate=learning_rate, momentum=0.9) else: raise", "[] names_list = [] seq_labels_list = [] if keep_data: frames_list", "names_list, 'seq_labels': seq_labels_list} if keep_data: dataset['frames'] = frames_list if optical_flow:", "int(math.ceil(float(seq_len)/frames_per_batch)) for i in range(num_batches): if (i + 1) *", "lr, global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0, cycle=False) else: raise ValueError('Learning rate", "need to call assign to update the learning rate. 
Args:", "# Add the batch dimension image = tf.expand_dims(image, 0) return", "learning_phase to True to use models in training mode. tf.keras.backend.set_learning_phase(1)", "timer for measuring elapsed time.\"\"\" def __init__(self): self.reset() def elapsed(self):", "frame_original = [] # Reset GRU states for each video.", "see the future. steps = np.arange(step - (num_steps - 1)", "from __future__ import division from __future__ import print_function from config", "have been passed in the config. \"\"\" lr_params = optimizer_config.LR", "absl import flags from absl import logging from easydict import", "= [(lr_params.INITIAL_LR) * f**p for p in range(len(lr_step_boundaries) + 1)]", "on the path).\"\"\" # Instantiate checkpoint and restore from any", "CONFIG.items()]) json.dump(config, config_file, sort_keys=True, indent=4) else: logging.info( 'Using config from", "tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries)) def get_lr_fn(optimizer_config): \"\"\"Returns function that provides", "provides current learning rate based on config. 
NOTE: This returns", "image = tf.expand_dims(image, 0) return image class Stopwatch(object): \"\"\"Simple timer", "import logging from easydict import EasyDict import matplotlib matplotlib.use('Agg') FLAGS", "= tf.config.experimental.list_physical_devices('GPU') if GPUS: if ind > -1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU')", "batch_size, axis=0) batch_summaries = tf.concat(batch_list, axis=1) tf.summary.image('train_batch', batch_summaries, step=global_step) def", "end_learning_rate=0.0, power=1.0, cycle=False) else: raise ValueError('Learning rate decay type %s", "for im in im_list], axis=0), axis=0) tf.summary.image('%s/nn' % split, summary_im,", "def visualize_batch(data, global_step, batch_size, num_steps): \"\"\"Visualizes a batch.\"\"\" frames =", "optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step = optimizer.iterations learning_rate = optimizer.learning_rate", "= frames_list if optical_flow: dataset['frames_original'] = frame_original_list if keep_labels: dataset['labels']", "(if one exists on the path).\"\"\" # Instantiate checkpoint and", "= emb_feats[0] if CONFIG.OPTICALFLOW: frames = data['video_frames'] else: frames =", "[] seq_lens = [] names = [] seq_labels = []", "kwargs: raise ValueError('Function called with set_learning_phase decorator which' ' does", "dataset['labels'] = labels_list # Reset statefulness to recurrent layers for", "elif isinstance(config, EasyDict): return dict([(k, to_dict(v)) for k, v in", "elif lr_params.DECAY_TYPE == 'fixed': def lr_fn(lr, global_step): return lr_params.INITIAL_LR elif", "tf.concat(frames_list, axis=2) batch_list = tf.split(frames_summaries, batch_size, axis=0) batch_summaries = tf.concat(batch_list,", "(1.0 - is_warmup) * lr + is_warmup * warmup_lr return", "CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step = optimizer.iterations learning_rate = optimizer.learning_rate return learning_rate, optimizer,", "ckpt_manager = tf.train.CheckpointManager( checkpoint, 
directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore(", "e / np.sum(e) return dist def random_choice_noreplace(m, n, axis=-1): #", "json.load(config_file) if config_dict is None: time.sleep(config_timeout_seconds) else: break CONFIG.update(config_dict) def", "lr_fn(lr, global_step): return tf.train.polynomial_decay( lr, global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0, cycle=False)", "np.concatenate(frames, axis=0) if optical_flow: frame_original.append(data['video_frames'].numpy()[0]) frame_original = np.concatenate(frame_original, axis=0) if", "'config.json') while not tf.io.gfile.exists(config_path): logging.info('Waiting for config to exist. Going", "embeddings from a one epoch iterator.\"\"\" keep_labels = keep_labels and", "numpy as np import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top", "for k, v in CONFIG.items()]) json.dump(config, config_file, sort_keys=True, indent=4) else:", "dist = e / np.sum(e) return dist def random_choice_noreplace(m, n,", "to be the same across GPUs for gpu in GPUS:", "summary_im, step=global_step) # Convert sim_matrix to float32 as summary_image doesn't", "CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.reset_states() data, chosen_steps,", "= tf.concat(batch_list, axis=1) tf.summary.image('train_batch', batch_summaries, step=global_step) def visualize_nearest_neighbours(model, data, global_step,", "CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS cnn.num_steps = num_steps cnn_feats = cnn(data['frames']) return", "num_steps), dtype=np.float32) for i in range(1, batch_size): candidate_feats = emb_feats[i]", "' does not have training argument.') training = kwargs['training'] if", "= int(ind) GPUS = tf.config.experimental.list_physical_devices('GPU') if GPUS: if ind >", "1) * stride, step + stride, stride) return steps def", "be called to return the current learning rate based on", "def get_indices(curr_idx, num_steps, 
seq_len): steps = range(curr_idx, curr_idx + num_steps)", "directory for training.\"\"\" tf.io.gfile.makedirs(logdir) config_path = os.path.join(logdir, 'config.json') if not", "= tf.stack(nn_img_list, axis=0) im_list.append(nn_img) def vstack(im): return tf.concat(tf.unstack(im, num=num_steps), axis=1)", "argument.') training = kwargs['training'] if training: # Set learning_phase to", "learning_rate, optimizer, global_step def create_ckpt(logdir, restore=False, **ckpt_objects): # Since model", "after setting proper learning phase.\"\"\" if 'training' not in kwargs:", "current learning rate based on the provided config. Raises: ValueError:", "seq_len = get_data(iterator) seq_len = seq_len.numpy()[0] num_batches = int(math.ceil(float(seq_len)/frames_per_batch)) for", "num=2 * num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step][::2] else: img_list", "rate_index = tf.reduce_max( tf.where( tf.greater_equal(global_step, boundaries), list(range(num_boundaries)), [0] * num_boundaries))", "overwriting a directory that already ' 'has train_logs. 
Please provide", "[num_steps, 1]) mean_squared_distance = tf.reduce_mean( tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1) sim_matrix[i-1, j]", "seq_labels_list} if keep_data: dataset['frames'] = frames_list if optical_flow: dataset['frames_original'] =", "random permuations of range (0, n) # NumPy version: np.random.rand(m,n).argsort(axis=axis)", "CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful = False", "% split, summary_im, step=global_step) # Convert sim_matrix to float32 as", "num_steps, training=False) emb_feats = emb(cnn_feats, num_steps) logging.debug('On sequence number %d,", "= flags.FLAGS def visualize_batch(data, global_step, batch_size, num_steps): \"\"\"Visualizes a batch.\"\"\"", "if optical_flow: frame_original = [] # Reset GRU states for", "tf.split(frames_summaries, batch_size, axis=0) batch_summaries = tf.concat(batch_list, axis=1) tf.summary.image('train_batch', batch_summaries, step=global_step)", "matplotlib matplotlib.use('Agg') FLAGS = flags.FLAGS def visualize_batch(data, global_step, batch_size, num_steps):", "case invalid params have been passed in the config. 
\"\"\"", "exists on the path).\"\"\" # Instantiate checkpoint and restore from", "data, global_step, batch_size, num_steps, num_frames_per_step, split): \"\"\"Visualize nearest neighbours in", "from absl import logging from easydict import EasyDict import matplotlib", "the whole sequence and that can take # more than", "new logdir name in ' 'config or pass --force_train while", "we don't want to automatically # reset hidden states after", "for j in range(num_steps): curr_query_feats = tf.tile(query_feats[j:j+1], [num_steps, 1]) mean_squared_distance", "batch_list = tf.split(frames_summaries, batch_size, axis=0) batch_summaries = tf.concat(batch_list, axis=1) tf.summary.image('train_batch',", "iterator.get_next() return data, data['chosen_steps'], data['seq_lens'] @tf.function def get_cnn_feats(cnn, data, training,", "dict which contains all the requested sequences.\"\"\" data = iterator.get_next()", "config def setup_train_dir(logdir, overwrite=False, force_train=True): \"\"\"Setups directory for training.\"\"\" tf.io.gfile.makedirs(logdir)", "requested sequences.\"\"\" data = iterator.get_next() return data, data['chosen_steps'], data['seq_lens'] @tf.function", "assert len(embs) == len(labels) seq_labels = seq_labels[~np.isnan(embs).any(axis=1)] names = names[~np.isnan(embs).any(axis=1)]", "seq_lens_list.append(seq_lens) names_list.append(names) if keep_data: frames_list.append(frames) if optical_flow: frame_original_list.append(frame_original) n +=", "e: # Memory growth must be set before GPUs have", "passed in config as no config.json file exists in '", "use models in inference mode. 
tf.keras.backend.set_learning_phase(0) cnn = model['cnn'] emb", "if keep_data: frames_list = [] if optical_flow: frame_original_list = []", "'eval_logs')) config_path = os.path.join(logdir, 'config.json') while not tf.io.gfile.exists(config_path): logging.info('Waiting for", "= v cnn_feats = get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step * num_steps, training=False)", "= random_choice_noreplace( num_cycles, batch_size)[:, :cycle_len] return random_cycles def get_warmup_lr(lr, global_step,", "= tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\"]) except RuntimeError", "def get_embeddings_dataset(model, iterator, frames_per_batch, keep_data=False, optical_flow=False, keep_labels=True, max_embs=None, callbacks=[]): \"\"\"Get", "break dataset = {'embs': embs_list, 'seq_lens': seq_lens_list, 'steps': steps_list, 'names':", "else: raise ValueError('Learning rate decay type %s not supported. Only", "\"\"\"Setups directory for evaluation.\"\"\" tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path = os.path.join(logdir,", "False return dataset def gen_plot(x, y): \"\"\"Create a pyplot, save", "def cond(n): if max_embs is None: return True else: return", "file is not provided or is corrupted\" return config def", "tf.expand_dims(tf.concat([vstack(im) for im in im_list], axis=0), axis=0) tf.summary.image('%s/nn' % split,", "* [seq_label]) labels.append(all_labels) embs = np.concatenate(embs, axis=0) labels = np.concatenate(labels,", "[] labels_list = [] steps_list = [] seq_lens_list = []", "force_train=True): \"\"\"Setups directory for training.\"\"\" tf.io.gfile.makedirs(logdir) config_path = os.path.join(logdir, 'config.json')", "tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file) CONFIG.update(config_dict) train_logs_dir =", "= get_cnn_feats( cnn, data, training=False, num_steps=2 * num_steps) 
emb_feats =", "t=1.0): e = np.exp(np.array(w) / t) dist = e /", "gru_layer.stateful = False return dataset def gen_plot(x, y): \"\"\"Create a", "None, \"config file is not provided or is corrupted\" return", "alignment.\"\"\" random_cycles = random_choice_noreplace( num_cycles, batch_size)[:, :cycle_len] return random_cycles def", "if len(v.shape) > 1 and v.shape[1] != 0: idxes =", "if lr_params.NUM_WARMUP_STEPS > 0: global_steps_int = tf.cast(global_step, tf.int32) warmup_steps_int =", "as in Eager we need to call assign to update", "= lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence = [(lr_params.INITIAL_LR) * f**p for p in", "axis=1) query_feats = emb_feats[0] if CONFIG.OPTICALFLOW: frames = data['video_frames'] else:", "= emb(cnn_feats, 2 * num_steps) emb_feats = tf.stack( tf.split(emb_feats, 2", "in range(1, batch_size): candidate_feats = emb_feats[i] if 'tcn' in CONFIG.TRAINING_ALGO:", "do this as some modalities might not exist. if len(v.shape)", "from any pre-existing checkpoint. # Since model is a dict", "of range (0, n) # NumPy version: np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m,", "if 'tcn' in CONFIG.TRAINING_ALGO: im_list = [image_list[0] [num_frames_per_step - 1::num_frames_per_step][::2]]", "try: print(n) embs = [] labels = [] steps =", "not provided or is corrupted\" return config def prepare_gpu(ind=-1): ind", "sort_keys=True, indent=4) else: logging.info( 'Using config from config.json that exists", "= json.load(f) assert config is not None, \"config file is", "axis=0)[num_frames_per_step - 1::num_frames_per_step] nn_img_list = [] for j in range(num_steps):", "else -1 return ckpt_manager, status, checkpoint def restore_ckpt(logdir, **ckpt_objects): \"\"\"Create", "# reset hidden states after each batch. if CONFIG.MODEL.EMBEDDER_TYPE ==", "pre-existing checkpoint. # Since model is a dict we can", "return opt def get_lr_opt_global_step(): \"\"\"Intializes learning rate, optimizer and global", "config. 
\"\"\" lr_params = optimizer_config.LR # pylint: disable=g-long-lambda if lr_params.DECAY_TYPE", "'r') as config_file: config_dict = json.load(config_file) if config_dict is None:", "learning rate, optimizer and global step.\"\"\" optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR)", "num_steps, num_frames_per_step, split): \"\"\"Visualize nearest neighbours in embedding space.\"\"\" #", "doesn't take float64 sim_matrix = sim_matrix.astype(np.float32) tf.summary.image('%s/similarity_matrix' % split, np.expand_dims(sim_matrix,", "> 0: global_steps_int = tf.cast(global_step, tf.int32) warmup_steps_int = tf.constant( lr_params.NUM_WARMUP_STEPS,", "config def prepare_gpu(ind=-1): ind = int(ind) GPUS = tf.config.experimental.list_physical_devices('GPU') if", "for p in range(len(lr_step_boundaries) + 1)] def lr_fn(lr, global_step): return", "* [seq_len]) all_labels = data['frame_labels'].numpy()[0] name = data['name'].numpy()[0] names.append(seq_len *", "version: np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64) def gen_cycles(num_cycles, batch_size,", "labels[~np.isnan(embs).any(axis=1)] assert len(embs) == len(labels) seq_labels = seq_labels[~np.isnan(embs).any(axis=1)] names =", "emb(cnn_feats, num_steps) logging.debug('On sequence number %d, frames embedded %d', n,", "self.time def done(self, target_interval): return self.elapsed() >= target_interval def reset(self):", "max_embs=None, callbacks=[]): \"\"\"Get embeddings from a one epoch iterator.\"\"\" keep_labels", "config_dict = json.load(config_file) CONFIG.update(config_dict) train_logs_dir = os.path.join(logdir, 'train.logs') if os.path.exists(train_logs_dir)", "optical_flow: frame_original_list = [] n = 0 def cond(n): if", "tf.greater_equal(global_step, boundaries), list(range(num_boundaries)), [0] * num_boundaries)) return tf.reduce_sum(rates * tf.one_hot(rate_index,", "* frames_per_batch else: num_steps = frames_per_batch 
curr_idx = i *", "training: num_steps = CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS else: num_steps = CONFIG.EVAL.NUM_FRAMES", "= np.zeros( (batch_size-1, num_steps, num_steps), dtype=np.float32) for i in range(1,", "manual_stepping(global_step, boundaries, rates): boundaries = [0] + boundaries num_boundaries =", "lr_params.DECAY_TYPE == 'poly': def lr_fn(lr, global_step): return tf.train.polynomial_decay( lr, global_step,", "assert len(embs) == len(seq_lens) assert len(embs) == len(steps) assert len(names)", "tf.io.gfile.makedirs(logdir) config_path = os.path.join(logdir, 'config.json') if not os.path.exists(config_path) or overwrite:", "= data['seq_labels'].numpy()[0] seq_labels.append(seq_len * [seq_label]) labels.append(all_labels) embs = np.concatenate(embs, axis=0)", "global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)() elif lr_params.DECAY_TYPE == 'manual': lr_step_boundaries =", "config_file: config_dict = json.load(config_file) CONFIG.update(config_dict) train_logs_dir = os.path.join(logdir, 'train.logs') if", "if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.reset_states() data,", "embs_list, 'seq_lens': seq_lens_list, 'steps': steps_list, 'names': names_list, 'seq_labels': seq_labels_list} if", "return lr # Minimally adapted from Tensorflow object_detection code. def", "warmup_percent_done = global_steps_float / warmup_steps_float warmup_lr = lr_params.INITIAL_LR * warmup_percent_done", "optical_flow: dataset['frames_original'] = frame_original_list if keep_labels: dataset['labels'] = labels_list #", "data through base CNN.\"\"\" if num_steps is None: if training:", "to do this as some modalities might not exist. 
if", "num=num_steps), axis=1) summary_im = tf.expand_dims(tf.concat([vstack(im) for im in im_list], axis=0),", "boundaries num_boundaries = len(boundaries) rate_index = tf.reduce_max( tf.where( tf.greater_equal(global_step, boundaries),", "config import CONFIG import json import tensorflow as tf import", "status = checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager, status, checkpoint def to_dict(config): if", "if optical_flow: frame_original_list.append(frame_original) n += 1 except tf.errors.OutOfRangeError: logging.info('Finished embedding", "cycle_len): \"\"\"Generate cycles for alignment.\"\"\" random_cycles = random_choice_noreplace( num_cycles, batch_size)[:,", "time.sleep(config_timeout_seconds) while True: with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict =", "num=num_steps, axis=1) frames_summaries = tf.concat(frames_list, axis=2) batch_list = tf.split(frames_summaries, batch_size,", "def lr_fn(lr, global_step): return tf.train.polynomial_decay( lr, global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0,", "num_steps) emb_feats = emb(cnn_feats, 2 * num_steps) emb_feats = tf.stack(", "for c in config] elif isinstance(config, EasyDict): return dict([(k, to_dict(v))", "== len(steps) embs_list.append(embs) if keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels) steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names)", "don't want to automatically # reset hidden states after each", "num_steps) emb_feats = tf.stack( tf.split(emb_feats, 2 * num_steps, axis=0)[::2], axis=1)", "directory for evaluation.\"\"\" tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path = os.path.join(logdir, 'config.json')", "to recurrent layers for other evaluation tasks. 
if CONFIG.MODEL.EMBEDDER_TYPE ==", "random_choice_noreplace(m, n, axis=-1): # Generate m random permuations of range", "lr_params)) def get_optimizer(optimizer_config, learning_rate): \"\"\"Returns optimizer based on config and", "that already ' 'has train_logs. Please provide a new logdir", "axis=0) labels = np.concatenate(labels, axis=0) steps = np.concatenate(steps, axis=0) seq_lens", "decay type %s not supported. Only support' 'the following decay", "range(curr_idx, curr_idx + num_steps) single_steps = np.concatenate([get_context_steps(step) for step in", "frames_per_batch curr_idx = i * frames_per_batch curr_data = {} for", "dict([(k, to_dict(v)) for k, v in CONFIG.items()]) json.dump(config, config_file, sort_keys=True,", "global_step = optimizer.iterations learning_rate = optimizer.learning_rate return learning_rate, optimizer, global_step", "image image = tf.image.decode_png(buf.getvalue(), channels=4) # Add the batch dimension", "want to automatically # reset hidden states after each batch.", "EasyDict): return dict([(k, to_dict(v)) for k, v in config.items()]) else:", "y): \"\"\"Create a pyplot, save to buffer and return TB", "exist. Going to sleep ' ' %s for secs.', config_timeout_seconds)", "insert multiple modular networks in this dict. checkpoint = tf.train.Checkpoint(**ckpt_objects)", "in ' '%s', logdir) with tf.io.gfile.GFile(config_path, 'w') as config_file: config", "embs = np.concatenate(embs, axis=0) labels = np.concatenate(labels, axis=0) steps =", "tf.cast(global_steps_int < warmup_steps_int, tf.float32) lr = (1.0 - is_warmup) *", "function f after setting proper learning phase.\"\"\" if 'training' not", "rate based on config. 
NOTE: This returns a function as", "global step.\"\"\" optimizer = get_optimizer(CONFIG.OPTIMIZER, CONFIG.OPTIMIZER.LR.INITIAL_LR) global_step = optimizer.iterations learning_rate", "return dataset def gen_plot(x, y): \"\"\"Create a pyplot, save to", "max_embs is None: return True else: return n < max_embs", "statefulness to recurrent layers for other evaluation tasks. if CONFIG.MODEL.EMBEDDER_TYPE", "CONFIG.update(config_dict) train_logs_dir = os.path.join(logdir, 'train.logs') if os.path.exists(train_logs_dir) and not force_train:", "embs = [] labels = [] steps = [] seq_lens", "CONFIG import json import tensorflow as tf import numpy as", "random_cycles def get_warmup_lr(lr, global_step, lr_params): \"\"\"Returns learning rate during warm", "* num_steps) emb_feats = emb(cnn_feats, 2 * num_steps) emb_feats =", "= tf.unstack(frames, num=batch_size, axis=0) if 'tcn' in CONFIG.TRAINING_ALGO: im_list =", "2 * num_steps, axis=0)[::2], axis=1) else: cnn_feats = get_cnn_feats(cnn, data,", "frame_original_list = [] n = 0 def cond(n): if max_embs", "= np.concatenate(seq_labels, axis=0) if keep_data: frames.append(data['frames'].numpy()[0]) frames = np.concatenate(frames, axis=0)", "names = [] seq_labels = [] if keep_data: frames =", "= data['frame_labels'].numpy()[0] name = data['name'].numpy()[0] names.append(seq_len * [name]) seq_label =", "' %s for secs.', config_timeout_seconds) time.sleep(config_timeout_seconds) while True: with tf.io.gfile.GFile(config_path,", "n = 0 def cond(n): if max_embs is None: return", "Add the batch dimension image = tf.expand_dims(image, 0) return image", "= tf.concat(frames_list, axis=2) batch_list = tf.split(frames_summaries, batch_size, axis=0) batch_summaries =", "function, this can be called to return the current learning", "buf.seek(0) # Convert PNG buffer to TF image image =", "from a one epoch iterator.\"\"\" keep_labels = keep_labels and CONFIG.DATA.FRAME_LABELS", "to True to use models in training mode. 
tf.keras.backend.set_learning_phase(1) else:", "(lambda lr, global_step: get_warmup_lr(lr_fn(lr, global_step), global_step, lr_params)) def get_optimizer(optimizer_config, learning_rate):", "__future__ import print_function from config import CONFIG import json import", "\"\"\"Visualizes a batch.\"\"\" frames = data['frames'] frames_list = tf.unstack(frames, num=num_steps,", "steps = [] seq_lens = [] names = [] seq_labels", "0 def cond(n): if max_embs is None: return True else:", "config_path = os.path.join(logdir, 'config.json') while not tf.io.gfile.exists(config_path): logging.info('Waiting for config", "params have been passed in the config. \"\"\" lr_params =", "logdir) with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict = json.load(config_file) CONFIG.update(config_dict)", "ckpt_manager = tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore(ckpt_manager.latest_checkpoint)", "== 'AdamOptimizer': opt = tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE == 'MomentumOptimizer': opt", "warmup_steps_float warmup_lr = lr_params.INITIAL_LR * warmup_percent_done is_warmup = tf.cast(global_steps_int <", "batch size. # We do this as we are embedding", "np.concatenate(seq_lens, axis=0) names = np.concatenate(names, axis=0) seq_labels = np.concatenate(seq_labels, axis=0)", "tf import numpy as np import matplotlib.pyplot as plt #", "to_dict(v)) for k, v in config.items()]) else: return config def", "frame_original = frame_original[~np.isnan(embs).any(axis=1)] embs = embs[~np.isnan(embs).any(axis=1)] assert len(embs) == len(seq_lens)", "mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img = tf.stack(nn_img_list, axis=0) im_list.append(nn_img) def vstack(im): return", "= data['frames'] image_list = tf.unstack(frames, num=batch_size, axis=0) if 'tcn' in", "logging.info('Waiting for config to exist. 
Going to sleep ' '", "get_data(iterator) seq_len = seq_len.numpy()[0] num_batches = int(math.ceil(float(seq_len)/frames_per_batch)) for i in", "tf.expand_dims(image, 0) return image class Stopwatch(object): \"\"\"Simple timer for measuring", "lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)() elif lr_params.DECAY_TYPE == 'manual': lr_step_boundaries = [int(x)", "CONFIG.update(config_dict) def get_data(iterator): \"\"\"Return a data dict which contains all", "a data dict which contains all the requested sequences.\"\"\" data", "required to initialize the learning rate and the learning rate", "else: cnn_feats = get_cnn_feats(cnn, data, training=False) emb_feats = emb(cnn_feats, num_steps)", "reset hidden states after each batch. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru':", "in emb.gru_layers: gru_layer.stateful = False return dataset def gen_plot(x, y):", "# pylint: disable=g-long-lambda if lr_params.DECAY_TYPE == 'exp_decay': def lr_fn(lr, global_step):", "CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0, cycle=False) else: raise ValueError('Learning rate decay type", "force_train: raise ValueError('You might be overwriting a directory that already", "[1, ] while cond(n): try: print(n) embs = [] labels", "return True else: return n < max_embs # Make Recurrent", "np.concatenate(seq_labels, axis=0) if keep_data: frames.append(data['frames'].numpy()[0]) frames = np.concatenate(frames, axis=0) if", "with set_learning_phase decorator which' ' does not have training argument.')", "Args: optimizer_config: EasyDict, contains params required to initialize the learning", "CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS else: num_steps = CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS cnn.num_steps", "isinstance(config, EasyDict): return dict([(k, to_dict(v)) for k, v in config.items()])", "plt.ylim(0, 1) plt.tight_layout() buf = io.BytesIO() plt.savefig(buf, format='png') buf.seek(0) #", "if optical_flow: 
frame_original.append(data['video_frames'].numpy()[0]) frame_original = np.concatenate(frame_original, axis=0) if keep_labels: labels", "== 'manual': lr_step_boundaries = [int(x) for x in lr_params.MANUAL_LR_STEP_BOUNDARIES] f", "through base CNN.\"\"\" if num_steps is None: if training: num_steps", "if config_dict is None: time.sleep(config_timeout_seconds) else: break CONFIG.update(config_dict) def get_data(iterator):", "to_dict(config): if isinstance(config, list): return [to_dict(c) for c in config]", "axis=0) batch_summaries = tf.concat(batch_list, axis=1) tf.summary.image('train_batch', batch_summaries, step=global_step) def visualize_nearest_neighbours(model,", "curr_data = {} for k, v in data.items(): # Need", "io import math import os import time from absl import", "is None: return True else: return n < max_embs #", "learning rate decay function. Returns: lr_fn: function, this can be", "set before GPUs have been initialized logging.info(e) os.environ[\"CUDA_VISIBLE_DEVICES\"] = str(ind)", "is corrupted\" return config def prepare_gpu(ind=-1): ind = int(ind) GPUS", "buffer and return TB compatible image.\"\"\" plt.figure() plt.plot(x, y) plt.title('Val", "which contains all the requested sequences.\"\"\" data = iterator.get_next() return", "= labels_list # Reset statefulness to recurrent layers for other", "to use models in training mode. tf.keras.backend.set_learning_phase(1) else: # Set", "CONFIG.TRAINING_ALGO: img_list = tf.unstack(image_list[i], num=2 * num_steps * num_frames_per_step, axis=0)[num_frames_per_step", "y) plt.title('Val Accuracy') plt.ylim(0, 1) plt.tight_layout() buf = io.BytesIO() plt.savefig(buf,", "we can insert multiple modular networks in this dict. checkpoint", "this as some modalities might not exist. 
if len(v.shape) >", "if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful =", "lr_step_boundaries = [int(x) for x in lr_params.MANUAL_LR_STEP_BOUNDARIES] f = lr_params.MANUAL_LR_DECAY_RATE", "take # more than one batch to be passed and", "seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len * [seq_len]) all_labels = data['frame_labels'].numpy()[0] name =", "json.load(f) assert config is not None, \"config file is not", "to use models in inference mode. tf.keras.backend.set_learning_phase(0) cnn = model['cnn']", "checkpoint = tf.train.Checkpoint(**ckpt_objects) ckpt_manager = tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1)", "keep_checkpoint_every_n_hours=1) status = checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager, status, checkpoint def to_dict(config):", "emb.gru_layers: gru_layer.reset_states() data, chosen_steps, seq_len = get_data(iterator) seq_len = seq_len.numpy()[0]", "1) * frames_per_batch > seq_len: num_steps = seq_len - i", "in callbacks: f(np.concatenate(embs), data, chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len * [seq_len])", "= [0] + boundaries num_boundaries = len(boundaries) rate_index = tf.reduce_max(", "to buffer and return TB compatible image.\"\"\" plt.figure() plt.plot(x, y)", "' 'has train_logs. 
Please provide a new logdir name in", "NumPy version: np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64) def gen_cycles(num_cycles,", "split, summary_im, step=global_step) # Convert sim_matrix to float32 as summary_image", "set_learning_phase decorator which' ' does not have training argument.') training", "curr_data, num_steps=num_frames_per_step * num_steps, training=False) emb_feats = emb(cnn_feats, num_steps) logging.debug('On", "seq_lens[~np.isnan(embs).any(axis=1)] steps = steps[~np.isnan(embs).any(axis=1)] if keep_data: frames = frames[~np.isnan(embs).any(axis=1)] if", "= True gru_layer.input_spec[0].shape = [1, ] while cond(n): try: print(n)", "labels = [] steps = [] seq_lens = [] names", "model['cnn'] emb = model['emb'] embs_list = [] labels_list = []", "in training mode. tf.keras.backend.set_learning_phase(1) else: # Set learning_phase to False", "num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step][::2] else: img_list = tf.unstack(image_list[i],", "np.concatenate(frame_original, axis=0) if keep_labels: labels = labels[~np.isnan(embs).any(axis=1)] assert len(embs) ==", "= seq_lens[~np.isnan(embs).any(axis=1)] steps = steps[~np.isnan(embs).any(axis=1)] if keep_data: frames = frames[~np.isnan(embs).any(axis=1)]", "to see the future. steps = np.arange(step - (num_steps -", "to exist. Going to sleep ' ' %s for secs.',", "== len(seq_lens) assert len(embs) == len(steps) assert len(names) == len(steps)", "> seq_len: num_steps = seq_len - i * frames_per_batch else:", "idxes, axis=1) else: curr_data[k] = v cnn_feats = get_cnn_feats(cnn, curr_data,", "k, v in data.items(): # Need to do this as", "== 'poly': def lr_fn(lr, global_step): return tf.train.polynomial_decay( lr, global_step, CONFIG.TRAIN.MAX_ITERS,", "learning rate and the learning rate decay function. 
Returns: lr_fn:", "'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful = False return dataset", "correct learning phase before calling function f.\"\"\" def wrapper(*args, **kwargs):", "optimizer based on config and learning rate.\"\"\" if optimizer_config.TYPE ==", "axis=0)[::2], axis=1) else: cnn_feats = get_cnn_feats(cnn, data, training=False) emb_feats =", "gen_cycles(num_cycles, batch_size, cycle_len): \"\"\"Generate cycles for alignment.\"\"\" random_cycles = random_choice_noreplace(", "num_steps = CONFIG.EVAL.NUM_FRAMES * CONFIG.DATA.NUM_STEPS cnn.num_steps = num_steps cnn_feats =", "data['video_frames'] else: frames = data['frames'] image_list = tf.unstack(frames, num=batch_size, axis=0)", "seq_lens = seq_lens[~np.isnan(embs).any(axis=1)] steps = steps[~np.isnan(embs).any(axis=1)] if keep_data: frames =", "n += 1 except tf.errors.OutOfRangeError: logging.info('Finished embedding the dataset.') break", "if ind > -1: tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try: # Currently, memory", "{'embs': embs_list, 'seq_lens': seq_lens_list, 'steps': steps_list, 'names': names_list, 'seq_labels': seq_labels_list}", "frames_list = [] if optical_flow: frame_original_list = [] n =", "= model['cnn'] emb = model['emb'] if 'tcn' in CONFIG.TRAINING_ALGO: cnn_feats", "= np.concatenate(frames, axis=0) if optical_flow: frame_original.append(data['video_frames'].numpy()[0]) frame_original = np.concatenate(frame_original, axis=0)", "def wrapper(*args, **kwargs): \"\"\"Calls the function f after setting proper", "\"config file is not provided or is corrupted\" return config", "size. 
# We do this as we are embedding the", "logging from easydict import EasyDict import matplotlib matplotlib.use('Agg') FLAGS =", "np.concatenate(labels, axis=0) steps = np.concatenate(steps, axis=0) seq_lens = np.concatenate(seq_lens, axis=0)", "'steps': steps_list, 'names': names_list, 'seq_labels': seq_labels_list} if keep_data: dataset['frames'] =", "range(num_steps): curr_query_feats = tf.tile(query_feats[j:j+1], [num_steps, 1]) mean_squared_distance = tf.reduce_mean( tf.math.squared_difference(curr_query_feats,", "import division from __future__ import print_function from config import CONFIG", "return data, data['chosen_steps'], data['seq_lens'] @tf.function def get_cnn_feats(cnn, data, training, num_steps=None):", "global_step, lr_step_boundaries, learning_rate_sequence) elif lr_params.DECAY_TYPE == 'fixed': def lr_fn(lr, global_step):", "name = data['name'].numpy()[0] names.append(seq_len * [name]) seq_label = data['seq_labels'].numpy()[0] seq_labels.append(seq_len", "if num_steps is None: if training: num_steps = CONFIG.TRAIN.NUM_FRAMES *", "== len(labels) seq_labels = seq_labels[~np.isnan(embs).any(axis=1)] names = names[~np.isnan(embs).any(axis=1)] seq_lens =", "tf.summary.image('train_batch', batch_summaries, step=global_step) def visualize_nearest_neighbours(model, data, global_step, batch_size, num_steps, num_frames_per_step,", "sim_matrix to float32 as summary_image doesn't take float64 sim_matrix =", "f after setting proper learning phase.\"\"\" if 'training' not in", "num_steps, seq_len) curr_data[k] = tf.gather(v, idxes, axis=1) else: curr_data[k] =", "single_steps = np.concatenate([get_context_steps(step) for step in steps]) single_steps = np.concatenate(np.array(list(map(get_context_steps,", "# NumPy version: np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m, n)), axis=axis), tf.int64) def", "embs_list.append(embs) if keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels) 
steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names) if keep_data:", "sleep ' ' %s for secs.', config_timeout_seconds) time.sleep(config_timeout_seconds) while True:", "global_step): return lr_params.INITIAL_LR elif lr_params.DECAY_TYPE == 'poly': def lr_fn(lr, global_step):", "CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful = True", "is None: if training: num_steps = CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS else:", "learning_rate): \"\"\"Returns optimizer based on config and learning rate.\"\"\" if", "img_list = tf.unstack(image_list[i], num=2 * num_steps * num_frames_per_step, axis=0)[num_frames_per_step -", "while launching script.') tf.io.gfile.makedirs(train_logs_dir) def setup_eval_dir(logdir, config_timeout_seconds=1): \"\"\"Setups directory for", "if max_embs is None: return True else: return n <", "if optical_flow: frame_original_list = [] n = 0 def cond(n):", "* num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step] nn_img_list = [] for j", "the function f after setting proper learning phase.\"\"\" if 'training'", "We don't want to see the future. steps = np.arange(step", "dict. checkpoint = tf.train.Checkpoint(**ckpt_objects) ckpt_manager = tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10,", "(batch_size-1, num_steps, num_steps), dtype=np.float32) for i in range(1, batch_size): candidate_feats", "setup_train_dir(logdir, overwrite=False, force_train=True): \"\"\"Setups directory for training.\"\"\" tf.io.gfile.makedirs(logdir) config_path =", "emb_feats = emb(cnn_feats, 2 * num_steps) emb_feats = tf.stack( tf.split(emb_feats,", "True to use models in training mode. 
tf.keras.backend.set_learning_phase(1) else: #", "status, checkpoint def restore_ckpt(logdir, **ckpt_objects): \"\"\"Create and restore checkpoint (if", "seq_len: num_steps = seq_len - i * frames_per_batch else: num_steps", "config.json that exists in %s.', logdir) with tf.io.gfile.GFile(config_path, 'r') as", "cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'), cnn_feats.numpy()) #", "batch_size)[:, :cycle_len] return random_cycles def get_warmup_lr(lr, global_step, lr_params): \"\"\"Returns learning", "plt.plot(x, y) plt.title('Val Accuracy') plt.ylim(0, 1) plt.tight_layout() buf = io.BytesIO()", "= 0 def cond(n): if max_embs is None: return True", "rate decay type %s not supported. Only support' 'the following", "try: # Currently, memory growth needs to be the same", "== len(steps) assert len(names) == len(steps) embs_list.append(embs) if keep_labels: labels_list.append(labels)", "does not have training argument.') training = kwargs['training'] if training:", "n)), axis=axis), tf.int64) def gen_cycles(num_cycles, batch_size, cycle_len): \"\"\"Generate cycles for", "a directory that already ' 'has train_logs. Please provide a", "training=False) emb_feats = emb(cnn_feats, num_steps) emb_feats = tf.stack(tf.split(emb_feats, num_steps, axis=0),", "frame_original[~np.isnan(embs).any(axis=1)] embs = embs[~np.isnan(embs).any(axis=1)] assert len(embs) == len(seq_lens) assert len(embs)", "None: if training: num_steps = CONFIG.TRAIN.NUM_FRAMES * CONFIG.DATA.NUM_STEPS else: num_steps", "networks in this dict. 
checkpoint = tf.train.Checkpoint(**ckpt_objects) ckpt_manager = tf.train.CheckpointManager(", "embs.append(emb_feats.numpy()) for f in callbacks: f(np.concatenate(embs), data, chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0])", "import matplotlib matplotlib.use('Agg') FLAGS = flags.FLAGS def visualize_batch(data, global_step, batch_size,", "emb_feats = emb(cnn_feats, num_steps) emb_feats = tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1)", "= [image_list[0] [num_frames_per_step - 1::num_frames_per_step][::2]] else: im_list = [image_list[0][num_frames_per_step -", "step=global_step) def visualize_nearest_neighbours(model, data, global_step, batch_size, num_steps, num_frames_per_step, split): \"\"\"Visualize", "tf.summary.image('%s/nn' % split, summary_im, step=global_step) # Convert sim_matrix to float32", "def get_data(iterator): \"\"\"Return a data dict which contains all the", "function f.\"\"\" def wrapper(*args, **kwargs): \"\"\"Calls the function f after", "= model['emb'] if 'tcn' in CONFIG.TRAINING_ALGO: cnn_feats = get_cnn_feats( cnn,", "= get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step * num_steps, training=False) emb_feats = emb(cnn_feats,", "video. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.reset_states()", "support' 'the following decay types: fixed, exp_decay, manual,' 'and poly.')", "layers for other evaluation tasks. 
if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for", "'w'), cnn_feats.numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_emb_old.npy', 'w'), emb_feats.numpy()) embs.append(emb_feats.numpy()) for f in", "phase before calling function f.\"\"\" def wrapper(*args, **kwargs): \"\"\"Calls the", "config_timeout_seconds=1): \"\"\"Setups directory for evaluation.\"\"\" tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path =", "evaluation.\"\"\" tf.io.gfile.makedirs(logdir) tf.io.gfile.makedirs(os.path.join(logdir, 'eval_logs')) config_path = os.path.join(logdir, 'config.json') while not", "as plt # pylint: disable=g-import-not-at-top import io import math import", "directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager, status, checkpoint", "the existing passed in config as no config.json file exists", "return tf.train.exponential_decay( lr, global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)() elif lr_params.DECAY_TYPE ==", "must be set before GPUs have been initialized logging.info(e) os.environ[\"CUDA_VISIBLE_DEVICES\"]", "/ warmup_steps_float warmup_lr = lr_params.INITIAL_LR * warmup_percent_done is_warmup = tf.cast(global_steps_int", "= json.load(config_file) CONFIG.update(config_dict) train_logs_dir = os.path.join(logdir, 'train.logs') if os.path.exists(train_logs_dir) and", "tf.cast(warmup_steps_int, tf.float32) warmup_percent_done = global_steps_float / warmup_steps_float warmup_lr = lr_params.INITIAL_LR", "in kwargs: raise ValueError('Function called with set_learning_phase decorator which' '", "single_steps = np.minimum(seq_len, single_steps) return single_steps def get_embeddings_dataset(model, iterator, frames_per_batch,", "matplotlib.use('Agg') FLAGS = flags.FLAGS def visualize_batch(data, global_step, batch_size, num_steps): \"\"\"Visualizes", "multiple modular networks in this dict. 
checkpoint = tf.train.Checkpoint(**ckpt_objects) ckpt_manager", "optical_flow=False, keep_labels=True, max_embs=None, callbacks=[]): \"\"\"Get embeddings from a one epoch", "as config_file: config_dict = json.load(config_file) if config_dict is None: time.sleep(config_timeout_seconds)", "return wrapper def load_config(config_path): config = None if os.path.exists(config_path): with", "num_steps, num_steps), dtype=np.float32) for i in range(1, batch_size): candidate_feats =", "if not os.path.exists(config_path) or overwrite: logging.info( 'Using the existing passed", "= tf.cast(global_step, tf.int32) warmup_steps_int = tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float =", "checkpoint def to_dict(config): if isinstance(config, list): return [to_dict(c) for c", "for secs.', config_timeout_seconds) time.sleep(config_timeout_seconds) while True: with tf.io.gfile.GFile(config_path, 'r') as", "number %d, frames embedded %d', n, curr_idx + num_steps) #", "% split, np.expand_dims(sim_matrix, axis=3), step=global_step) def softmax(w, t=1.0): e =", "global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0, cycle=False) else: raise ValueError('Learning rate decay", "'optimizers: AdamOptimizer, MomentumOptimizer .') return opt def get_lr_opt_global_step(): \"\"\"Intializes learning", "= tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1) query_feats = emb_feats[0] if CONFIG.OPTICALFLOW:", "# Set learning_phase to False to use models in inference", "'Using the existing passed in config as no config.json file", "'w') as config_file: config = dict([(k, to_dict(v)) for k, v", "learning rate.\"\"\" if optimizer_config.TYPE == 'AdamOptimizer': opt = tf.keras.optimizers.Adam(learning_rate=learning_rate) elif", "= emb_feats[i] if 'tcn' in CONFIG.TRAINING_ALGO: img_list = tf.unstack(image_list[i], num=2", "num_steps, axis=0), axis=1) query_feats = emb_feats[0] if CONFIG.OPTICALFLOW: frames =", "1]) mean_squared_distance = tf.reduce_mean( 
tf.math.squared_difference(curr_query_feats, candidate_feats), axis=1) sim_matrix[i-1, j] =", "# Reset statefulness to recurrent layers for other evaluation tasks.", "!= 0: idxes = get_indices(curr_idx, num_steps, seq_len) curr_data[k] = tf.gather(v,", "tf.stack(tf.split(emb_feats, num_steps, axis=0), axis=1) query_feats = emb_feats[0] if CONFIG.OPTICALFLOW: frames", "labels.append(all_labels) embs = np.concatenate(embs, axis=0) labels = np.concatenate(labels, axis=0) steps", "function. Returns: lr_fn: function, this can be called to return", "time.sleep(config_timeout_seconds) else: break CONFIG.update(config_dict) def get_data(iterator): \"\"\"Return a data dict", "1::num_frames_per_step] nn_img_list = [] for j in range(num_steps): curr_query_feats =", "= json.load(config_file) if config_dict is None: time.sleep(config_timeout_seconds) else: break CONFIG.update(config_dict)", "[] seq_labels_list = [] if keep_data: frames_list = [] if", "[num_frames_per_step - 1::num_frames_per_step][::2]] else: im_list = [image_list[0][num_frames_per_step - 1::num_frames_per_step]] sim_matrix", "Raises: ValueError: in case invalid params have been passed in", "poly.') return (lambda lr, global_step: get_warmup_lr(lr_fn(lr, global_step), global_step, lr_params)) def", "based on config. NOTE: This returns a function as in", "lr = (1.0 - is_warmup) * lr + is_warmup *", "{} for k, v in data.items(): # Need to do", "Recurrent Layers stateful, set batch size. 
# We do this", "neighbours in embedding space.\"\"\" # Set learning_phase to False to", "all_labels = data['frame_labels'].numpy()[0] name = data['name'].numpy()[0] names.append(seq_len * [name]) seq_label", "frames = np.concatenate(frames, axis=0) if optical_flow: frame_original.append(data['video_frames'].numpy()[0]) frame_original = np.concatenate(frame_original,", "0: idxes = get_indices(curr_idx, num_steps, seq_len) curr_data[k] = tf.gather(v, idxes,", "and CONFIG.DATA.FRAME_LABELS num_frames_per_step = CONFIG.DATA.NUM_STEPS cnn = model['cnn'] emb =", "single_steps = np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx + num_steps))))) single_steps = np.maximum(0,", "gpu in GPUS: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus = tf.config.experimental.list_logical_devices('GPU') logging.info([len(GPUS), \"Physical", "axis=0), axis=1) query_feats = emb_feats[0] if CONFIG.OPTICALFLOW: frames = data['video_frames']", "e = np.exp(np.array(w) / t) dist = e / np.sum(e)", "tf.unstack(frames, num=batch_size, axis=0) if 'tcn' in CONFIG.TRAINING_ALGO: im_list = [image_list[0]", "global_step, batch_size, num_steps): \"\"\"Visualizes a batch.\"\"\" frames = data['frames'] frames_list", "elif lr_params.DECAY_TYPE == 'poly': def lr_fn(lr, global_step): return tf.train.polynomial_decay( lr,", "1)] def lr_fn(lr, global_step): return manual_stepping( global_step, lr_step_boundaries, learning_rate_sequence) elif", "might not exist. 
if len(v.shape) > 1 and v.shape[1] !=", "\"\"\"Calls the function f after setting proper learning phase.\"\"\" if", "global_step): return tf.train.polynomial_decay( lr, global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0, cycle=False) else:", "= np.concatenate([get_context_steps(step) for step in steps]) single_steps = np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx,", "import EasyDict import matplotlib matplotlib.use('Agg') FLAGS = flags.FLAGS def visualize_batch(data,", "CONFIG.DATA.FRAME_STRIDE # We don't want to see the future. steps", "f in callbacks: f(np.concatenate(embs), data, chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len *", "some modalities might not exist. if len(v.shape) > 1 and", "max_embs # Make Recurrent Layers stateful, set batch size. #", "return ckpt_manager, status, checkpoint def to_dict(config): if isinstance(config, list): return", "data['name'].numpy()[0] names.append(seq_len * [name]) seq_label = data['seq_labels'].numpy()[0] seq_labels.append(seq_len * [seq_label])", "cnn.num_steps = num_steps cnn_feats = cnn(data['frames']) return cnn_feats def get_context_steps(step):", "return tf.train.polynomial_decay( lr, global_step, CONFIG.TRAIN.MAX_ITERS, end_learning_rate=0.0, power=1.0, cycle=False) else: raise", "\"\"\"Setups directory for training.\"\"\" tf.io.gfile.makedirs(logdir) config_path = os.path.join(logdir, 'config.json') if", "Layers stateful, set batch size. 
# We do this as", "the requested sequences.\"\"\" data = iterator.get_next() return data, data['chosen_steps'], data['seq_lens']", "= frame_original[~np.isnan(embs).any(axis=1)] embs = embs[~np.isnan(embs).any(axis=1)] assert len(embs) == len(seq_lens) assert", "logging.info( 'Using config from config.json that exists in %s.', logdir)", "axis=-1): # Generate m random permuations of range (0, n)", "optimizer_config.LR # pylint: disable=g-long-lambda if lr_params.DECAY_TYPE == 'exp_decay': def lr_fn(lr,", "else: return config def setup_train_dir(logdir, overwrite=False, force_train=True): \"\"\"Setups directory for", "* warmup_lr return lr # Minimally adapted from Tensorflow object_detection", "invalid params have been passed in the config. \"\"\" lr_params", "flags from absl import logging from easydict import EasyDict import", "* lr + is_warmup * warmup_lr return lr # Minimally", "for config to exist. Going to sleep ' ' %s", "emb_feats = tf.stack( tf.split(emb_feats, 2 * num_steps, axis=0)[::2], axis=1) else:", "emb = model['emb'] if 'tcn' in CONFIG.TRAINING_ALGO: cnn_feats = get_cnn_feats(", "if optical_flow: dataset['frames_original'] = frame_original_list if keep_labels: dataset['labels'] = labels_list", "tf.train.CheckpointManager( checkpoint, directory=logdir, max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore(ckpt_manager.latest_checkpoint) return ckpt_manager,", "mode. tf.keras.backend.set_learning_phase(0) cnn = model['cnn'] emb = model['emb'] if 'tcn'", "with tf.io.gfile.GFile(config_path, 'w') as config_file: config = dict([(k, to_dict(v)) for", "that provides current learning rate based on config. NOTE: This", "want to see the future. 
steps = np.arange(step - (num_steps", "steps[~np.isnan(embs).any(axis=1)] if keep_data: frames = frames[~np.isnan(embs).any(axis=1)] if optical_flow: frame_original =", "tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float = tf.cast(global_steps_int, tf.float32) warmup_steps_float = tf.cast(warmup_steps_int,", "wrapper(*args, **kwargs): \"\"\"Calls the function f after setting proper learning", "\"\"\"Returns learning rate during warm up phase.\"\"\" if lr_params.NUM_WARMUP_STEPS >", "[image_list[0][num_frames_per_step - 1::num_frames_per_step]] sim_matrix = np.zeros( (batch_size-1, num_steps, num_steps), dtype=np.float32)", "get_context_steps(step): num_steps = CONFIG.DATA.NUM_STEPS stride = CONFIG.DATA.FRAME_STRIDE # We don't", "lr_params.MANUAL_LR_STEP_BOUNDARIES] f = lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence = [(lr_params.INITIAL_LR) * f**p for", "+ num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"])", "based on the provided config. Raises: ValueError: in case invalid", "chosen_steps, seq_len = get_data(iterator) seq_len = seq_len.numpy()[0] num_batches = int(math.ceil(float(seq_len)/frames_per_batch))", "curr_idx = i * frames_per_batch curr_data = {} for k,", "= e / np.sum(e) return dist def random_choice_noreplace(m, n, axis=-1):", "return dist def random_choice_noreplace(m, n, axis=-1): # Generate m random", "been passed in the config. 
\"\"\" lr_params = optimizer_config.LR #", "= optimizer.learning_rate return learning_rate, optimizer, global_step def create_ckpt(logdir, restore=False, **ckpt_objects):", "from easydict import EasyDict import matplotlib matplotlib.use('Agg') FLAGS = flags.FLAGS", "num_steps, seq_len): steps = range(curr_idx, curr_idx + num_steps) single_steps =", "pylint: disable=g-import-not-at-top import io import math import os import time", "def lr_fn(lr, global_step): return lr_params.INITIAL_LR elif lr_params.DECAY_TYPE == 'poly': def", "# pylint: disable=g-import-not-at-top import io import math import os import", "we need to call assign to update the learning rate.", "with open(config_path) as f: config = json.load(f) assert config is", "> 1 and v.shape[1] != 0: idxes = get_indices(curr_idx, num_steps,", "else: # Set learning_phase to False to use models in", "this dict. checkpoint = tf.train.Checkpoint(**ckpt_objects) ckpt_manager = tf.train.CheckpointManager( checkpoint, directory=logdir,", "tf.keras.backend.set_learning_phase(0) cnn = model['cnn'] emb = model['emb'] if 'tcn' in", "flags.FLAGS def visualize_batch(data, global_step, batch_size, num_steps): \"\"\"Visualizes a batch.\"\"\" frames", "- 1::num_frames_per_step][::2] else: img_list = tf.unstack(image_list[i], num=num_steps * num_frames_per_step, axis=0)[num_frames_per_step", "NOTE: This returns a function as in Eager we need", "def lr_fn(lr, global_step): return manual_stepping( global_step, lr_step_boundaries, learning_rate_sequence) elif lr_params.DECAY_TYPE", "restore checkpoint (if one exists on the path).\"\"\" # Instantiate", "image.\"\"\" plt.figure() plt.plot(x, y) plt.title('Val Accuracy') plt.ylim(0, 1) plt.tight_layout() buf", "[0] * num_boundaries)) return tf.reduce_sum(rates * tf.one_hot(rate_index, depth=num_boundaries)) def get_lr_fn(optimizer_config):", "current learning rate based on config. 
NOTE: This returns a", "num=num_steps * num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step] nn_img_list = [] for", "not supported. Only support' 'the following decay types: fixed, exp_decay,", "cycle=False) else: raise ValueError('Learning rate decay type %s not supported.", "for k, v in data.items(): # Need to do this", "tasks. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers: gru_layer.stateful", "= optimizer_config.LR # pylint: disable=g-long-lambda if lr_params.DECAY_TYPE == 'exp_decay': def", "batch dimension image = tf.expand_dims(image, 0) return image class Stopwatch(object):", "'has train_logs. Please provide a new logdir name in '", "return time.time() - self.time def done(self, target_interval): return self.elapsed() >=", "keep_labels: dataset['labels'] = labels_list # Reset statefulness to recurrent layers", "single_steps) single_steps = np.minimum(seq_len, single_steps) return single_steps def get_embeddings_dataset(model, iterator,", "os.path.exists(config_path) or overwrite: logging.info( 'Using the existing passed in config", "= seq_len - i * frames_per_batch else: num_steps = frames_per_batch", "inference mode. tf.keras.backend.set_learning_phase(0) return f(*args, **kwargs) return wrapper def load_config(config_path):", "Memory growth must be set before GPUs have been initialized", "f: config = json.load(f) assert config is not None, \"config", "keep_labels=True, max_embs=None, callbacks=[]): \"\"\"Get embeddings from a one epoch iterator.\"\"\"", "# Since model is a dict we can insert multiple", "raise ValueError('Optimizer %s not supported. 
Only support the following' 'optimizers:", "keep_data: frames = frames[~np.isnan(embs).any(axis=1)] if optical_flow: frame_original = frame_original[~np.isnan(embs).any(axis=1)] embs", "emb_feats[0] if CONFIG.OPTICALFLOW: frames = data['video_frames'] else: frames = data['frames']", "data['seq_labels'].numpy()[0] seq_labels.append(seq_len * [seq_label]) labels.append(all_labels) embs = np.concatenate(embs, axis=0) labels", "axis=0) seq_labels = np.concatenate(seq_labels, axis=0) if keep_data: frames.append(data['frames'].numpy()[0]) frames =", "= io.BytesIO() plt.savefig(buf, format='png') buf.seek(0) # Convert PNG buffer to", "seq_labels_list = [] if keep_data: frames_list = [] if optical_flow:", "split, np.expand_dims(sim_matrix, axis=3), step=global_step) def softmax(w, t=1.0): e = np.exp(np.array(w)", "os.path.join(logdir, 'config.json') while not tf.io.gfile.exists(config_path): logging.info('Waiting for config to exist.", "else: curr_data[k] = v cnn_feats = get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step *", "'fixed': def lr_fn(lr, global_step): return lr_params.INITIAL_LR elif lr_params.DECAY_TYPE == 'poly':", "= [int(x) for x in lr_params.MANUAL_LR_STEP_BOUNDARIES] f = lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence", "Currently, memory growth needs to be the same across GPUs", "config = dict([(k, to_dict(v)) for k, v in CONFIG.items()]) json.dump(config,", "data.items(): # Need to do this as some modalities might", "axis=1) summary_im = tf.expand_dims(tf.concat([vstack(im) for im in im_list], axis=0), axis=0)", "config_timeout_seconds) time.sleep(config_timeout_seconds) while True: with tf.io.gfile.GFile(config_path, 'r') as config_file: config_dict", "- self.time def done(self, target_interval): return self.elapsed() >= target_interval def", "len(boundaries) rate_index = tf.reduce_max( tf.where( tf.greater_equal(global_step, boundaries), list(range(num_boundaries)), [0] *", "model['emb'] if 'tcn' in CONFIG.TRAINING_ALGO: cnn_feats = 
get_cnn_feats( cnn, data,", "data, chosen_steps, seq_len = get_data(iterator) seq_len = seq_len.numpy()[0] num_batches =", "axis=0) seq_lens = np.concatenate(seq_lens, axis=0) names = np.concatenate(names, axis=0) seq_labels", "frames = data['video_frames'] else: frames = data['frames'] image_list = tf.unstack(frames,", "v in data.items(): # Need to do this as some", "[] names = [] seq_labels = [] if keep_data: frames", "0) return image class Stopwatch(object): \"\"\"Simple timer for measuring elapsed", "else: logging.info( 'Using config from config.json that exists in %s.',", "keep_checkpoint_every_n_hours=1) status = checkpoint.restore( ckpt_manager.latest_checkpoint) if restore else -1 return", "n, curr_idx + num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy',", "* CONFIG.DATA.NUM_STEPS cnn.num_steps = num_steps cnn_feats = cnn(data['frames']) return cnn_feats", "keep_data: frames.append(data['frames'].numpy()[0]) frames = np.concatenate(frames, axis=0) if optical_flow: frame_original.append(data['video_frames'].numpy()[0]) frame_original", "seq_len): steps = range(curr_idx, curr_idx + num_steps) single_steps = np.concatenate([get_context_steps(step)", "get_indices(curr_idx, num_steps, seq_len) curr_data[k] = tf.gather(v, idxes, axis=1) else: curr_data[k]", "be set before GPUs have been initialized logging.info(e) os.environ[\"CUDA_VISIBLE_DEVICES\"] =", "warmup_steps_float = tf.cast(warmup_steps_int, tf.float32) warmup_percent_done = global_steps_float / warmup_steps_float warmup_lr", "CONFIG.DATA.NUM_STEPS cnn.num_steps = num_steps cnn_feats = cnn(data['frames']) return cnn_feats def", "in data.items(): # Need to do this as some modalities", "is not None, \"config file is not provided or is", "= steps[~np.isnan(embs).any(axis=1)] if keep_data: frames = frames[~np.isnan(embs).any(axis=1)] if optical_flow: frame_original", "random_cycles = 
random_choice_noreplace( num_cycles, batch_size)[:, :cycle_len] return random_cycles def get_warmup_lr(lr,", "lr_params): \"\"\"Returns learning rate during warm up phase.\"\"\" if lr_params.NUM_WARMUP_STEPS", "* stride, step + stride, stride) return steps def get_indices(curr_idx,", "= np.concatenate(seq_lens, axis=0) names = np.concatenate(names, axis=0) seq_labels = np.concatenate(seq_labels,", "boundaries = [0] + boundaries num_boundaries = len(boundaries) rate_index =", "is_warmup) * lr + is_warmup * warmup_lr return lr #", "True else: return n < max_embs # Make Recurrent Layers", "ValueError('Optimizer %s not supported. Only support the following' 'optimizers: AdamOptimizer,", "return [to_dict(c) for c in config] elif isinstance(config, EasyDict): return", "* num_steps, axis=0)[::2], axis=1) else: cnn_feats = get_cnn_feats(cnn, data, training=False)", "%d', n, curr_idx + num_steps) # np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy()) #", "1::num_frames_per_step][::2]] else: im_list = [image_list[0][num_frames_per_step - 1::num_frames_per_step]] sim_matrix = np.zeros(", "tf.keras.backend.set_learning_phase(0) return f(*args, **kwargs) return wrapper def load_config(config_path): config =", "= (1.0 - is_warmup) * lr + is_warmup * warmup_lr", "keep_data: frames = [] if optical_flow: frame_original = [] #", "in case invalid params have been passed in the config.", "[int(x) for x in lr_params.MANUAL_LR_STEP_BOUNDARIES] f = lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence =", "for k, v in config.items()]) else: return config def setup_train_dir(logdir,", "dist def random_choice_noreplace(m, n, axis=-1): # Generate m random permuations", "base CNN.\"\"\" if num_steps is None: if training: num_steps =", "i * frames_per_batch else: num_steps = frames_per_batch curr_idx = i", "if 'tcn' in CONFIG.TRAINING_ALGO: cnn_feats = get_cnn_feats( cnn, data, training=False,", "global_steps_int = tf.cast(global_step, tf.int32) 
warmup_steps_int = tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float", "num_cycles, batch_size)[:, :cycle_len] return random_cycles def get_warmup_lr(lr, global_step, lr_params): \"\"\"Returns", "lr_params.DECAY_TYPE == 'manual': lr_step_boundaries = [int(x) for x in lr_params.MANUAL_LR_STEP_BOUNDARIES]", "= labels[~np.isnan(embs).any(axis=1)] assert len(embs) == len(labels) seq_labels = seq_labels[~np.isnan(embs).any(axis=1)] names", "tf.config.experimental.set_visible_devices(GPUS[ind], 'GPU') try: # Currently, memory growth needs to be", "the learning rate and the learning rate decay function. Returns:", "= tf.unstack(frames, num=num_steps, axis=1) frames_summaries = tf.concat(frames_list, axis=2) batch_list =", "len(names) == len(steps) embs_list.append(embs) if keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels) steps_list.append(steps) seq_lens_list.append(seq_lens)", "os.path.exists(config_path): with open(config_path) as f: config = json.load(f) assert config", "the learning rate decay function. Returns: lr_fn: function, this can", "global_step), global_step, lr_params)) def get_optimizer(optimizer_config, learning_rate): \"\"\"Returns optimizer based on", "mode. tf.keras.backend.set_learning_phase(0) return f(*args, **kwargs) return wrapper def load_config(config_path): config", "in lr_params.MANUAL_LR_STEP_BOUNDARIES] f = lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence = [(lr_params.INITIAL_LR) * f**p", "mode. 
tf.keras.backend.set_learning_phase(1) else: # Set learning_phase to False to use", "global_step, lr_params)) def get_optimizer(optimizer_config, learning_rate): \"\"\"Returns optimizer based on config", "tf.int64) def gen_cycles(num_cycles, batch_size, cycle_len): \"\"\"Generate cycles for alignment.\"\"\" random_cycles", "data['frame_labels'].numpy()[0] name = data['name'].numpy()[0] names.append(seq_len * [name]) seq_label = data['seq_labels'].numpy()[0]", "frames_list = tf.unstack(frames, num=num_steps, axis=1) frames_summaries = tf.concat(frames_list, axis=2) batch_list", "data, training=False) emb_feats = emb(cnn_feats, num_steps) emb_feats = tf.stack(tf.split(emb_feats, num_steps,", "in inference mode. tf.keras.backend.set_learning_phase(0) return f(*args, **kwargs) return wrapper def", "frames_summaries = tf.concat(frames_list, axis=2) batch_list = tf.split(frames_summaries, batch_size, axis=0) batch_summaries", "each video. if CONFIG.MODEL.EMBEDDER_TYPE == 'convgru': for gru_layer in emb.gru_layers:", "growth must be set before GPUs have been initialized logging.info(e)", "keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels) steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names) if keep_data: frames_list.append(frames) if", "f**p for p in range(len(lr_step_boundaries) + 1)] def lr_fn(lr, global_step):", "is None: time.sleep(config_timeout_seconds) else: break CONFIG.update(config_dict) def get_data(iterator): \"\"\"Return a", "except tf.errors.OutOfRangeError: logging.info('Finished embedding the dataset.') break dataset = {'embs':", "range (0, n) # NumPy version: np.random.rand(m,n).argsort(axis=axis) return tf.cast(tf.argsort(tf.random.uniform((m, n)),", "embs[~np.isnan(embs).any(axis=1)] assert len(embs) == len(seq_lens) assert len(embs) == len(steps) assert", "+ 1)] def lr_fn(lr, global_step): return manual_stepping( global_step, lr_step_boundaries, learning_rate_sequence)", "time.time() - self.time def 
done(self, target_interval): return self.elapsed() >= target_interval", "pylint: disable=g-long-lambda if lr_params.DECAY_TYPE == 'exp_decay': def lr_fn(lr, global_step): return", "__future__ import absolute_import from __future__ import division from __future__ import", "cnn_feats = get_cnn_feats( cnn, data, training=False, num_steps=2 * num_steps) emb_feats", "'Using config from config.json that exists in %s.', logdir) with", "do this as we are embedding the whole sequence and", "# Set learning_phase to True to use models in training", "and the learning rate decay function. Returns: lr_fn: function, this", "embedding space.\"\"\" # Set learning_phase to False to use models", "np.expand_dims(sim_matrix, axis=3), step=global_step) def softmax(w, t=1.0): e = np.exp(np.array(w) /", "max_to_keep=10, keep_checkpoint_every_n_hours=1) status = checkpoint.restore( ckpt_manager.latest_checkpoint) if restore else -1", "axis=1) sim_matrix[i-1, j] = softmax(-1.0 * mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img =", "data, chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len * [seq_len]) all_labels = data['frame_labels'].numpy()[0]", "and return TB compatible image.\"\"\" plt.figure() plt.plot(x, y) plt.title('Val Accuracy')", "axis=0)[num_frames_per_step - 1::num_frames_per_step][::2] else: img_list = tf.unstack(image_list[i], num=num_steps * num_frames_per_step,", "seq_lens_list = [] names_list = [] seq_labels_list = [] if", "can be called to return the current learning rate based", "seq_labels = np.concatenate(seq_labels, axis=0) if keep_data: frames.append(data['frames'].numpy()[0]) frames = np.concatenate(frames,", "get_cnn_feats( cnn, data, training=False, num_steps=2 * num_steps) emb_feats = emb(cnn_feats,", "might be overwriting a directory that already ' 'has train_logs.", "break CONFIG.update(config_dict) def get_data(iterator): \"\"\"Return a data dict which contains", "def 
manual_stepping(global_step, boundaries, rates): boundaries = [0] + boundaries num_boundaries", "np.save(tf.io.gfile.GFile('/air/team/saman/test_weights_old.npy', 'w'), cnn.weights[0].numpy()) # np.save(tf.io.gfile.GFile('/air/team/saman/test_batch_old.npy', 'w'), curr_data[\"frames\"]) # np.save(tf.io.gfile.GFile('/air/team/saman/test_cnn_old.npy', 'w'),", "the current learning rate based on the provided config. Raises:", "[] if keep_data: frames = [] if optical_flow: frame_original =", "seq_lens.append(seq_len * [seq_len]) all_labels = data['frame_labels'].numpy()[0] name = data['name'].numpy()[0] names.append(seq_len", "np.concatenate(embs, axis=0) labels = np.concatenate(labels, axis=0) steps = np.concatenate(steps, axis=0)", "dict we can insert multiple modular networks in this dict.", "nn_img_list = [] for j in range(num_steps): curr_query_feats = tf.tile(query_feats[j:j+1],", "frames_per_batch else: num_steps = frames_per_batch curr_idx = i * frames_per_batch", "num_frames_per_step, axis=0)[num_frames_per_step - 1::num_frames_per_step] nn_img_list = [] for j in", "if keep_data: frames = [] if optical_flow: frame_original = []", "logdir) with tf.io.gfile.GFile(config_path, 'w') as config_file: config = dict([(k, to_dict(v))", "range(len(lr_step_boundaries) + 1)] def lr_fn(lr, global_step): return manual_stepping( global_step, lr_step_boundaries,", "callbacks: f(np.concatenate(embs), data, chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len * [seq_len]) all_labels", "assert len(embs) == len(steps) assert len(names) == len(steps) embs_list.append(embs) if", "types: fixed, exp_decay, manual,' 'and poly.') return (lambda lr, global_step:", "axis=1) else: curr_data[k] = v cnn_feats = get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step", "= tf.constant( lr_params.NUM_WARMUP_STEPS, dtype=tf.int32) global_steps_float = tf.cast(global_steps_int, tf.float32) warmup_steps_float =", "= CONFIG.DATA.NUM_STEPS cnn = model['cnn'] emb 
= model['emb'] embs_list =", "exist. if len(v.shape) > 1 and v.shape[1] != 0: idxes", "We do this as we are embedding the whole sequence", "= np.concatenate(labels, axis=0) steps = np.concatenate(steps, axis=0) seq_lens = np.concatenate(seq_lens,", "config. Raises: ValueError: in case invalid params have been passed", "measuring elapsed time.\"\"\" def __init__(self): self.reset() def elapsed(self): return time.time()", "f(np.concatenate(embs), data, chosen_steps, seq_len) steps.append(chosen_steps.numpy()[0]) seq_lens.append(seq_len * [seq_len]) all_labels =", "steps_list, 'names': names_list, 'seq_labels': seq_labels_list} if keep_data: dataset['frames'] = frames_list", "rate.\"\"\" if optimizer_config.TYPE == 'AdamOptimizer': opt = tf.keras.optimizers.Adam(learning_rate=learning_rate) elif optimizer_config.TYPE", "# Minimally adapted from Tensorflow object_detection code. def manual_stepping(global_step, boundaries,", "CONFIG.TRAINING_ALGO: cnn_feats = get_cnn_feats( cnn, data, training=False, num_steps=2 * num_steps)", "**kwargs): \"\"\"Calls the function f after setting proper learning phase.\"\"\"", "in ' 'config or pass --force_train while launching script.') tf.io.gfile.makedirs(train_logs_dir)", "for gru_layer in emb.gru_layers: gru_layer.reset_states() data, chosen_steps, seq_len = get_data(iterator)", "axis=0) tf.summary.image('%s/nn' % split, summary_im, step=global_step) # Convert sim_matrix to", "in inference mode. 
tf.keras.backend.set_learning_phase(0) cnn = model['cnn'] emb = model['emb']", "assert len(names) == len(steps) embs_list.append(embs) if keep_labels: labels_list.append(labels) seq_labels_list.append(seq_labels) steps_list.append(steps)", "and not force_train: raise ValueError('You might be overwriting a directory", "Stopwatch(object): \"\"\"Simple timer for measuring elapsed time.\"\"\" def __init__(self): self.reset()", "json.dump(config, config_file, sort_keys=True, indent=4) else: logging.info( 'Using config from config.json", "all the requested sequences.\"\"\" data = iterator.get_next() return data, data['chosen_steps'],", "seq_lens = [] names = [] seq_labels = [] if", "\"\"\"Returns optimizer based on config and learning rate.\"\"\" if optimizer_config.TYPE", "learning phase.\"\"\" if 'training' not in kwargs: raise ValueError('Function called", "return config def prepare_gpu(ind=-1): ind = int(ind) GPUS = tf.config.experimental.list_physical_devices('GPU')", "sequence number %d, frames embedded %d', n, curr_idx + num_steps)", "Tensorflow object_detection code. def manual_stepping(global_step, boundaries, rates): boundaries = [0]", "power=1.0, cycle=False) else: raise ValueError('Learning rate decay type %s not", "tf.io.gfile.exists(config_path): logging.info('Waiting for config to exist. 
Going to sleep '", "tf.summary.image('%s/similarity_matrix' % split, np.expand_dims(sim_matrix, axis=3), step=global_step) def softmax(w, t=1.0): e", "for i in range(1, batch_size): candidate_feats = emb_feats[i] if 'tcn'", "def setup_train_dir(logdir, overwrite=False, force_train=True): \"\"\"Setups directory for training.\"\"\" tf.io.gfile.makedirs(logdir) config_path", "data = iterator.get_next() return data, data['chosen_steps'], data['seq_lens'] @tf.function def get_cnn_feats(cnn,", "v in config.items()]) else: return config def setup_train_dir(logdir, overwrite=False, force_train=True):", "def gen_plot(x, y): \"\"\"Create a pyplot, save to buffer and", "be the same across GPUs for gpu in GPUS: tf.config.experimental.set_memory_growth(gpu,", "num_boundaries = len(boundaries) rate_index = tf.reduce_max( tf.where( tf.greater_equal(global_step, boundaries), list(range(num_boundaries)),", "[] n = 0 def cond(n): if max_embs is None:", "learning_phase to False to use models in inference mode. tf.keras.backend.set_learning_phase(0)", "return random_cycles def get_warmup_lr(lr, global_step, lr_params): \"\"\"Returns learning rate during", "frame_original = np.concatenate(frame_original, axis=0) if keep_labels: labels = labels[~np.isnan(embs).any(axis=1)] assert", "in range(len(lr_step_boundaries) + 1)] def lr_fn(lr, global_step): return manual_stepping( global_step,", "else: return n < max_embs # Make Recurrent Layers stateful,", "inference mode. 
tf.keras.backend.set_learning_phase(0) cnn = model['cnn'] emb = model['emb'] if", "= np.concatenate(names, axis=0) seq_labels = np.concatenate(seq_labels, axis=0) if keep_data: frames.append(data['frames'].numpy()[0])", "if isinstance(config, list): return [to_dict(c) for c in config] elif", "np.concatenate([get_context_steps(step) for step in steps]) single_steps = np.concatenate(np.array(list(map(get_context_steps, np.arange(curr_idx, curr_idx", "is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32) lr = (1.0 -", "[seq_label]) labels.append(all_labels) embs = np.concatenate(embs, axis=0) labels = np.concatenate(labels, axis=0)", "matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top import io import math", "k, v in CONFIG.items()]) json.dump(config, config_file, sort_keys=True, indent=4) else: logging.info(", "= kwargs['training'] if training: # Set learning_phase to True to", "get_cnn_feats(cnn, curr_data, num_steps=num_frames_per_step * num_steps, training=False) emb_feats = emb(cnn_feats, num_steps)", "rate during warm up phase.\"\"\" if lr_params.NUM_WARMUP_STEPS > 0: global_steps_int", "+ is_warmup * warmup_lr return lr # Minimally adapted from", "on config. 
NOTE: This returns a function as in Eager", "raise ValueError('You might be overwriting a directory that already '", "-1 return ckpt_manager, status, checkpoint def restore_ckpt(logdir, **ckpt_objects): \"\"\"Create and", "len(v.shape) > 1 and v.shape[1] != 0: idxes = get_indices(curr_idx,", "names = names[~np.isnan(embs).any(axis=1)] seq_lens = seq_lens[~np.isnan(embs).any(axis=1)] steps = steps[~np.isnan(embs).any(axis=1)] if", "keep_data: frames_list = [] if optical_flow: frame_original_list = [] n", "def lr_fn(lr, global_step): return tf.train.exponential_decay( lr, global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)()", "num_steps cnn_feats = cnn(data['frames']) return cnn_feats def get_context_steps(step): num_steps =", "decorator which' ' does not have training argument.') training =", "if os.path.exists(train_logs_dir) and not force_train: raise ValueError('You might be overwriting", "image = tf.image.decode_png(buf.getvalue(), channels=4) # Add the batch dimension image", "int(ind) GPUS = tf.config.experimental.list_physical_devices('GPU') if GPUS: if ind > -1:", "labels_list.append(labels) seq_labels_list.append(seq_labels) steps_list.append(steps) seq_lens_list.append(seq_lens) names_list.append(names) if keep_data: frames_list.append(frames) if optical_flow:", "x in lr_params.MANUAL_LR_STEP_BOUNDARIES] f = lr_params.MANUAL_LR_DECAY_RATE learning_rate_sequence = [(lr_params.INITIAL_LR) *", "curr_idx + num_steps) single_steps = np.concatenate([get_context_steps(step) for step in steps])", "len(labels) seq_labels = seq_labels[~np.isnan(embs).any(axis=1)] names = names[~np.isnan(embs).any(axis=1)] seq_lens = seq_lens[~np.isnan(embs).any(axis=1)]", "\"\"\"Visualize nearest neighbours in embedding space.\"\"\" # Set learning_phase to", "lr_params.DECAY_TYPE == 'exp_decay': def lr_fn(lr, global_step): return tf.train.exponential_decay( lr, global_step,", "import flags from absl import logging from easydict import EasyDict", 
"f(*args, **kwargs) return wrapper def load_config(config_path): config = None if", "RuntimeError as e: # Memory growth must be set before", "sim_matrix[i-1, j] = softmax(-1.0 * mean_squared_distance) nn_img_list.append(img_list[tf.argmin(mean_squared_distance)]) nn_img = tf.stack(nn_img_list,", "a new logdir name in ' 'config or pass --force_train", "axis=3), step=global_step) def softmax(w, t=1.0): e = np.exp(np.array(w) / t)", "lr, global_step, lr_params.EXP_DECAY_STEPS, lr_params.EXP_DECAY_RATE, staircase=True)() elif lr_params.DECAY_TYPE == 'manual': lr_step_boundaries", "= [] n = 0 def cond(n): if max_embs is", "* num_steps, training=False) emb_feats = emb(cnn_feats, num_steps) logging.debug('On sequence number", "axis=0) if 'tcn' in CONFIG.TRAINING_ALGO: im_list = [image_list[0] [num_frames_per_step -", "ind = int(ind) GPUS = tf.config.experimental.list_physical_devices('GPU') if GPUS: if ind" ]
[ "self.set_raw_data(writer.bytes) class SecondarySpriteTexture: def __init__(self, reader): self.texture = PPtr(reader) #", "_ in range(SubMeshesSize)] IndexBufferSize = reader.read_int() self.m_IndexBuffer = reader.read_bytes(IndexBufferSize) reader.align_stream()", "self.m_RD = SpriteRenderData(reader) if version >= (2017,): # 2017 and", "self.textureRect = reader.read_rectangle_f() self.textureRectOffset = reader.read_vector2() if version >= (5,", "image(self): return SpriteHelper.get_image_from_sprite(self) def __init__(self, reader): super().__init__(reader=reader) version = self.version", "writer = EndianBinaryWriter(endian=self.reader.endian) version = self.version super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if", "in range(verticesSize)] self.indices = reader.read_u_short_array() reader.align_stream() if version >= (2018,):", "import BoneWeights4, SubMesh, VertexData from .NamedObject import NamedObject from .PPtr", "# 4.5 and up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if version >= (5,", "# Texture2D self.name = reader.read_string_to_null() def save(self, writer): self.texture.save(writer) writer.write_string_to_null(self.name)", "version): writer.write_vector3(self.pos) if version[:2] <= (4, 3): # 4.3 and", "reader.read_int() self.secondaryTextures = [ SecondarySpriteTexture(reader) for _ in range(secondaryTexturesSize) ]", "and up self.atlasRectOffset = reader.read_vector2() self.settingsRaw = SpriteSettings(reader) if version", "if version >= (4, 5): # 4.5 and up self.uvTransform", "self.m_settingsRaw = _value self.packed = self.m_settingsRaw & 1 # 1", "version < (2018, 2): # 2018.2 down self.m_SourceSkinSize = reader.read_int()", "= 1 class SpriteSettings: def __init__(self, reader): self.value = reader.read_u_int()", "writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) # SpriteAtlas 
self.m_RD.save(writer, version) if version >=", "(0,) kSPRFlipHorizontal = (1,) kSPRFlipVertical = (2,) kSPRRotate180 = (3,)", "= _value self.packed = self.m_settingsRaw & 1 # 1 self.packingMode", "if version >= (2018,): # 2018 and up writer.write_matrix_array(self.m_Bindpose) if", "SubMesh, VertexData from .NamedObject import NamedObject from .PPtr import PPtr,", "SpriteAtlas self.m_RD.save(writer, version) if version >= (2017,): # 2017 and", "self.secondaryTextures: tex.save(writer) if version >= (5, 6): # 5.6 and", "writer, version): self.texture.save(writer) # Texture2D if version >= (5, 2):", "writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if version >= (5, 3): # 5.3 and", "self.m_RD.save(writer, version) if version >= (2017,): # 2017 and up", ">= (2017,): # 2017 and up writer.write_int(len(self.m_PhysicsShape)) for phys in", "writer.write_int(len(self.secondaryTextures)) for tex in self.secondaryTextures: tex.save(writer) if version >= (5,", "import PPtr, save_ptr from ..export import SpriteHelper from ..enums import", "4, 1, 3) and self.build_type.IsPatch ): # 5.4.1p3 and up", "# 5.2 and up self.alphaTexture = PPtr(reader) # Texture2D if", "& 0xF) # 4 self.meshType = SpriteMeshType((self.m_settingsRaw >> 6) &", "PPtr(reader) # Texture2D if version >= (5, 2): # 5.2", "if version >= (2017,): # 2017 and up first =", "tex in self.secondaryTextures: tex.save(writer) if version >= (5, 6): #", "self.settingsRaw = SpriteSettings(reader) if version >= (4, 5): # 4.5", "= reader.read_int() self.m_PhysicsShape = [ reader.read_vector2_array() for _ in range(m_PhysicsShapeSize)", "and up writer.write_boolean(self.m_IsPolygon) writer.align_stream() if version >= (2017,): # 2017", "def __init__(self, reader): version = reader.version self.texture = PPtr(reader) #", "and up self.uvTransform = reader.read_vector4() if version >= (2017,): #", "version >= (4, 5): # 4.5 and up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits)", 
"up m_PhysicsShapeSize = reader.read_int() self.m_PhysicsShape = [ reader.read_vector2_array() for _", "# 4.3 and down self.uv = reader.read_vector2() def save(self, writer,", "if version >= (2018,): # 2018 and up m_BonesSize =", "3): # 4.3 and down writer.write__vector2(self.uv) class SpriteRenderData: def __init__(self,", "tex.save(writer) if version >= (5, 6): # 5.6 and up", "self.uvTransform = reader.read_vector4() if version >= (2017,): # 2017 and", "for phys in self.m_PhysicsShape: writer.write_vector2_array(phys) if version >= (2018,): #", "reader.version self.texture = PPtr(reader) # Texture2D if version >= (5,", "def __init__(self, reader): version = reader.version self.pos = reader.read_vector3() if", "down writer.write__vector2(self.uv) class SpriteRenderData: def __init__(self, reader): version = reader.version", "self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if version >= (5, 6): # 5.6", "SpriteRenderData(reader) if version >= (2017,): # 2017 and up m_PhysicsShapeSize", "if version >= (2019,): # 2019 and up writer.write_int(len(self.secondaryTextures)) for", "writer.write_int(len(self.m_PhysicsShape)) for phys in self.m_PhysicsShape: writer.write_vector2_array(phys) if version >= (2018,):", "writer.write_vector2(self.textureRectOffset) if version >= (5, 6): # 5.6 and up", ">> 2) & 0xF) # 4 self.meshType = SpriteMeshType((self.m_settingsRaw >>", "kSPMTight = (0,) kSPMRectangle = 1 class SpriteSettings: def __init__(self,", "self.m_AtlasTags = reader.read_string_array() self.m_SpriteAtlas = PPtr(reader) # SpriteAtlas self.m_RD =", "reader.read_float() def save(self, writer, version): self.texture.save(writer) # Texture2D if version", ">= (4, 5): # 4.5 and up writer.write_vector4(self.uvTransform) if version", "version >= (5, 6): # 5.6 and up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer)", ">= (5, 4, 2) or ( version >= (5, 4,", "writer, version): 
writer.write_vector3(self.pos) if version[:2] <= (4, 3): # 4.3", "# 4 self.meshType = SpriteMeshType((self.m_settingsRaw >> 6) & 1) #", "first = reader.read_bytes(16) # GUID second = reader.read_long() self.m_RenderDataKey =", "writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum): kSPRNone = (0,) kSPRFlipHorizontal = (1,) kSPRFlipVertical", "3): # 4.3 and down self.uv = reader.read_vector2() def save(self,", "3) and self.build_type.IsPatch ): # 5.4.1p3 and up writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude)", "writer.write_boolean(self.m_IsPolygon) writer.align_stream() if version >= (2017,): # 2017 and up", "= reader.version self.texture = PPtr(reader) # Texture2D if version >=", "in range(m_BonesSize) ] def save(self, writer: EndianBinaryWriter = None): if", "5.6 and up SubMeshesSize = reader.read_int() self.m_SubMeshes = [SubMesh(reader) for", ">= (5, 6): # 5.6 and up self.atlasRectOffset = reader.read_vector2()", "save(self, writer, version): writer.write_vector3(self.pos) if version[:2] <= (4, 3): #", "reader): version = reader.version self.texture = PPtr(reader) # Texture2D if", "and up writer.write_int(len(self.m_PhysicsShape)) for phys in self.m_PhysicsShape: writer.write_vector2_array(phys) if version", "version >= (2018,): # 2018 and up writer.write_int(len(self.m_Bones)) for bone", "save_ptr from ..export import SpriteHelper from ..enums import SpriteMeshType from", "= reader.read_int() self.vertices = [SpriteVertex(reader) for _ in range(verticesSize)] self.indices", "self.m_PhysicsShape = [ reader.read_vector2_array() for _ in range(m_PhysicsShapeSize) ] if", "and up self.m_Pivot = reader.read_vector2() self.m_Extrude = reader.read_u_int() if version", "# 5.6 and up writer.write_int(len(self.m_SubMeshes)) for mesh in self.m_SubMeshes: mesh.save(writer,", "in range(m_PhysicsShapeSize) ] if version >= (2018,): # 2018 and", "mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer)) 
writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer, version) else: writer.write_int(len(self.vertices)) for", "if version >= (5, 6): # 5.6 and up self.atlasRectOffset", "(4, 5): # 4.5 and up writer.write_vector4(self.uvTransform) if version >=", "import IntEnum from .Mesh import BoneWeights4, SubMesh, VertexData from .NamedObject", "version = self.version super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if version >= (4,", "down self.uv = reader.read_vector2() def save(self, writer, version): writer.write_vector3(self.pos) if", ">= (2018,): # 2018 and up self.m_Bindpose = reader.read_matrix_array() if", "in self.m_SubMeshes: mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer)) writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer, version) else:", "# 2018.2 down self.m_SourceSkinSize = reader.read_int() self.m_SourceSkin = [BoneWeights4(reader)] self.textureRect", "self.version self.m_Rect = reader.read_rectangle_f() self.m_Offset = reader.read_vector2() if version >=", "def value(self, _value): self.m_settingsRaw = _value self.packed = self.m_settingsRaw &", "= reader.read_vector2() def save(self, writer, version): writer.write_vector3(self.pos) if version[:2] <=", "GUID second = reader.read_long() self.m_RenderDataKey = (first, second) self.m_AtlasTags =", "writer.write_vector3(self.pos) if version[:2] <= (4, 3): # 4.3 and down", "= reader.read_vector3() if version[:2] <= (4, 3): # 4.3 and", "phys in self.m_PhysicsShape: writer.write_vector2_array(phys) if version >= (2018,): # 2018", "# 2017 and up writer.write_int(len(self.m_PhysicsShape)) for phys in self.m_PhysicsShape: writer.write_vector2_array(phys)", "self.m_RenderDataKey = (first, second) self.m_AtlasTags = reader.read_string_array() self.m_SpriteAtlas = PPtr(reader)", "version >= (4, 5): # 4.5 and up self.m_Border =", "EndianBinaryWriter(endian=self.reader.endian) 
version = self.version super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if version >=", "version >= (4, 5): # 4.5 and up self.uvTransform =", "from .Mesh import BoneWeights4, SubMesh, VertexData from .NamedObject import NamedObject", "version[:2] <= (4, 3): # 4.3 and down self.uv =", "# 5.4.1p3 and up self.m_Pivot = reader.read_vector2() self.m_Extrude = reader.read_u_int()", "5.6 and up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if version >= (4, 5):", "= reader.read_int() self.m_Bones = [ reader.read_vector2_array() for _ in range(m_BonesSize)", "if version >= (5, 2): # 5.2 and up self.alphaTexture.save(writer)", "version >= (2019,): # 2019 and up writer.write_int(len(self.secondaryTextures)) for tex", "= reader.read_vector2() if version >= (4, 5): # 4.5 and", "..export import SpriteHelper from ..enums import SpriteMeshType from ..streams import", "PPtr(reader) # SpriteAtlas self.m_RD = SpriteRenderData(reader) if version >= (2017,):", "in range(secondaryTexturesSize) ] if version >= (5, 6): # 5.6", "self.build_type.IsPatch ): # 5.4.1p3 and up writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if version", "6) & 1) # 1 # rest of the bits", ">= (4, 5): # 4.5 and up self.uvTransform = reader.read_vector4()", "EndianBinaryWriter class Sprite(NamedObject): @property def image(self): return SpriteHelper.get_image_from_sprite(self) def __init__(self,", "(2018,): # 2018 and up writer.write_int(len(self.m_Bones)) for bone in self.m_Bones:", "_ in range(m_BonesSize) ] def save(self, writer: EndianBinaryWriter = None):", "= [SubMesh(reader) for _ in range(SubMeshesSize)] IndexBufferSize = reader.read_int() self.m_IndexBuffer", "version >= (5, 3): # 5.3 and up writer.write_boolean(self.m_IsPolygon) writer.align_stream()", "version >= (5, 6): # 5.6 and up self.atlasRectOffset =", "for _ in range(SubMeshesSize)] IndexBufferSize = reader.read_int() self.m_IndexBuffer = 
reader.read_bytes(IndexBufferSize)", "up writer.write_boolean(self.m_IsPolygon) writer.align_stream() if version >= (2017,): # 2017 and", "def __init__(self, reader): self.texture = PPtr(reader) # Texture2D self.name =", "(first, second) self.m_AtlasTags = reader.read_string_array() self.m_SpriteAtlas = PPtr(reader) # SpriteAtlas", "up writer.write_int(len(self.m_Bones)) for bone in self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class SecondarySpriteTexture:", "( version >= (5, 4, 1, 3) and self.build_type.IsPatch ):", "# 5.4.1p3 and up writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if version >= (5,", "writer.write__vector2(self.uv) class SpriteRenderData: def __init__(self, reader): version = reader.version self.texture", "= reader.version self.pos = reader.read_vector3() if version[:2] <= (4, 3):", "up writer.write_matrix_array(self.m_Bindpose) if version < (2018, 2): # 2018.2 down", "up writer.write_int(len(self.m_SubMeshes)) for mesh in self.m_SubMeshes: mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer)) writer.write_bytes(self.m_IndexBuffer)", "= self.version super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if version >= (4, 5):", "[ reader.read_vector2_array() for _ in range(m_BonesSize) ] def save(self, writer:", "= PPtr(reader) # Texture2D self.name = reader.read_string_to_null() def save(self, writer):", "# 1 # rest of the bits are reserved def", "up self.downscaleMultiplier = reader.read_float() def save(self, writer, version): self.texture.save(writer) #", "[SubMesh(reader) for _ in range(SubMeshesSize)] IndexBufferSize = reader.read_int() self.m_IndexBuffer =", "2017 and up writer.write_int(len(self.m_PhysicsShape)) for phys in self.m_PhysicsShape: writer.write_vector2_array(phys) if", "6): # 5.6 and up writer.write_int(len(self.m_SubMeshes)) for mesh in self.m_SubMeshes:", "# SpriteAtlas self.m_RD.save(writer, version) if version >= 
(2017,): # 2017", "are reserved def save(self, writer): writer.write_u_int(self.m_settingsRaw) class SpriteVertex: def __init__(self,", "(5, 6): # 5.6 and up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if version", "@value.setter def value(self, _value): self.m_settingsRaw = _value self.packed = self.m_settingsRaw", "GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) # SpriteAtlas self.m_RD.save(writer, version) if version", "# 4.3 and down writer.write__vector2(self.uv) class SpriteRenderData: def __init__(self, reader):", "in self.secondaryTextures: tex.save(writer) if version >= (5, 6): # 5.6", "class Sprite(NamedObject): @property def image(self): return SpriteHelper.get_image_from_sprite(self) def __init__(self, reader):", "(2018,): # 2018 and up writer.write_matrix_array(self.m_Bindpose) if version < (2018,", ">> 1) & 1) # 1 self.packingRotation = SpritePackingRotation((self.m_settingsRaw >>", "= reader.read_string_array() self.m_SpriteAtlas = PPtr(reader) # SpriteAtlas self.m_RD = SpriteRenderData(reader)", "Texture2D if version >= (2019,): # 2019 and up writer.write_int(len(self.secondaryTextures))", "# 2017 and up writer.write_bytes(self.m_RenderDataKey[0]) # GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer)", "= [ SecondarySpriteTexture(reader) for _ in range(secondaryTexturesSize) ] if version", "reader.read_vector2_array() for _ in range(m_BonesSize) ] def save(self, writer: EndianBinaryWriter", ">= (2017,): # 2017 and up first = reader.read_bytes(16) #", "if version >= (2017,): # 2017 and up writer.write_bytes(self.m_RenderDataKey[0]) #", "_ in range(m_PhysicsShapeSize) ] if version >= (2018,): # 2018", "range(m_PhysicsShapeSize) ] if version >= (2018,): # 2018 and up", "Texture2D if version >= (5, 2): # 5.2 and up", "def save(self, writer: EndianBinaryWriter = None): if writer 
is None:", "and up self.downscaleMultiplier = reader.read_float() def save(self, writer, version): self.texture.save(writer)", "reader.read_vector2() if version >= (4, 5): # 4.5 and up", "__init__(self, reader): version = reader.version self.texture = PPtr(reader) # Texture2D", "(4, 5): # 4.5 and up self.m_Border = reader.read_vector4() self.m_PixelsToUnits", "# 2017 and up self.downscaleMultiplier = reader.read_float() def save(self, writer,", ">= (5, 6): # 5.6 and up SubMeshesSize = reader.read_int()", "= reader.read_int() self.m_SubMeshes = [SubMesh(reader) for _ in range(SubMeshesSize)] IndexBufferSize", "SpritePackingMode(IntEnum): kSPMTight = (0,) kSPMRectangle = 1 class SpriteSettings: def", "Texture2D if version >= (2019,): # 2019 and up secondaryTexturesSize", "and up writer.write_int(len(self.m_Bones)) for bone in self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class", "writer.write_u_int(self.m_Extrude) if version >= (5, 3): # 5.3 and up", "(4, 5): # 4.5 and up self.uvTransform = reader.read_vector4() if", "SpriteVertex: def __init__(self, reader): version = reader.version self.pos = reader.read_vector3()", "2019 and up writer.write_int(len(self.secondaryTextures)) for tex in self.secondaryTextures: tex.save(writer) if", "if version >= (5, 6): # 5.6 and up writer.write_vector2(self.atlasRectOffset)", "reader): self.value = reader.read_u_int() @property def value(self): return self.m_settingsRaw @value.setter", "2) & 0xF) # 4 self.meshType = SpriteMeshType((self.m_settingsRaw >> 6)", "= reader.read_boolean() reader.align_stream() if version >= (2017,): # 2017 and", "# 5.2 and up self.alphaTexture.save(writer) # Texture2D if version >=", ">= (4, 5): # 4.5 and up self.m_Border = reader.read_vector4()", "__init__(self, reader): self.value = reader.read_u_int() @property def value(self): return self.m_settingsRaw", "self.m_SourceSkin = [BoneWeights4(reader)] self.textureRect = reader.read_rectangle_f() self.textureRectOffset = 
reader.read_vector2() if", "and self.build_type.IsPatch ): # 5.4.1p3 and up self.m_Pivot = reader.read_vector2()", "def __init__(self, reader): self.value = reader.read_u_int() @property def value(self): return", "reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData = VertexData(reader) else: verticesSize = reader.read_int() self.vertices", "range(verticesSize)] self.indices = reader.read_u_short_array() reader.align_stream() if version >= (2018,): #", "1) & 1) # 1 self.packingRotation = SpritePackingRotation((self.m_settingsRaw >> 2)", "writer is None: writer = EndianBinaryWriter(endian=self.reader.endian) version = self.version super().save(writer)", "if version >= (5, 2): # 5.2 and up self.alphaTexture", "version >= (5, 3): # 5.3 and up self.m_IsPolygon =", "writer.write_u_int(self.m_settingsRaw) class SpriteVertex: def __init__(self, reader): version = reader.version self.pos", "= self.m_settingsRaw & 1 # 1 self.packingMode = SpritePackingMode((self.m_settingsRaw >>", "and up self.m_Border = reader.read_vector4() self.m_PixelsToUnits = reader.read_float() if version", "2018.2 down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if version >= (5,", "(5, 3): # 5.3 and up self.m_IsPolygon = reader.read_boolean() reader.align_stream()", "= [BoneWeights4(reader)] self.textureRect = reader.read_rectangle_f() self.textureRectOffset = reader.read_vector2() if version", "(2019,): # 2019 and up writer.write_int(len(self.secondaryTextures)) for tex in self.secondaryTextures:", "if version >= (2019,): # 2019 and up secondaryTexturesSize =", "and down self.uv = reader.read_vector2() def save(self, writer, version): writer.write_vector3(self.pos)", "return SpriteHelper.get_image_from_sprite(self) def __init__(self, reader): super().__init__(reader=reader) version = self.version self.m_Rect", ">= (2017,): # 2017 and up 
self.downscaleMultiplier = reader.read_float() def", "] if version >= (2018,): # 2018 and up m_BonesSize", "self.m_VertexData = VertexData(reader) else: verticesSize = reader.read_int() self.vertices = [SpriteVertex(reader)", "self.m_settingsRaw & 1 # 1 self.packingMode = SpritePackingMode((self.m_settingsRaw >> 1)", "writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if version >= (4, 5): # 4.5 and", "kSPRFlipVertical = (2,) kSPRRotate180 = (3,) kSPRRotate90 = 4 class", "self.packingRotation = SpritePackingRotation((self.m_settingsRaw >> 2) & 0xF) # 4 self.meshType", "self.build_type.IsPatch ): # 5.4.1p3 and up self.m_Pivot = reader.read_vector2() self.m_Extrude", "VertexData from .NamedObject import NamedObject from .PPtr import PPtr, save_ptr", "writer.write_vector2(self.m_Offset) if version >= (4, 5): # 4.5 and up", "self.m_PhysicsShape: writer.write_vector2_array(phys) if version >= (2018,): # 2018 and up", "self.textureRectOffset = reader.read_vector2() if version >= (5, 6): # 5.6", "def save(self, writer, version): self.texture.save(writer) # Texture2D if version >=", "self.indices = reader.read_u_short_array() reader.align_stream() if version >= (2018,): # 2018", "version >= (5, 6): # 5.6 and up writer.write_int(len(self.m_SubMeshes)) for", "and up SubMeshesSize = reader.read_int() self.m_SubMeshes = [SubMesh(reader) for _", "PPtr(reader) # Texture2D self.name = reader.read_string_to_null() def save(self, writer): self.texture.save(writer)", "rest of the bits are reserved def save(self, writer): writer.write_u_int(self.m_settingsRaw)", "(2017,): # 2017 and up first = reader.read_bytes(16) # GUID", "if version >= (5, 6): # 5.6 and up writer.write_int(len(self.m_SubMeshes))", "= SpriteSettings(reader) if version >= (4, 5): # 4.5 and", "2018 and up writer.write_matrix_array(self.m_Bindpose) if version < (2018, 2): #", "reader.align_stream() if version >= (2017,): # 2017 and up first", "version) writer.write_int(len(self.m_IndexBuffer)) 
writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer, version) else: writer.write_int(len(self.vertices)) for vertex", "and up self.alphaTexture.save(writer) # Texture2D if version >= (2019,): #", "# Texture2D if version >= (5, 2): # 5.2 and", "second) self.m_AtlasTags = reader.read_string_array() self.m_SpriteAtlas = PPtr(reader) # SpriteAtlas self.m_RD", "= SpriteMeshType((self.m_settingsRaw >> 6) & 1) # 1 # rest", "reader.read_int() self.m_SubMeshes = [SubMesh(reader) for _ in range(SubMeshesSize)] IndexBufferSize =", "from ..export import SpriteHelper from ..enums import SpriteMeshType from ..streams", "5.4.1p3 and up self.m_Pivot = reader.read_vector2() self.m_Extrude = reader.read_u_int() if", "# 1 self.packingRotation = SpritePackingRotation((self.m_settingsRaw >> 2) & 0xF) #", "writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if version >= (5, 6): # 5.6 and", "4.5 and up writer.write_vector4(self.uvTransform) if version >= (2017,): # 2017", "if version >= (5, 3): # 5.3 and up writer.write_boolean(self.m_IsPolygon)", ">> 6) & 1) # 1 # rest of the", "down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if version >= (5, 6):", "= [ reader.read_vector2_array() for _ in range(m_BonesSize) ] def save(self,", "(1,) kSPRFlipVertical = (2,) kSPRRotate180 = (3,) kSPRRotate90 = 4", "reader.read_float() if version >= (5, 4, 2) or ( version", "self.alphaTexture = PPtr(reader) # Texture2D if version >= (2019,): #", "self.texture.save(writer) # Texture2D if version >= (5, 2): # 5.2", "SpriteAtlas self.m_RD = SpriteRenderData(reader) if version >= (2017,): # 2017", "= reader.read_vector4() if version >= (2017,): # 2017 and up", "import SpriteMeshType from ..streams import EndianBinaryWriter class Sprite(NamedObject): @property def", "_ in range(secondaryTexturesSize) ] if version >= 
(5, 6): #", "reader.read_u_int() @property def value(self): return self.m_settingsRaw @value.setter def value(self, _value):", "self.m_Bindpose = reader.read_matrix_array() if version < (2018, 2): # 2018.2", "reader.read_matrix_array() if version < (2018, 2): # 2018.2 down self.m_SourceSkinSize", "= (2,) kSPRRotate180 = (3,) kSPRRotate90 = 4 class SpritePackingMode(IntEnum):", "up self.m_Bindpose = reader.read_matrix_array() if version < (2018, 2): #", "= reader.read_vector2() if version >= (5, 6): # 5.6 and", "SpritePackingRotation(IntEnum): kSPRNone = (0,) kSPRFlipHorizontal = (1,) kSPRFlipVertical = (2,)", "5.3 and up writer.write_boolean(self.m_IsPolygon) writer.align_stream() if version >= (2017,): #", "import SpriteHelper from ..enums import SpriteMeshType from ..streams import EndianBinaryWriter", "3) and self.build_type.IsPatch ): # 5.4.1p3 and up self.m_Pivot =", "for tex in self.secondaryTextures: tex.save(writer) if version >= (5, 6):", "up self.m_Pivot = reader.read_vector2() self.m_Extrude = reader.read_u_int() if version >=", "IndexBufferSize = reader.read_int() self.m_IndexBuffer = reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData = VertexData(reader)", ">= (2019,): # 2019 and up writer.write_int(len(self.secondaryTextures)) for tex in", "self.m_VertexData.save(writer, version) else: writer.write_int(len(self.vertices)) for vertex in self.vertices: vertex.save(writer, version)", "verticesSize = reader.read_int() self.vertices = [SpriteVertex(reader) for _ in range(verticesSize)]", "in range(SubMeshesSize)] IndexBufferSize = reader.read_int() self.m_IndexBuffer = reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData", "self.m_Border = reader.read_vector4() self.m_PixelsToUnits = reader.read_float() if version >= (5,", "reader.read_vector2_array() for _ in range(m_PhysicsShapeSize) ] if version >= (2018,):", ">= (5, 2): # 5.2 and up self.alphaTexture = PPtr(reader)", "self.m_PixelsToUnits = 
reader.read_float() if version >= (5, 4, 2) or", "version >= (2018,): # 2018 and up writer.write_matrix_array(self.m_Bindpose) if version", "bits are reserved def save(self, writer): writer.write_u_int(self.m_settingsRaw) class SpriteVertex: def", "reader.read_bytes(16) # GUID second = reader.read_long() self.m_RenderDataKey = (first, second)", "(2017,): # 2017 and up self.downscaleMultiplier = reader.read_float() def save(self,", "if version >= (2017,): # 2017 and up writer.write_int(len(self.m_PhysicsShape)) for", "if version >= (5, 4, 2) or ( version >=", "= None): if writer is None: writer = EndianBinaryWriter(endian=self.reader.endian) version", "version = self.version self.m_Rect = reader.read_rectangle_f() self.m_Offset = reader.read_vector2() if", "reader.read_rectangle_f() self.textureRectOffset = reader.read_vector2() if version >= (5, 6): #", "up writer.write_vector4(self.uvTransform) if version >= (2017,): # 2017 and up", "writer.write_vector4(self.uvTransform) if version >= (2017,): # 2017 and up writer.write_float(self.downscaleMultiplier)", ">= (5, 2): # 5.2 and up self.alphaTexture.save(writer) # Texture2D", "@property def value(self): return self.m_settingsRaw @value.setter def value(self, _value): self.m_settingsRaw", "= PPtr(reader) # SpriteAtlas self.m_RD = SpriteRenderData(reader) if version >=", "if version >= (4, 5): # 4.5 and up self.m_Border", "(4, 5): # 4.5 and up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if version", "reader): version = reader.version self.pos = reader.read_vector3() if version[:2] <=", "version >= (2017,): # 2017 and up m_PhysicsShapeSize = reader.read_int()", "(5, 6): # 5.6 and up writer.write_int(len(self.m_SubMeshes)) for mesh in", "version): self.texture.save(writer) # Texture2D if version >= (5, 2): #", "# 4.5 and up self.m_Border = reader.read_vector4() self.m_PixelsToUnits = reader.read_float()", "= VertexData(reader) else: verticesSize = reader.read_int() self.vertices = 
[SpriteVertex(reader) for", "None: writer = EndianBinaryWriter(endian=self.reader.endian) version = self.version super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset)", "5.3 and up self.m_IsPolygon = reader.read_boolean() reader.align_stream() if version >=", "if version >= (5, 3): # 5.3 and up self.m_IsPolygon", "# GUID second = reader.read_long() self.m_RenderDataKey = (first, second) self.m_AtlasTags", "version < (2018, 2): # 2018.2 down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect)", "version >= (5, 4, 1, 3) and self.build_type.IsPatch ): #", "secondaryTexturesSize = reader.read_int() self.secondaryTextures = [ SecondarySpriteTexture(reader) for _ in", ".Mesh import BoneWeights4, SubMesh, VertexData from .NamedObject import NamedObject from", "SpriteSettings: def __init__(self, reader): self.value = reader.read_u_int() @property def value(self):", "and up first = reader.read_bytes(16) # GUID second = reader.read_long()", "kSPRRotate180 = (3,) kSPRRotate90 = 4 class SpritePackingMode(IntEnum): kSPMTight =", "reserved def save(self, writer): writer.write_u_int(self.m_settingsRaw) class SpriteVertex: def __init__(self, reader):", "class SpritePackingRotation(IntEnum): kSPRNone = (0,) kSPRFlipHorizontal = (1,) kSPRFlipVertical =", "(3,) kSPRRotate90 = 4 class SpritePackingMode(IntEnum): kSPMTight = (0,) kSPMRectangle", "2): # 2018.2 down self.m_SourceSkinSize = reader.read_int() self.m_SourceSkin = [BoneWeights4(reader)]", "# 1 self.packingMode = SpritePackingMode((self.m_settingsRaw >> 1) & 1) #", "self.m_SubMeshes: mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer)) writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer, version) else: writer.write_int(len(self.vertices))", "writer: EndianBinaryWriter = None): if writer is None: writer =", ">= (2017,): # 2017 and up m_PhysicsShapeSize = 
reader.read_int() self.m_PhysicsShape", "enum import IntEnum from .Mesh import BoneWeights4, SubMesh, VertexData from", "): # 5.4.1p3 and up self.m_Pivot = reader.read_vector2() self.m_Extrude =", "for mesh in self.m_SubMeshes: mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer)) writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer,", ".PPtr import PPtr, save_ptr from ..export import SpriteHelper from ..enums", "writer.write_int(len(self.vertices)) for vertex in self.vertices: vertex.save(writer, version) writer.write_u_short_array(self.indices) writer.align_stream() if", "self.m_Rect = reader.read_rectangle_f() self.m_Offset = reader.read_vector2() if version >= (4,", "else: writer.write_int(len(self.vertices)) for vertex in self.vertices: vertex.save(writer, version) writer.write_u_short_array(self.indices) writer.align_stream()", "for _ in range(secondaryTexturesSize) ] if version >= (5, 6):", "self.m_SpriteAtlas.save(writer) # SpriteAtlas self.m_RD.save(writer, version) if version >= (2017,): #", "self.m_SourceSkinSize = reader.read_int() self.m_SourceSkin = [BoneWeights4(reader)] self.textureRect = reader.read_rectangle_f() self.textureRectOffset", "writer.align_stream() self.m_VertexData.save(writer, version) else: writer.write_int(len(self.vertices)) for vertex in self.vertices: vertex.save(writer,", "= [SpriteVertex(reader) for _ in range(verticesSize)] self.indices = reader.read_u_short_array() reader.align_stream()", "version >= (2017,): # 2017 and up first = reader.read_bytes(16)", "second = reader.read_long() self.m_RenderDataKey = (first, second) self.m_AtlasTags = reader.read_string_array()", "reader.align_stream() if version >= (2018,): # 2018 and up self.m_Bindpose", "2018 and up self.m_Bindpose = reader.read_matrix_array() if version < (2018,", "up first = reader.read_bytes(16) # GUID second = reader.read_long() self.m_RenderDataKey", ">= (5, 3): # 5.3 and up 
writer.write_boolean(self.m_IsPolygon) writer.align_stream() if", "1) # 1 # rest of the bits are reserved", "up self.m_IsPolygon = reader.read_boolean() reader.align_stream() if version >= (2017,): #", "and up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if version >= (4, 5): #", "self.m_SubMeshes = [SubMesh(reader) for _ in range(SubMeshesSize)] IndexBufferSize = reader.read_int()", "4 class SpritePackingMode(IntEnum): kSPMTight = (0,) kSPMRectangle = 1 class", "1) # 1 self.packingRotation = SpritePackingRotation((self.m_settingsRaw >> 2) & 0xF)", "# 2018 and up m_BonesSize = reader.read_int() self.m_Bones = [", "self.vertices = [SpriteVertex(reader) for _ in range(verticesSize)] self.indices = reader.read_u_short_array()", "self.texture = PPtr(reader) # Texture2D if version >= (5, 2):", "# 2017 and up m_PhysicsShapeSize = reader.read_int() self.m_PhysicsShape = [", "SpriteHelper from ..enums import SpriteMeshType from ..streams import EndianBinaryWriter class", "in self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class SecondarySpriteTexture: def __init__(self, reader): self.texture", "if version >= (5, 6): # 5.6 and up SubMeshesSize", "SecondarySpriteTexture: def __init__(self, reader): self.texture = PPtr(reader) # Texture2D self.name", "mesh in self.m_SubMeshes: mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer)) writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer, version)", "& 1 # 1 self.packingMode = SpritePackingMode((self.m_settingsRaw >> 1) &", "range(secondaryTexturesSize) ] if version >= (5, 6): # 5.6 and", "save(self, writer: EndianBinaryWriter = None): if writer is None: writer", "# SpriteAtlas self.m_RD = SpriteRenderData(reader) if version >= (2017,): #", "& 1) # 1 # rest of the bits are", "= reader.read_int() self.secondaryTextures = [ SecondarySpriteTexture(reader) for _ in range(secondaryTexturesSize)", "= [ 
reader.read_vector2_array() for _ in range(m_PhysicsShapeSize) ] if version", "SpriteMeshType((self.m_settingsRaw >> 6) & 1) # 1 # rest of", "(2018, 2): # 2018.2 down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if", "5): # 4.5 and up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if version >=", "def __init__(self, reader): super().__init__(reader=reader) version = self.version self.m_Rect = reader.read_rectangle_f()", "up self.alphaTexture = PPtr(reader) # Texture2D if version >= (2019,):", "self.m_Bones = [ reader.read_vector2_array() for _ in range(m_BonesSize) ] def", "# 5.3 and up writer.write_boolean(self.m_IsPolygon) writer.align_stream() if version >= (2017,):", "writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer, version) else: writer.write_int(len(self.vertices)) for vertex in self.vertices:", "# 5.6 and up SubMeshesSize = reader.read_int() self.m_SubMeshes = [SubMesh(reader)", "writer.align_stream() if version >= (2018,): # 2018 and up writer.write_matrix_array(self.m_Bindpose)", "= PPtr(reader) # Texture2D if version >= (5, 2): #", "SpritePackingRotation((self.m_settingsRaw >> 2) & 0xF) # 4 self.meshType = SpriteMeshType((self.m_settingsRaw", "1 class SpriteSettings: def __init__(self, reader): self.value = reader.read_u_int() @property", "NamedObject from .PPtr import PPtr, save_ptr from ..export import SpriteHelper", "self.version super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if version >= (4, 5): #", "reader): super().__init__(reader=reader) version = self.version self.m_Rect = reader.read_rectangle_f() self.m_Offset =", "kSPRFlipHorizontal = (1,) kSPRFlipVertical = (2,) kSPRRotate180 = (3,) kSPRRotate90", "= PPtr(reader) # Texture2D if version >= (2019,): # 2019", "reader.read_int() self.m_Bones = [ 
reader.read_vector2_array() for _ in range(m_BonesSize) ]", "vertex.save(writer, version) writer.write_u_short_array(self.indices) writer.align_stream() if version >= (2018,): # 2018", "writer.write_int(len(self.m_SubMeshes)) for mesh in self.m_SubMeshes: mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer)) writer.write_bytes(self.m_IndexBuffer) writer.align_stream()", "self.m_Pivot = reader.read_vector2() self.m_Extrude = reader.read_u_int() if version >= (5,", "version >= (2017,): # 2017 and up self.downscaleMultiplier = reader.read_float()", "3): # 5.3 and up writer.write_boolean(self.m_IsPolygon) writer.align_stream() if version >=", "version >= (5, 4, 2) or ( version >= (5,", "= reader.read_matrix_array() if version < (2018, 2): # 2018.2 down", "2018.2 down self.m_SourceSkinSize = reader.read_int() self.m_SourceSkin = [BoneWeights4(reader)] self.textureRect =", "value(self, _value): self.m_settingsRaw = _value self.packed = self.m_settingsRaw & 1", "in self.vertices: vertex.save(writer, version) writer.write_u_short_array(self.indices) writer.align_stream() if version >= (2018,):", "6): # 5.6 and up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if version >=", "None): if writer is None: writer = EndianBinaryWriter(endian=self.reader.endian) version =", "1 # 1 self.packingMode = SpritePackingMode((self.m_settingsRaw >> 1) & 1)", "self.m_IsPolygon = reader.read_boolean() reader.align_stream() if version >= (2017,): # 2017", "if version >= (4, 5): # 4.5 and up writer.write_vector4(self.uvTransform)", "self.settingsRaw.save(writer) if version >= (4, 5): # 4.5 and up", "reader.read_int() self.m_PhysicsShape = [ reader.read_vector2_array() for _ in range(m_PhysicsShapeSize) ]", "(5, 3): # 5.3 and up writer.write_boolean(self.m_IsPolygon) writer.align_stream() if version", "reader.read_vector2() if version >= (5, 6): # 5.6 and up", "version >= (2019,): # 2019 and up secondaryTexturesSize = reader.read_int()", "2): # 5.2 and 
up self.alphaTexture.save(writer) # Texture2D if version", "reader.read_int() self.m_IndexBuffer = reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData = VertexData(reader) else: verticesSize", "= reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData = VertexData(reader) else: verticesSize = reader.read_int()", "2017 and up first = reader.read_bytes(16) # GUID second =", "version >= (5, 2): # 5.2 and up self.alphaTexture.save(writer) #", "m_BonesSize = reader.read_int() self.m_Bones = [ reader.read_vector2_array() for _ in", "self.m_SpriteAtlas = PPtr(reader) # SpriteAtlas self.m_RD = SpriteRenderData(reader) if version", "self.secondaryTextures = [ SecondarySpriteTexture(reader) for _ in range(secondaryTexturesSize) ] if", "5.2 and up self.alphaTexture = PPtr(reader) # Texture2D if version", "(5, 6): # 5.6 and up self.atlasRectOffset = reader.read_vector2() self.settingsRaw", "_value self.packed = self.m_settingsRaw & 1 # 1 self.packingMode =", "# rest of the bits are reserved def save(self, writer):", "5.2 and up self.alphaTexture.save(writer) # Texture2D if version >= (2019,):", "(5, 6): # 5.6 and up SubMeshesSize = reader.read_int() self.m_SubMeshes", "= (1,) kSPRFlipVertical = (2,) kSPRRotate180 = (3,) kSPRRotate90 =", "# 2018 and up self.m_Bindpose = reader.read_matrix_array() if version <", "self.downscaleMultiplier = reader.read_float() def save(self, writer, version): self.texture.save(writer) # Texture2D", "5): # 4.5 and up self.m_Border = reader.read_vector4() self.m_PixelsToUnits =", "= reader.read_u_int() if version >= (5, 3): # 5.3 and", "PPtr, save_ptr from ..export import SpriteHelper from ..enums import SpriteMeshType", "Sprite(NamedObject): @property def image(self): return SpriteHelper.get_image_from_sprite(self) def __init__(self, reader): super().__init__(reader=reader)", "# Texture2D if version >= (2019,): # 2019 and up", "self.uv = reader.read_vector2() def save(self, writer, version): 
writer.write_vector3(self.pos) if version[:2]", "kSPMRectangle = 1 class SpriteSettings: def __init__(self, reader): self.value =", "reader.read_rectangle_f() self.m_Offset = reader.read_vector2() if version >= (4, 5): #", ".NamedObject import NamedObject from .PPtr import PPtr, save_ptr from ..export", "for _ in range(m_PhysicsShapeSize) ] if version >= (2018,): #", "self.vertices: vertex.save(writer, version) writer.write_u_short_array(self.indices) writer.align_stream() if version >= (2018,): #", "version >= (2018,): # 2018 and up m_BonesSize = reader.read_int()", "self.atlasRectOffset = reader.read_vector2() self.settingsRaw = SpriteSettings(reader) if version >= (4,", "(2018,): # 2018 and up self.m_Bindpose = reader.read_matrix_array() if version", "] if version >= (5, 6): # 5.6 and up", "= self.version self.m_Rect = reader.read_rectangle_f() self.m_Offset = reader.read_vector2() if version", "version) writer.write_u_short_array(self.indices) writer.align_stream() if version >= (2018,): # 2018 and", "writer): self.texture.save(writer) writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum): kSPRNone = (0,) kSPRFlipHorizontal =", "writer.write_vector2_array(phys) if version >= (2018,): # 2018 and up writer.write_int(len(self.m_Bones))", "if version < (2018, 2): # 2018.2 down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer)", "from .PPtr import PPtr, save_ptr from ..export import SpriteHelper from", "< (2018, 2): # 2018.2 down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset)", "reader.read_vector2() def save(self, writer, version): writer.write_vector3(self.pos) if version[:2] <= (4,", "up self.uvTransform = reader.read_vector4() if version >= (2017,): # 2017", "reader.read_vector4() if version >= (2017,): # 2017 and up self.downscaleMultiplier", "# 2018 and up writer.write_matrix_array(self.m_Bindpose) if 
version < (2018, 2):", "Texture2D self.name = reader.read_string_to_null() def save(self, writer): self.texture.save(writer) writer.write_string_to_null(self.name) class", "bone in self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class SecondarySpriteTexture: def __init__(self, reader):", "def save(self, writer): self.texture.save(writer) writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum): kSPRNone = (0,)", "] def save(self, writer: EndianBinaryWriter = None): if writer is", "(2018,): # 2018 and up m_BonesSize = reader.read_int() self.m_Bones =", "= reader.read_vector2() self.settingsRaw = SpriteSettings(reader) if version >= (4, 5):", "and up m_PhysicsShapeSize = reader.read_int() self.m_PhysicsShape = [ reader.read_vector2_array() for", "[SpriteVertex(reader) for _ in range(verticesSize)] self.indices = reader.read_u_short_array() reader.align_stream() if", "2017 and up writer.write_bytes(self.m_RenderDataKey[0]) # GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) #", "up self.atlasRectOffset = reader.read_vector2() self.settingsRaw = SpriteSettings(reader) if version >=", "writer.write_int(len(self.m_IndexBuffer)) writer.write_bytes(self.m_IndexBuffer) writer.align_stream() self.m_VertexData.save(writer, version) else: writer.write_int(len(self.vertices)) for vertex in", "(4, 3): # 4.3 and down self.uv = reader.read_vector2() def", "= SpritePackingMode((self.m_settingsRaw >> 1) & 1) # 1 self.packingRotation =", "writer.write_matrix_array(self.m_Bindpose) if version < (2018, 2): # 2018.2 down writer.write_int(self.m_SourceSkinSize)", "IntEnum from .Mesh import BoneWeights4, SubMesh, VertexData from .NamedObject import", ">= (2017,): # 2017 and up writer.write_bytes(self.m_RenderDataKey[0]) # GUID writer.write_long(self.m_RenderDataKey[1])", "version >= (2017,): # 2017 and up writer.write_bytes(self.m_RenderDataKey[0]) # GUID", "version 
>= (5, 2): # 5.2 and up self.alphaTexture =", "4, 2) or ( version >= (5, 4, 1, 3)", "SpriteHelper.get_image_from_sprite(self) def __init__(self, reader): super().__init__(reader=reader) version = self.version self.m_Rect =", "__init__(self, reader): self.texture = PPtr(reader) # Texture2D self.name = reader.read_string_to_null()", "and up self.m_Bindpose = reader.read_matrix_array() if version < (2018, 2):", "version[:2] <= (4, 3): # 4.3 and down writer.write__vector2(self.uv) class", "2): # 2018.2 down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if version", "up writer.write_bytes(self.m_RenderDataKey[0]) # GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) # SpriteAtlas self.m_RD.save(writer,", "reader.read_int() self.vertices = [SpriteVertex(reader) for _ in range(verticesSize)] self.indices =", "the bits are reserved def save(self, writer): writer.write_u_int(self.m_settingsRaw) class SpriteVertex:", "5.4.1p3 and up writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if version >= (5, 3):", "<= (4, 3): # 4.3 and down writer.write__vector2(self.uv) class SpriteRenderData:", "up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if version >= (4, 5): # 4.5", "5.6 and up self.atlasRectOffset = reader.read_vector2() self.settingsRaw = SpriteSettings(reader) if", "= reader.read_u_int() @property def value(self): return self.m_settingsRaw @value.setter def value(self,", "and up writer.write_bytes(self.m_RenderDataKey[0]) # GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) # SpriteAtlas", "= (0,) kSPRFlipHorizontal = (1,) kSPRFlipVertical = (2,) kSPRRotate180 =", "and down writer.write__vector2(self.uv) class SpriteRenderData: def __init__(self, reader): version =", 
"(5, 4, 2) or ( version >= (5, 4, 1,", "..streams import EndianBinaryWriter class Sprite(NamedObject): @property def image(self): return SpriteHelper.get_image_from_sprite(self)", "self.m_IndexBuffer = reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData = VertexData(reader) else: verticesSize =", "import NamedObject from .PPtr import PPtr, save_ptr from ..export import", "and up self.m_IsPolygon = reader.read_boolean() reader.align_stream() if version >= (2017,):", "version >= (5, 6): # 5.6 and up SubMeshesSize =", "up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if version >= (5, 4, 2) or", "up self.alphaTexture.save(writer) # Texture2D if version >= (2019,): # 2019", "up self.m_Border = reader.read_vector4() self.m_PixelsToUnits = reader.read_float() if version >=", "SecondarySpriteTexture(reader) for _ in range(secondaryTexturesSize) ] if version >= (5,", "def image(self): return SpriteHelper.get_image_from_sprite(self) def __init__(self, reader): super().__init__(reader=reader) version =", ">= (2018,): # 2018 and up m_BonesSize = reader.read_int() self.m_Bones", "version = reader.version self.pos = reader.read_vector3() if version[:2] <= (4,", "reader.read_long() self.m_RenderDataKey = (first, second) self.m_AtlasTags = reader.read_string_array() self.m_SpriteAtlas =", "writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if version >= (5, 6): #", "# 2019 and up secondaryTexturesSize = reader.read_int() self.secondaryTextures = [", "m_PhysicsShapeSize = reader.read_int() self.m_PhysicsShape = [ reader.read_vector2_array() for _ in", "= SpriteRenderData(reader) if version >= (2017,): # 2017 and up", "writer.align_stream() if version >= (2017,): # 2017 and up writer.write_bytes(self.m_RenderDataKey[0])", "self.packed = self.m_settingsRaw & 1 # 1 self.packingMode = 
SpritePackingMode((self.m_settingsRaw", "down self.m_SourceSkinSize = reader.read_int() self.m_SourceSkin = [BoneWeights4(reader)] self.textureRect = reader.read_rectangle_f()", "up m_BonesSize = reader.read_int() self.m_Bones = [ reader.read_vector2_array() for _", "SpriteSettings(reader) if version >= (4, 5): # 4.5 and up", "reader.read_string_array() self.m_SpriteAtlas = PPtr(reader) # SpriteAtlas self.m_RD = SpriteRenderData(reader) if", "[ reader.read_vector2_array() for _ in range(m_PhysicsShapeSize) ] if version >=", "< (2018, 2): # 2018.2 down self.m_SourceSkinSize = reader.read_int() self.m_SourceSkin", "writer.write_u_short_array(self.indices) writer.align_stream() if version >= (2018,): # 2018 and up", "= reader.read_vector4() self.m_PixelsToUnits = reader.read_float() if version >= (5, 4,", "super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if version >= (4, 5): # 4.5", "SubMeshesSize = reader.read_int() self.m_SubMeshes = [SubMesh(reader) for _ in range(SubMeshesSize)]", "version = reader.version self.texture = PPtr(reader) # Texture2D if version", "and up self.alphaTexture = PPtr(reader) # Texture2D if version >=", "= reader.read_int() self.m_IndexBuffer = reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData = VertexData(reader) else:", "if version[:2] <= (4, 3): # 4.3 and down writer.write__vector2(self.uv)", "6): # 5.6 and up self.atlasRectOffset = reader.read_vector2() self.settingsRaw =", "3): # 5.3 and up self.m_IsPolygon = reader.read_boolean() reader.align_stream() if", "(4, 3): # 4.3 and down writer.write__vector2(self.uv) class SpriteRenderData: def", "class SpriteRenderData: def __init__(self, reader): version = reader.version self.texture =", "and up writer.write_int(len(self.secondaryTextures)) for tex in self.secondaryTextures: tex.save(writer) if version", "if version >= (2017,): # 2017 and up m_PhysicsShapeSize =", "and self.build_type.IsPatch ): # 5.4.1p3 and up 
writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if", "for bone in self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class SecondarySpriteTexture: def __init__(self,", "(2019,): # 2019 and up secondaryTexturesSize = reader.read_int() self.secondaryTextures =", "2018 and up writer.write_int(len(self.m_Bones)) for bone in self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes)", "writer): writer.write_u_int(self.m_settingsRaw) class SpriteVertex: def __init__(self, reader): version = reader.version", "and up writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if version >= (5, 3): #", "writer.write_float(self.m_PixelsToUnits) if version >= (5, 4, 2) or ( version", "is None: writer = EndianBinaryWriter(endian=self.reader.endian) version = self.version super().save(writer) writer.write_rectangle_f(self.m_Rect)", "up writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if version >= (5, 3): # 5.3", "EndianBinaryWriter = None): if writer is None: writer = EndianBinaryWriter(endian=self.reader.endian)", "__init__(self, reader): version = reader.version self.pos = reader.read_vector3() if version[:2]", "= reader.read_vector2() self.m_Extrude = reader.read_u_int() if version >= (5, 3):", "= EndianBinaryWriter(endian=self.reader.endian) version = self.version super().save(writer) writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if version", "self.pos = reader.read_vector3() if version[:2] <= (4, 3): # 4.3", ">= (2018,): # 2018 and up writer.write_int(len(self.m_Bones)) for bone in", "writer.write_int(len(self.m_Bones)) for bone in self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class SecondarySpriteTexture: def", "for _ in range(m_BonesSize) ] def save(self, writer: EndianBinaryWriter =", "(2017,): # 2017 and up m_PhysicsShapeSize = reader.read_int() self.m_PhysicsShape =", "reader.read_vector4() 
self.m_PixelsToUnits = reader.read_float() if version >= (5, 4, 2)", "2) or ( version >= (5, 4, 1, 3) and", "4.5 and up self.uvTransform = reader.read_vector4() if version >= (2017,):", "reader.read_boolean() reader.align_stream() if version >= (2017,): # 2017 and up", "BoneWeights4, SubMesh, VertexData from .NamedObject import NamedObject from .PPtr import", ">= (5, 4, 1, 3) and self.build_type.IsPatch ): # 5.4.1p3", "kSPRRotate90 = 4 class SpritePackingMode(IntEnum): kSPMTight = (0,) kSPMRectangle =", "reader.version self.pos = reader.read_vector3() if version[:2] <= (4, 3): #", "= reader.read_u_short_array() reader.align_stream() if version >= (2018,): # 2018 and", "= reader.read_float() def save(self, writer, version): self.texture.save(writer) # Texture2D if", "and up writer.write_matrix_array(self.m_Bindpose) if version < (2018, 2): # 2018.2", "class SpriteSettings: def __init__(self, reader): self.value = reader.read_u_int() @property def", "SpritePackingMode((self.m_settingsRaw >> 1) & 1) # 1 self.packingRotation = SpritePackingRotation((self.m_settingsRaw", "1, 3) and self.build_type.IsPatch ): # 5.4.1p3 and up self.m_Pivot", "range(m_BonesSize) ] def save(self, writer: EndianBinaryWriter = None): if writer", "self.texture = PPtr(reader) # Texture2D self.name = reader.read_string_to_null() def save(self,", "1 # rest of the bits are reserved def save(self,", "import EndianBinaryWriter class Sprite(NamedObject): @property def image(self): return SpriteHelper.get_image_from_sprite(self) def", "# 4.5 and up writer.write_vector4(self.uvTransform) if version >= (2017,): #", ">= (5, 6): # 5.6 and up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if", "kSPRNone = (0,) kSPRFlipHorizontal = (1,) kSPRFlipVertical = (2,) kSPRRotate180", "VertexData(reader) else: verticesSize = reader.read_int() self.vertices = [SpriteVertex(reader) for _", "5): # 4.5 and up writer.write_vector4(self.uvTransform) if version >= (2017,):", "and up m_BonesSize = 
reader.read_int() self.m_Bones = [ reader.read_vector2_array() for", "2019 and up secondaryTexturesSize = reader.read_int() self.secondaryTextures = [ SecondarySpriteTexture(reader)", "[ SecondarySpriteTexture(reader) for _ in range(secondaryTexturesSize) ] if version >=", "if version >= (2017,): # 2017 and up self.downscaleMultiplier =", "4 self.meshType = SpriteMeshType((self.m_settingsRaw >> 6) & 1) # 1", "class SpritePackingMode(IntEnum): kSPMTight = (0,) kSPMRectangle = 1 class SpriteSettings:", "for vertex in self.vertices: vertex.save(writer, version) writer.write_u_short_array(self.indices) writer.align_stream() if version", "= reader.read_string_to_null() def save(self, writer): self.texture.save(writer) writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum): kSPRNone", "reader.read_string_to_null() def save(self, writer): self.texture.save(writer) writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum): kSPRNone =", "# GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) # SpriteAtlas self.m_RD.save(writer, version) if", "else: verticesSize = reader.read_int() self.vertices = [SpriteVertex(reader) for _ in", "[BoneWeights4(reader)] self.textureRect = reader.read_rectangle_f() self.textureRectOffset = reader.read_vector2() if version >=", "(5, 2): # 5.2 and up self.alphaTexture = PPtr(reader) #", ">= (5, 3): # 5.3 and up self.m_IsPolygon = reader.read_boolean()", "super().__init__(reader=reader) version = self.version self.m_Rect = reader.read_rectangle_f() self.m_Offset = reader.read_vector2()", "= reader.read_bytes(16) # GUID second = reader.read_long() self.m_RenderDataKey = (first,", "version) else: writer.write_int(len(self.vertices)) for vertex in self.vertices: vertex.save(writer, version) writer.write_u_short_array(self.indices)", "writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) # SpriteAtlas 
self.m_RD.save(writer, version) if version >= (2017,):", "self.packingMode = SpritePackingMode((self.m_settingsRaw >> 1) & 1) # 1 self.packingRotation", ">= (4, 5): # 4.5 and up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if", "(2018, 2): # 2018.2 down self.m_SourceSkinSize = reader.read_int() self.m_SourceSkin =", "if version >= (2018,): # 2018 and up writer.write_int(len(self.m_Bones)) for", "self.m_Extrude = reader.read_u_int() if version >= (5, 3): # 5.3", "1, 3) and self.build_type.IsPatch ): # 5.4.1p3 and up writer.write_vector2(self.m_Pivot)", "writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if version >= (5, 4, 2) or (", "save(self, writer, version): self.texture.save(writer) # Texture2D if version >= (5,", "PPtr(reader) # Texture2D if version >= (2019,): # 2019 and", "class SpriteVertex: def __init__(self, reader): version = reader.version self.pos =", "save(self, writer): writer.write_u_int(self.m_settingsRaw) class SpriteVertex: def __init__(self, reader): version =", "if version < (2018, 2): # 2018.2 down self.m_SourceSkinSize =", "if version >= (2018,): # 2018 and up self.m_Bindpose =", "<= (4, 3): # 4.3 and down self.uv = reader.read_vector2()", "5.6 and up writer.write_int(len(self.m_SubMeshes)) for mesh in self.m_SubMeshes: mesh.save(writer, version)", "(5, 4, 1, 3) and self.build_type.IsPatch ): # 5.4.1p3 and", "# 4.5 and up self.uvTransform = reader.read_vector4() if version >=", "def value(self): return self.m_settingsRaw @value.setter def value(self, _value): self.m_settingsRaw =", "reader.read_int() self.m_SourceSkin = [BoneWeights4(reader)] self.textureRect = reader.read_rectangle_f() self.textureRectOffset = reader.read_vector2()", "from ..enums import SpriteMeshType from ..streams import EndianBinaryWriter class Sprite(NamedObject):", "return self.m_settingsRaw @value.setter def value(self, _value): self.m_settingsRaw = _value self.packed", ">= (5, 6): # 5.6 and up 
writer.write_int(len(self.m_SubMeshes)) for mesh", "4.3 and down self.uv = reader.read_vector2() def save(self, writer, version):", "SpriteRenderData: def __init__(self, reader): version = reader.version self.texture = PPtr(reader)", "(2017,): # 2017 and up writer.write_bytes(self.m_RenderDataKey[0]) # GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags)", "writer.write_bytes(self.m_RenderDataKey[0]) # GUID writer.write_long(self.m_RenderDataKey[1]) writer.write_string_array(self.m_AtlasTags) self.m_SpriteAtlas.save(writer) # SpriteAtlas self.m_RD.save(writer, version)", "in self.m_PhysicsShape: writer.write_vector2_array(phys) if version >= (2018,): # 2018 and", "class SecondarySpriteTexture: def __init__(self, reader): self.texture = PPtr(reader) # Texture2D", "self.name = reader.read_string_to_null() def save(self, writer): self.texture.save(writer) writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum):", "1 self.packingMode = SpritePackingMode((self.m_settingsRaw >> 1) & 1) # 1", "4.5 and up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if version >= (5, 4,", "self.m_Offset = reader.read_vector2() if version >= (4, 5): # 4.5", "from enum import IntEnum from .Mesh import BoneWeights4, SubMesh, VertexData", "writer.write_rectangle_f(self.m_Rect) writer.write_vector2(self.m_Offset) if version >= (4, 5): # 4.5 and", "2018 and up m_BonesSize = reader.read_int() self.m_Bones = [ reader.read_vector2_array()", "self.m_Bones: writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class SecondarySpriteTexture: def __init__(self, reader): self.texture =", "version >= (4, 5): # 4.5 and up writer.write_vector4(self.uvTransform) if", "version >= (2017,): # 2017 and up writer.write_int(len(self.m_PhysicsShape)) for phys", "4.3 and down writer.write__vector2(self.uv) class SpriteRenderData: def __init__(self, reader): version", "SpriteMeshType from ..streams import 
EndianBinaryWriter class Sprite(NamedObject): @property def image(self):", "= 4 class SpritePackingMode(IntEnum): kSPMTight = (0,) kSPMRectangle = 1", "from .NamedObject import NamedObject from .PPtr import PPtr, save_ptr from", "def save(self, writer): writer.write_u_int(self.m_settingsRaw) class SpriteVertex: def __init__(self, reader): version", "up secondaryTexturesSize = reader.read_int() self.secondaryTextures = [ SecondarySpriteTexture(reader) for _", "0xF) # 4 self.meshType = SpriteMeshType((self.m_settingsRaw >> 6) & 1)", "_ in range(verticesSize)] self.indices = reader.read_u_short_array() reader.align_stream() if version >=", "version >= (2018,): # 2018 and up self.m_Bindpose = reader.read_matrix_array()", "reader.read_vector3() if version[:2] <= (4, 3): # 4.3 and down", "= (0,) kSPMRectangle = 1 class SpriteSettings: def __init__(self, reader):", "up writer.write_int(len(self.m_PhysicsShape)) for phys in self.m_PhysicsShape: writer.write_vector2_array(phys) if version >=", "(2,) kSPRRotate180 = (3,) kSPRRotate90 = 4 class SpritePackingMode(IntEnum): kSPMTight", "6): # 5.6 and up SubMeshesSize = reader.read_int() self.m_SubMeshes =", "def save(self, writer, version): writer.write_vector3(self.pos) if version[:2] <= (4, 3):", "vertex in self.vertices: vertex.save(writer, version) writer.write_u_short_array(self.indices) writer.align_stream() if version >=", "save(self, writer): self.texture.save(writer) writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum): kSPRNone = (0,) kSPRFlipHorizontal", "= reader.read_long() self.m_RenderDataKey = (first, second) self.m_AtlasTags = reader.read_string_array() self.m_SpriteAtlas", "1 self.packingRotation = SpritePackingRotation((self.m_settingsRaw >> 2) & 0xF) # 4", "= SpritePackingRotation((self.m_settingsRaw >> 2) & 0xF) # 4 self.meshType =", "from ..streams import EndianBinaryWriter class Sprite(NamedObject): @property def image(self): return", "if version >= (4, 5): # 4.5 and up 
writer.write_vector4(self.m_Border)", "of the bits are reserved def save(self, writer): writer.write_u_int(self.m_settingsRaw) class", "if version[:2] <= (4, 3): # 4.3 and down self.uv", "2): # 5.2 and up self.alphaTexture = PPtr(reader) # Texture2D", "for _ in range(verticesSize)] self.indices = reader.read_u_short_array() reader.align_stream() if version", "= reader.read_int() self.m_SourceSkin = [BoneWeights4(reader)] self.textureRect = reader.read_rectangle_f() self.textureRectOffset =", "# 5.3 and up self.m_IsPolygon = reader.read_boolean() reader.align_stream() if version", "(5, 2): # 5.2 and up self.alphaTexture.save(writer) # Texture2D if", "writer.write_vector2_array(bone) self.set_raw_data(writer.bytes) class SecondarySpriteTexture: def __init__(self, reader): self.texture = PPtr(reader)", "self.alphaTexture.save(writer) # Texture2D if version >= (2019,): # 2019 and", ">= (2019,): # 2019 and up secondaryTexturesSize = reader.read_int() self.secondaryTextures", "(0,) kSPMRectangle = 1 class SpriteSettings: def __init__(self, reader): self.value", "reader): self.texture = PPtr(reader) # Texture2D self.name = reader.read_string_to_null() def", "# 2018.2 down writer.write_int(self.m_SourceSkinSize) self.m_SourceSkin[0].save(writer) writer.write_rectangle_f(self.textureRect) writer.write_vector2(self.textureRectOffset) if version >=", "): # 5.4.1p3 and up writer.write_vector2(self.m_Pivot) writer.write_u_int(self.m_Extrude) if version >=", "self.meshType = SpriteMeshType((self.m_settingsRaw >> 6) & 1) # 1 #", "# 5.6 and up self.atlasRectOffset = reader.read_vector2() self.settingsRaw = SpriteSettings(reader)", "and up secondaryTexturesSize = reader.read_int() self.secondaryTextures = [ SecondarySpriteTexture(reader) for", "# 2017 and up first = reader.read_bytes(16) # GUID second", "= reader.read_float() if version >= (5, 4, 2) or (", "= reader.read_rectangle_f() self.textureRectOffset = reader.read_vector2() if version >= (5, 6):", "(2017,): # 2017 and up 
writer.write_int(len(self.m_PhysicsShape)) for phys in self.m_PhysicsShape:", "4.5 and up self.m_Border = reader.read_vector4() self.m_PixelsToUnits = reader.read_float() if", "@property def image(self): return SpriteHelper.get_image_from_sprite(self) def __init__(self, reader): super().__init__(reader=reader) version", "5): # 4.5 and up self.uvTransform = reader.read_vector4() if version", "reader.read_vector2() self.settingsRaw = SpriteSettings(reader) if version >= (4, 5): #", "version) if version >= (2017,): # 2017 and up writer.write_int(len(self.m_PhysicsShape))", "= (3,) kSPRRotate90 = 4 class SpritePackingMode(IntEnum): kSPMTight = (0,)", "up SubMeshesSize = reader.read_int() self.m_SubMeshes = [SubMesh(reader) for _ in", "= (first, second) self.m_AtlasTags = reader.read_string_array() self.m_SpriteAtlas = PPtr(reader) #", "up writer.write_int(len(self.secondaryTextures)) for tex in self.secondaryTextures: tex.save(writer) if version >=", "# 5.6 and up writer.write_vector2(self.atlasRectOffset) self.settingsRaw.save(writer) if version >= (4,", "value(self): return self.m_settingsRaw @value.setter def value(self, _value): self.m_settingsRaw = _value", "reader.read_vector2() self.m_Extrude = reader.read_u_int() if version >= (5, 3): #", "= reader.read_rectangle_f() self.m_Offset = reader.read_vector2() if version >= (4, 5):", ">= (2018,): # 2018 and up writer.write_matrix_array(self.m_Bindpose) if version <", "2017 and up self.downscaleMultiplier = reader.read_float() def save(self, writer, version):", "if writer is None: writer = EndianBinaryWriter(endian=self.reader.endian) version = self.version", "& 1) # 1 self.packingRotation = SpritePackingRotation((self.m_settingsRaw >> 2) &", "and up writer.write_vector4(self.m_Border) writer.write_float(self.m_PixelsToUnits) if version >= (5, 4, 2)", "_value): self.m_settingsRaw = _value self.packed = self.m_settingsRaw & 1 #", "reader.read_u_int() if version >= (5, 3): # 5.3 and up", "self.value = 
reader.read_u_int() @property def value(self): return self.m_settingsRaw @value.setter def", "..enums import SpriteMeshType from ..streams import EndianBinaryWriter class Sprite(NamedObject): @property", "2017 and up m_PhysicsShapeSize = reader.read_int() self.m_PhysicsShape = [ reader.read_vector2_array()", "# 2019 and up writer.write_int(len(self.secondaryTextures)) for tex in self.secondaryTextures: tex.save(writer)", "reader.align_stream() self.m_VertexData = VertexData(reader) else: verticesSize = reader.read_int() self.vertices =", "self.m_settingsRaw @value.setter def value(self, _value): self.m_settingsRaw = _value self.packed =", "# 2018 and up writer.write_int(len(self.m_Bones)) for bone in self.m_Bones: writer.write_vector2_array(bone)", "range(SubMeshesSize)] IndexBufferSize = reader.read_int() self.m_IndexBuffer = reader.read_bytes(IndexBufferSize) reader.align_stream() self.m_VertexData =", "reader.read_u_short_array() reader.align_stream() if version >= (2018,): # 2018 and up", "or ( version >= (5, 4, 1, 3) and self.build_type.IsPatch", "__init__(self, reader): super().__init__(reader=reader) version = self.version self.m_Rect = reader.read_rectangle_f() self.m_Offset", "self.texture.save(writer) writer.write_string_to_null(self.name) class SpritePackingRotation(IntEnum): kSPRNone = (0,) kSPRFlipHorizontal = (1,)", "and up writer.write_int(len(self.m_SubMeshes)) for mesh in self.m_SubMeshes: mesh.save(writer, version) writer.write_int(len(self.m_IndexBuffer))", "and up writer.write_vector4(self.uvTransform) if version >= (2017,): # 2017 and" ]
[ "print('{0:5d} {1}'.format(i+1, self.filters[i].name)) matched.append(i) return np.array(matched) class ParamFilter(FilterDefinition): def __init__(self,", "den = np.trapz(absp*thru_full, full_x) return -2.5*np.log10(num/den) @property def pivot(self): \"\"\"", "= np.interp([0.025, 0.975], filt/filt.max(), self.wave[1:]) return np.diff(ctw95)[0] def for_filter_file(self, row_str='{i:6}", "np.array(matched) class ParamFilter(FilterDefinition): def __init__(self, line='# Filter #20, RES#78: COSMOS/SUBARU_filter_B.txt", "'filters', file) else: file_path = os.path.join(path, file) with open(file_path, 'r')", "np.cast[float](trans) filters.append(new_filter) # Initialize filter header = ' '.join(line.split()[1:]) wave", "get_extinction(self, EBV=0, Rv=3.1): \"\"\" Extinction factor \"\"\" import astropy.units as", "None: file_path = os.path.join(os.getenv('EAZYCODE'), 'filters', file) else: file_path = os.path.join(path,", "for i, (w, t) in enumerate(zip(self.wave, self.throughput))] return '\\n'.join(lines) class", "print(len(res.filters)) bp = res[205] print(bp) fig, ax = plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave,", "if verbose: print('{0:5d} {1}'.format(i+1, self.filters[i].name)) matched.append(i) return np.array(matched) class ParamFilter(FilterDefinition):", "np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) filters.append(new_filter) # Initialize filter header", "wave = [] trans = [] else: lspl = np.cast[float](line.split())", "can be put in the EAZY filter file \"\"\" header", "astropy.units as u f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda = f99(self.wave)", "__init__(self, line='# Filter #20, RES#78: COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'): self.lambda_c =", "no extrapolation vega_full = interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'], left=0, right=0) thru_full", "import Table from . 
import utils __all__ = [\"FilterDefinition\", \"FilterFile\",", "integrator(self.wave, self.throughput/self.wave) pivot = np.sqrt(num/den) return pivot @property def equivwidth(self):", "return self.name.__str__() def get_extinction(self, EBV=0, Rv=3.1): \"\"\" Extinction factor \"\"\"", "= header # new_filter.wave = np.cast[float](wave) # new_filter.throughput = np.cast[float](trans)", "be put in the EAZY filter file \"\"\" header =", "for filter in self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name)) for i in", "wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter) self.filters = filters @property def NFILT(self): \"\"\"", "right=0) thru_full = interp(full_x, self.wave, self.throughput, left=0, right=0) # AB", "c import astropy.units as u try: import grizli.utils_c interp =", "= [] else: lspl = np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2]) # last", "= open(file,'w') for filter in self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name)) for", "else: f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda = f99(self.wave) delta =", "return self.name.__repr__() def __str__(self): return self.name.__str__() def get_extinction(self, EBV=0, Rv=3.1):", "__repr__(self): return self.name.__repr__() def __str__(self): return self.name.__str__() def get_extinction(self, EBV=0,", "within the filter. Optionally supply a source spectrum. \"\"\" import", "then match case. 
\"\"\" import re if not case: search_string", "width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect = self.equivwidth / self.throughput.max() return rect", "self.pivot, self.ABVega, self.ctw95)] lines += [row_str.format(i=i+1, wave=w, thru=t) for i,", "is None: source_flux = self.throughput*0.+1 else: source_flux = interp(self.wave, source_lam,", "Filter rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect = self.equivwidth / self.throughput.max()", "spectrum arrays full_x = np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x = full_x[np.argsort(full_x)] #", "VEGA['WAVELENGTH'], VEGA['FLUX'], left=0, right=0) thru_full = interp(full_x, self.wave, self.throughput, left=0,", "\"\"\" import astropy.units as u try: import grizli.utils_c interp =", "\"\"\" import re if not case: search_string = search_string.upper() matched", "lines += [row_str.format(i=i+1, wave=w, thru=t) for i, (w, t) in", "search_string, case=False, verbose=True): \"\"\" Search filter names for ``search_string``. 
If", "to wavelength grid, no extrapolation vega_full = interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'],", "= np.trapz(self.throughput/self.wave, self.wave) def __repr__(self): return self.name.__repr__() def __str__(self): return", "np.trapz(self.throughput, self.wave) @property def rectwidth(self): \"\"\" Filter rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html", "source_flux=None): \"\"\" Get the MW extinction correction within the filter.", "N = len(self.wave) lines = [header.format(N, self.name.split('lambda_c')[0], self.pivot, self.ABVega, self.ctw95)]", "def __init__(self, line='# Filter #20, RES#78: COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'): self.lambda_c", "num = integrator(self.wave, self.wave*self.throughput) den = integrator(self.wave, self.throughput/self.wave) pivot =", "Filter #20, RES#78: COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'): self.lambda_c = float(line.split('lambda_c=')[1]) self.name", "= fp.readlines() self.filename = file_path filters = [] wave =", "for c in VEGA.colnames: VEGA[c] = VEGA[c].astype(float) class FilterDefinition: def", "ax.set_ylabel('throughput') ax.legend() ax.grid() fig.tight_layout(pad=0.5) \"\"\" if path is None: file_path", "fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name)) for i in range(len(filter.wave)): fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1,", "return self.filters[i1-1] def names(self, verbose=True): \"\"\" Print the filter names.", "width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl = np.diff(self.wave) filt = np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95", "i in range(len(filter.wave)): fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i])) fp.close() string_list", "# new_filter.wave = np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) filters.append(new_filter) #", "filter, e.g., 161 = 2mass-j \"\"\" return self.filters[i1-1] def names(self,", "= name self.wave = 
wave self.throughput = throughput self.Aflux =", "same units absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate over the bandpass,", "AB-Vega= {3:.3f} w95={4:.1f}' N = len(self.wave) lines = [header.format(N, self.name.split('lambda_c')[0],", "= utils.interp_conserve # Union of throughput and Vega spectrum arrays", "1. # pysynphot Bandpass if bp is not None: self.wave", "# new_filter.wave = np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) new_filter =", "= self.equivwidth / self.throughput.max() return rect @property def ctw95(self): \"\"\"", "np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans))", "wave self.throughput = throughput self.Aflux = 1. # pysynphot Bandpass", "bandpass \"\"\" self.name = name self.wave = wave self.throughput =", "delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) / np.trapz(self.throughput*source_flux, self.wave) if mag: return", "{wave:.5e} {thru:.5e}'): \"\"\" Return a string that can be put", "filt_name) is not None: if verbose: print('{0:5d} {1}'.format(i+1, self.filters[i].name)) matched.append(i)", "lines = [header.format(N, self.name.split('lambda_c')[0], self.pivot, self.ABVega, self.ctw95)] lines += [row_str.format(i=i+1,", "---------- name : str Label name wave : array Wavelength", "\"\"\" Get the MW extinction correction within the filter. Optionally", ". import utils __all__ = [\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"] VEGA_FILE =", "import astropy.units as u f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda =", "filter file. 
\"\"\" fp = open(file,'w') for filter in self.filters:", "string that can be put in the EAZY filter file", "fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i])) fp.close() string_list = self.names(verbose=False) fp", "filters = [] wave = [] trans = [] header", "array Throughput, arbitrary normalization bp : optional, `pysynphot.obsbandpass` object `pysynphot`", "cumulative throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl = np.diff(self.wave) filt =", "label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput') ax.legend() ax.grid() fig.tight_layout(pad=0.5) \"\"\" if path", "Rv=3.1): \"\"\" Extinction factor \"\"\" import astropy.units as u f99", "= wave self.throughput = throughput self.Aflux = 1. # pysynphot", "extinction_correction(self, EBV, Rv=3.1, mag=True, source_lam=None, source_flux=None): \"\"\" Get the MW", "[] for i in range(len(self.filters)): filt_name = self.filters[i].name if not", "as np import os from astropy.table import Table from .", "def search(self, search_string, case=False, verbose=True): \"\"\" Search filter names for", "grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve if", "i1): \"\"\" Return unit-indexed filter, e.g., 161 = 2mass-j \"\"\"", "np.trapz(absp*thru_full, full_x) return -2.5*np.log10(num/den) @property def pivot(self): \"\"\" Pivot wavelength", "__getitem__(self, i1): \"\"\" Return unit-indexed filter, e.g., 161 = 2mass-j", "None: if verbose: print('{0:5d} {1}'.format(i+1, self.filters[i].name)) matched.append(i) return np.array(matched) class", "defined.') return False if source_flux is None: source_flux = self.throughput*0.+1", "910) | (self.wave.max() > 6.e4): Alambda = 0. 
else: f99", "\"ParamFilter\"] VEGA_FILE = os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA = Table.read(VEGA_FILE) for c", "vega_full = interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'], left=0, right=0) thru_full = interp(full_x,", "6.e4): Alambda = 0. else: f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda", "__init__(self, name=None, wave=None, throughput=None, bp=None): \"\"\" Bandpass object Parameters ----------", "cgs # Interpolate to wavelength grid, no extrapolation vega_full =", "Label name wave : array Wavelength array, in `astropy.units.Angstrom`. throughput", "utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda = f99(self.wave) self.Aflux = 10**(-0.4*self.Alambda) def extinction_correction(self,", "ax.legend() ax.grid() fig.tight_layout(pad=0.5) \"\"\" if path is None: file_path =", "bp.name self.norm = 1. if self.throughput is not None: self.norm", "filters.append(new_filter) # Initialize filter header = ' '.join(line.split()[1:]) wave =", "= 1. # pysynphot Bandpass if bp is not None:", "None: print('Filter not defined.') return False if source_flux is None:", "= np.cast[float](trans) filters.append(new_filter) # Initialize filter header = ' '.join(line.split()[1:])", "print(bp) fig, ax = plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms')", "if 'lambda_c' in line: if len(wave) > 0: # Make", "fp.writelines(string_list) fp.close() if verbose: print('Wrote <{0}[.info]>'.format(file)) def search(self, search_string, case=False,", "the filter information to a filter file. 
\"\"\" fp =", "os.path.join(os.getenv('EAZYCODE'), 'filters', file) else: file_path = os.path.join(path, file) with open(file_path,", "VEGA.colnames: VEGA[c] = VEGA[c].astype(float) class FilterDefinition: def __init__(self, name=None, wave=None,", "wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator = np.trapz num = integrator(self.wave, self.wave*self.throughput)", "full_x = full_x[np.argsort(full_x)] # Vega spectrum, units of f-lambda flux", "arbitrary normalization bp : optional, `pysynphot.obsbandpass` object `pysynphot` filter bandpass", "\"\"\" Search filter names for ``search_string``. If ``case`` is True,", "Filter equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return np.trapz(self.throughput, self.wave) @property def", "row_str='{i:6} {wave:.5e} {thru:.5e}'): \"\"\" Return a string that can be", "left=0, right=0) # AB = 0, same units absp =", "file \"\"\" header = '{0} {1} lambda_c= {2:.4e} AB-Vega= {3:.3f}", "NFILT(self): \"\"\" Number of filters in the list \"\"\" return", "for i in range(len(self.filters)): print('{0:5d} {1}'.format(i+1, self.filters[i].name)) else: string_list =", "range(len(filter.wave)): fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i])) fp.close() string_list = self.names(verbose=False)", "= filters @property def NFILT(self): \"\"\" Number of filters in", "VEGA['WAVELENGTH']]) full_x = full_x[np.argsort(full_x)] # Vega spectrum, units of f-lambda", "integrator(self.wave, self.wave*self.throughput) den = integrator(self.wave, self.throughput/self.wave) pivot = np.sqrt(num/den) return", "filter names. \"\"\" if verbose: for i in range(len(self.filters)): print('{0:5d}", "__str__(self): return self.name.__str__() def get_extinction(self, EBV=0, Rv=3.1): \"\"\" Extinction factor", "self.wave) if mag: return 2.5*np.log10(delta) else: return 1./delta @property def", "Wavelength array, in `astropy.units.Angstrom`. 
throughput : array Throughput, arbitrary normalization", "new_filter.throughput = np.cast[float](trans) new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter) self.filters", "[] else: lspl = np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2]) # last one", "file_path filters = [] wave = [] trans = []", "True, then match case. \"\"\" import re if not case:", "os from astropy.table import Table from . import utils __all__", "grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve # Union of throughput", "already read in new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) # new_filter.name", "@property def pivot(self): \"\"\" Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator =", "new_filter.throughput = np.cast[float](trans) filters.append(new_filter) # Initialize filter header = '", "np.cast[np.double](bp.wave) self.throughput = np.cast[np.double](bp.throughput) self.name = bp.name self.norm = 1.", "as fp: lines = fp.readlines() self.filename = file_path filters =", "fp: lines = fp.readlines() self.filename = file_path filters = []", "ctw95 = np.interp([0.025, 0.975], filt/filt.max(), self.wave[1:]) return np.diff(ctw95)[0] def for_filter_file(self,", "if verbose: for i in range(len(self.filters)): print('{0:5d} {1}'.format(i+1, self.filters[i].name)) else:", "[header.format(N, self.name.split('lambda_c')[0], self.pivot, self.ABVega, self.ctw95)] lines += [row_str.format(i=i+1, wave=w, thru=t)", "in range(len(self.filters)): print('{0:5d} {1}'.format(i+1, self.filters[i].name)) else: string_list = ['{0:5d} {1}\\n'.format(i+1,", "1./delta @property def ABVega(self): \"\"\" Compute AB-Vega conversion \"\"\" from", "res[205] print(bp) fig, ax = plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0]) 
ax.set_xlabel('wavelength,", "read in new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) # new_filter.name =", "# pysynphot Bandpass if bp is not None: self.wave =", "lines = fp.readlines() self.filename = file_path filters = [] wave", "VEGA[c].astype(float) class FilterDefinition: def __init__(self, name=None, wave=None, throughput=None, bp=None): \"\"\"", "is None: file_path = os.path.join(os.getenv('EAZYCODE'), 'filters', file) else: file_path =", "Vega spectrum, units of f-lambda flux density, cgs # Interpolate", "else: string_list = ['{0:5d} {1}\\n'.format(i+1, self.filters[i].name) for i in range(len(self.filters))]", "return string_list def write(self, file='xxx.res', verbose=True): \"\"\" Dump the filter", "in enumerate(zip(self.wave, self.throughput))] return '\\n'.join(lines) class FilterFile: def __init__(self, file='FILTER.RES.latest',", "self.ctw95)] lines += [row_str.format(i=i+1, wave=w, thru=t) for i, (w, t)", "np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x = full_x[np.argsort(full_x)] # Vega spectrum, units of", "num = np.trapz(vega_full*thru_full, full_x) den = np.trapz(absp*thru_full, full_x) return -2.5*np.log10(num/den)", "grid, no extrapolation vega_full = interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'], left=0, right=0)", "except ImportError: interp = utils.interp_conserve # Union of throughput and", "dlam num = np.trapz(vega_full*thru_full, full_x) den = np.trapz(absp*thru_full, full_x) return", "Alambda = f99(self.wave) delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) / np.trapz(self.throughput*source_flux, self.wave)", "class FilterFile: def __init__(self, file='FILTER.RES.latest', path='./'): \"\"\" Read a EAZY", "from eazy.filters import FilterFile res = FilterFile(path=None) print(len(res.filters)) bp =", "np.trapz num = integrator(self.wave, self.wave*self.throughput) den = integrator(self.wave, self.throughput/self.wave) pivot", "put in the EAZY filter 
file \"\"\" header = '{0}", "print('Wrote <{0}[.info]>'.format(file)) def search(self, search_string, case=False, verbose=True): \"\"\" Search filter", "ParamFilter(FilterDefinition): def __init__(self, line='# Filter #20, RES#78: COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'):", "ctw95(self): \"\"\" 95% cumulative throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl =", "if len(wave) > 0: # Make filter from lines already", "\"\"\" 95% cumulative throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl = np.diff(self.wave)", "'\\n'.join(lines) class FilterFile: def __init__(self, file='FILTER.RES.latest', path='./'): \"\"\" Read a", "' '.join(line.split()[1:]) wave = [] trans = [] else: lspl", "= res[205] print(bp) fig, ax = plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0])", "= np.cast[np.double](bp.wave) self.throughput = np.cast[np.double](bp.throughput) self.name = bp.name self.norm =", "Rv=3.1, mag=True, source_lam=None, source_flux=None): \"\"\" Get the MW extinction correction", "if mag: return 2.5*np.log10(delta) else: return 1./delta @property def ABVega(self):", "def __init__(self, name=None, wave=None, throughput=None, bp=None): \"\"\" Bandpass object Parameters", "interp(self.wave, source_lam, source_flux, left=0, right=0) if (self.wave.min() < 910) |", "rect @property def ctw95(self): \"\"\" 95% cumulative throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords", "= f99(self.wave) self.Aflux = 10**(-0.4*self.Alambda) def extinction_correction(self, EBV, Rv=3.1, mag=True,", "source_flux = self.throughput*0.+1 else: source_flux = interp(self.wave, source_lam, source_flux, left=0,", "np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 = np.interp([0.025, 0.975], filt/filt.max(), self.wave[1:]) return np.diff(ctw95)[0] def", "path='./'): \"\"\" Read a EAZY filter file. .. 
plot:: :include-source:", "= np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) new_filter = FilterDefinition(name=header, wave=np.cast[float](wave),", ": optional, `pysynphot.obsbandpass` object `pysynphot` filter bandpass \"\"\" self.name =", "f-lambda flux density, cgs # Interpolate to wavelength grid, no", "match case. \"\"\" import re if not case: search_string =", "\"\"\" Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator = np.trapz num =", "str Label name wave : array Wavelength array, in `astropy.units.Angstrom`.", "trans = [] header = '' for line in lines:", "with open(file_path, 'r') as fp: lines = fp.readlines() self.filename =", "w95={4:.1f}' N = len(self.wave) lines = [header.format(N, self.name.split('lambda_c')[0], self.pivot, self.ABVega,", "{3:.3f} w95={4:.1f}' N = len(self.wave) lines = [header.format(N, self.name.split('lambda_c')[0], self.pivot,", "array Wavelength array, in `astropy.units.Angstrom`. throughput : array Throughput, arbitrary", "interp = grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve # Union", "filter file. .. plot:: :include-source: import matplotlib.pyplot as plt from", "\"\"\" return np.trapz(self.throughput, self.wave) @property def rectwidth(self): \"\"\" Filter rectangular", "= len(self.wave) lines = [header.format(N, self.name.split('lambda_c')[0], self.pivot, self.ABVega, self.ctw95)] lines", "absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate over the bandpass, flam dlam", "file='FILTER.RES.latest', path='./'): \"\"\" Read a EAZY filter file. .. plot::", "utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda = f99(self.wave) delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) /", "> 6.e4): Alambda = 0. else: f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv)", "def names(self, verbose=True): \"\"\" Print the filter names. 
\"\"\" if", "Rv=Rv) self.Alambda = f99(self.wave) self.Aflux = 10**(-0.4*self.Alambda) def extinction_correction(self, EBV,", "import os from astropy.table import Table from . import utils", "not None: self.wave = np.cast[np.double](bp.wave) self.throughput = np.cast[np.double](bp.throughput) self.name =", "__init__(self, file='FILTER.RES.latest', path='./'): \"\"\" Read a EAZY filter file. ..", "integrator = np.trapz num = integrator(self.wave, self.wave*self.throughput) den = integrator(self.wave,", "# Make filter from lines already read in new_filter =", "self.filters[i].name if not case: filt_name = filt_name.upper() if re.search(search_string, filt_name)", "name=None, wave=None, throughput=None, bp=None): \"\"\" Bandpass object Parameters ---------- name", "Angstroms') ax.set_ylabel('throughput') ax.legend() ax.grid() fig.tight_layout(pad=0.5) \"\"\" if path is None:", "Dump the filter information to a filter file. \"\"\" fp", "\"\"\" return self.filters[i1-1] def names(self, verbose=True): \"\"\" Print the filter", "[] header = '' for line in lines: if 'lambda_c'", "c in VEGA.colnames: VEGA[c] = VEGA[c].astype(float) class FilterDefinition: def __init__(self,", "a source spectrum. \"\"\" import astropy.units as u try: import", "= f99(self.wave) delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) / np.trapz(self.throughput*source_flux, self.wave) if", "Table from . 
import utils __all__ = [\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"]", "Return a string that can be put in the EAZY", "the bandpass, flam dlam num = np.trapz(vega_full*thru_full, full_x) den =", "@property def rectwidth(self): \"\"\" Filter rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect", "AB = 0, same units absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate", "file='xxx.res', verbose=True): \"\"\" Dump the filter information to a filter", "open(file+'.info', 'w') fp.writelines(string_list) fp.close() if verbose: print('Wrote <{0}[.info]>'.format(file)) def search(self,", "{1}'.format(i+1, self.filters[i].name)) matched.append(i) return np.array(matched) class ParamFilter(FilterDefinition): def __init__(self, line='#", "# new_filter.throughput = np.cast[float](trans) new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter)", "of f-lambda flux density, cgs # Interpolate to wavelength grid,", "'r') as fp: lines = fp.readlines() self.filename = file_path filters", "Throughput, arbitrary normalization bp : optional, `pysynphot.obsbandpass` object `pysynphot` filter", "Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator = np.trapz num = integrator(self.wave,", "the MW extinction correction within the filter. Optionally supply a", "wave : array Wavelength array, in `astropy.units.Angstrom`. throughput : array", "= file_path filters = [] wave = [] trans =", "return -2.5*np.log10(num/den) @property def pivot(self): \"\"\" Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\"", "from astropy.table import Table from . 
import utils __all__ =", "self.name.__repr__() def __str__(self): return self.name.__str__() def get_extinction(self, EBV=0, Rv=3.1): \"\"\"", "bp.throughput, label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput') ax.legend() ax.grid() fig.tight_layout(pad=0.5) \"\"\" if", "fp.close() string_list = self.names(verbose=False) fp = open(file+'.info', 'w') fp.writelines(string_list) fp.close()", "f99(self.wave) delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) / np.trapz(self.throughput*source_flux, self.wave) if mag:", "new_filter.name = header # new_filter.wave = np.cast[float](wave) # new_filter.throughput =", "3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate over the bandpass, flam dlam num =", "verbose=True): \"\"\" Search filter names for ``search_string``. If ``case`` is", "full_x) den = np.trapz(absp*thru_full, full_x) return -2.5*np.log10(num/den) @property def pivot(self):", "matplotlib.pyplot as plt from eazy.filters import FilterFile res = FilterFile(path=None)", "self.Alambda = f99(self.wave) self.Aflux = 10**(-0.4*self.Alambda) def extinction_correction(self, EBV, Rv=3.1,", "thru_full = interp(full_x, self.wave, self.throughput, left=0, right=0) # AB =", "FilterDefinition: def __init__(self, name=None, wave=None, throughput=None, bp=None): \"\"\" Bandpass object", "1. if self.throughput is not None: self.norm = np.trapz(self.throughput/self.wave, self.wave)", "= 0, same units absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate over", "filter information to a filter file. 
\"\"\" fp = open(file,'w')", "in range(len(filter.wave)): fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i])) fp.close() string_list =", "ImportError: interp = utils.interp_conserve if self.wave is None: print('Filter not", "= utils.interp_conserve if self.wave is None: print('Filter not defined.') return", "a string that can be put in the EAZY filter", "except ImportError: interp = utils.interp_conserve if self.wave is None: print('Filter", "not None: self.norm = np.trapz(self.throughput/self.wave, self.wave) def __repr__(self): return self.name.__repr__()", "Alambda = 0. else: f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda =", "def ABVega(self): \"\"\" Compute AB-Vega conversion \"\"\" from astropy.constants import", "self.ABVega, self.ctw95)] lines += [row_str.format(i=i+1, wave=w, thru=t) for i, (w,", "name wave : array Wavelength array, in `astropy.units.Angstrom`. throughput :", "= [] trans = [] else: lspl = np.cast[float](line.split()) wave.append(lspl[1])", "'w') fp.writelines(string_list) fp.close() if verbose: print('Wrote <{0}[.info]>'.format(file)) def search(self, search_string,", "= FilterDefinition() # new_filter.name = header # new_filter.wave = np.cast[float](wave)", "wave=w, thru=t) for i, (w, t) in enumerate(zip(self.wave, self.throughput))] return", "in range(len(self.filters))] return string_list def write(self, file='xxx.res', verbose=True): \"\"\" Dump", "optional, `pysynphot.obsbandpass` object `pysynphot` filter bandpass \"\"\" self.name = name", "header = '{0} {1} lambda_c= {2:.4e} AB-Vega= {3:.3f} w95={4:.1f}' N", "= interp(full_x, self.wave, self.throughput, left=0, right=0) # AB = 0,", "= 1. if self.throughput is not None: self.norm = np.trapz(self.throughput/self.wave,", "extinction correction within the filter. 
Optionally supply a source spectrum.", "\"\"\" rect = self.equivwidth / self.throughput.max() return rect @property def", "filt_name = self.filters[i].name if not case: filt_name = filt_name.upper() if", "of filters in the list \"\"\" return len(self.filters) def __getitem__(self,", "import astropy.units as u try: import grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c", "95% cumulative throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl = np.diff(self.wave) filt", "filter.throughput[i])) fp.close() string_list = self.names(verbose=False) fp = open(file+'.info', 'w') fp.writelines(string_list)", "= self.throughput*0.+1 else: source_flux = interp(self.wave, source_lam, source_flux, left=0, right=0)", "self.Aflux = 1. # pysynphot Bandpass if bp is not", "# Union of throughput and Vega spectrum arrays full_x =", "flam dlam num = np.trapz(vega_full*thru_full, full_x) den = np.trapz(absp*thru_full, full_x)", "t) in enumerate(zip(self.wave, self.throughput))] return '\\n'.join(lines) class FilterFile: def __init__(self,", "np.cast[np.double](bp.throughput) self.name = bp.name self.norm = 1. if self.throughput is", ": str Label name wave : array Wavelength array, in", "None: source_flux = self.throughput*0.+1 else: source_flux = interp(self.wave, source_lam, source_flux,", "- lambda_c=4458.276253'): self.lambda_c = float(line.split('lambda_c=')[1]) self.name = line.split()[4] self.fnumber =", "spectrum. 
\"\"\" import astropy.units as u try: import grizli.utils_c interp", "i, (w, t) in enumerate(zip(self.wave, self.throughput))] return '\\n'.join(lines) class FilterFile:", "\"\"\" header = '{0} {1} lambda_c= {2:.4e} AB-Vega= {3:.3f} w95={4:.1f}'", "if not case: search_string = search_string.upper() matched = [] for", "[\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"] VEGA_FILE = os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA = Table.read(VEGA_FILE)", "= integrator(self.wave, self.throughput/self.wave) pivot = np.sqrt(num/den) return pivot @property def", "trans.append(lspl[2]) # last one # new_filter = FilterDefinition() # new_filter.name", "bp : optional, `pysynphot.obsbandpass` object `pysynphot` filter bandpass \"\"\" self.name", "MW extinction correction within the filter. Optionally supply a source", "array, in `astropy.units.Angstrom`. throughput : array Throughput, arbitrary normalization bp", "is not None: self.norm = np.trapz(self.throughput/self.wave, self.wave) def __repr__(self): return", "\"\"\" Number of filters in the list \"\"\" return len(self.filters)", "fp.close() if verbose: print('Wrote <{0}[.info]>'.format(file)) def search(self, search_string, case=False, verbose=True):", "not case: filt_name = filt_name.upper() if re.search(search_string, filt_name) is not", "len(self.filters) def __getitem__(self, i1): \"\"\" Return unit-indexed filter, e.g., 161", "VEGA[c] = VEGA[c].astype(float) class FilterDefinition: def __init__(self, name=None, wave=None, throughput=None,", "\"\"\" Print the filter names. \"\"\" if verbose: for i", ": array Throughput, arbitrary normalization bp : optional, `pysynphot.obsbandpass` object", "np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2]) # last one # new_filter = FilterDefinition()", "lambda_c= {2:.4e} AB-Vega= {3:.3f} w95={4:.1f}' N = len(self.wave) lines =", "correction within the filter. Optionally supply a source spectrum. 
\"\"\"", "filter bandpass \"\"\" self.name = name self.wave = wave self.throughput", "if bp is not None: self.wave = np.cast[np.double](bp.wave) self.throughput =", "not case: search_string = search_string.upper() matched = [] for i", "10**(-0.4*self.Alambda) def extinction_correction(self, EBV, Rv=3.1, mag=True, source_lam=None, source_flux=None): \"\"\" Get", "def write(self, file='xxx.res', verbose=True): \"\"\" Dump the filter information to", "= [] for i in range(len(self.filters)): filt_name = self.filters[i].name if", "interp(full_x, self.wave, self.throughput, left=0, right=0) # AB = 0, same", "as u f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda = f99(self.wave) self.Aflux", "if (self.wave.min() < 910) | (self.wave.max() > 6.e4): Alambda =", "self.wave = wave self.throughput = throughput self.Aflux = 1. #", "search_string.upper() matched = [] for i in range(len(self.filters)): filt_name =", "\"\"\" dl = np.diff(self.wave) filt = np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 = np.interp([0.025,", "return np.trapz(self.throughput, self.wave) @property def rectwidth(self): \"\"\" Filter rectangular width", "os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA = Table.read(VEGA_FILE) for c in VEGA.colnames: VEGA[c]", "= '{0} {1} lambda_c= {2:.4e} AB-Vega= {3:.3f} w95={4:.1f}' N =", "range(len(self.filters)): print('{0:5d} {1}'.format(i+1, self.filters[i].name)) else: string_list = ['{0:5d} {1}\\n'.format(i+1, self.filters[i].name)", "`astropy.units.Angstrom`. 
throughput : array Throughput, arbitrary normalization bp : optional,", "new_filter.wave = np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) filters.append(new_filter) # Initialize", "class ParamFilter(FilterDefinition): def __init__(self, line='# Filter #20, RES#78: COSMOS/SUBARU_filter_B.txt -", "if self.wave is None: print('Filter not defined.') return False if", "= ['{0:5d} {1}\\n'.format(i+1, self.filters[i].name) for i in range(len(self.filters))] return string_list", "/ self.throughput.max() return rect @property def ctw95(self): \"\"\" 95% cumulative", "= 2mass-j \"\"\" return self.filters[i1-1] def names(self, verbose=True): \"\"\" Print", "Extinction factor \"\"\" import astropy.units as u f99 = utils.GalacticExtinction(EBV=EBV,", "filter file \"\"\" header = '{0} {1} lambda_c= {2:.4e} AB-Vega=", "np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) / np.trapz(self.throughput*source_flux, self.wave) if mag: return 2.5*np.log10(delta) else:", "self.throughput, left=0, right=0) # AB = 0, same units absp", "self.wave*self.throughput) den = integrator(self.wave, self.throughput/self.wave) pivot = np.sqrt(num/den) return pivot", "# Interpolate to wavelength grid, no extrapolation vega_full = interp(full_x,", "normalization bp : optional, `pysynphot.obsbandpass` object `pysynphot` filter bandpass \"\"\"", "bp=None): \"\"\" Bandpass object Parameters ---------- name : str Label", "self.norm = 1. 
if self.throughput is not None: self.norm =", "units of f-lambda flux density, cgs # Interpolate to wavelength", "range(len(self.filters)): filt_name = self.filters[i].name if not case: filt_name = filt_name.upper()", "astropy.units as u try: import grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c except", "new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter) self.filters = filters @property", "self.throughput = np.cast[np.double](bp.throughput) self.name = bp.name self.norm = 1. if", "{1}\\n'.format(len(filter.wave), filter.name)) for i in range(len(filter.wave)): fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i],", "-2.5*np.log10(num/den) @property def pivot(self): \"\"\" Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator", "``case`` is True, then match case. \"\"\" import re if", "range(len(self.filters))] return string_list def write(self, file='xxx.res', verbose=True): \"\"\" Dump the", "Get the MW extinction correction within the filter. 
Optionally supply", "0: # Make filter from lines already read in new_filter", "right=0) # AB = 0, same units absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2", "VEGA['FLUX'], left=0, right=0) thru_full = interp(full_x, self.wave, self.throughput, left=0, right=0)", "<filename>eazy/filters.py import numpy as np import os from astropy.table import", "else: file_path = os.path.join(path, file) with open(file_path, 'r') as fp:", "throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl = np.diff(self.wave) filt = np.cumsum((self.wave*self.throughput)[1:]*dl)", "= FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter) self.filters = filters @property def", "Union of throughput and Vega spectrum arrays full_x = np.hstack([self.wave,", "= [] wave = [] trans = [] header =", "right=0) if (self.wave.min() < 910) | (self.wave.max() > 6.e4): Alambda", "self.wave) def __repr__(self): return self.name.__repr__() def __str__(self): return self.name.__str__() def", "throughput and Vega spectrum arrays full_x = np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x", "equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return np.trapz(self.throughput, self.wave) @property def rectwidth(self):", "[] trans = [] else: lspl = np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2])", "FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter) self.filters = filters @property def NFILT(self):", "i in range(len(self.filters)): print('{0:5d} {1}'.format(i+1, self.filters[i].name)) else: string_list = ['{0:5d}", "print('{0:5d} {1}'.format(i+1, self.filters[i].name)) else: string_list = ['{0:5d} {1}\\n'.format(i+1, self.filters[i].name) for", "if verbose: print('Wrote <{0}[.info]>'.format(file)) def search(self, search_string, case=False, verbose=True): \"\"\"", "= 
np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) / np.trapz(self.throughput*source_flux, self.wave) if mag: return 2.5*np.log10(delta)", "else: return 1./delta @property def ABVega(self): \"\"\" Compute AB-Vega conversion", "full_x[np.argsort(full_x)] # Vega spectrum, units of f-lambda flux density, cgs", "/ np.trapz(self.throughput*source_flux, self.wave) if mag: return 2.5*np.log10(delta) else: return 1./delta", "self.throughput))] return '\\n'.join(lines) class FilterFile: def __init__(self, file='FILTER.RES.latest', path='./'): \"\"\"", "of throughput and Vega spectrum arrays full_x = np.hstack([self.wave, VEGA['WAVELENGTH']])", "as plt from eazy.filters import FilterFile res = FilterFile(path=None) print(len(res.filters))", "filter header = ' '.join(line.split()[1:]) wave = [] trans =", "extrapolation vega_full = interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'], left=0, right=0) thru_full =", "header = ' '.join(line.split()[1:]) wave = [] trans = []", "matched = [] for i in range(len(self.filters)): filt_name = self.filters[i].name", "wave = [] trans = [] header = '' for", "u try: import grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c except ImportError: interp", "verbose: print('Wrote <{0}[.info]>'.format(file)) def search(self, search_string, case=False, verbose=True): \"\"\" Search", "one # new_filter = FilterDefinition() # new_filter.name = header #", "source spectrum. 
\"\"\" import astropy.units as u try: import grizli.utils_c", "not None: if verbose: print('{0:5d} {1}'.format(i+1, self.filters[i].name)) matched.append(i) return np.array(matched)", "self.throughput/self.wave) pivot = np.sqrt(num/den) return pivot @property def equivwidth(self): \"\"\"", "for line in lines: if 'lambda_c' in line: if len(wave)", "def ctw95(self): \"\"\" 95% cumulative throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl", "2mass-j \"\"\" return self.filters[i1-1] def names(self, verbose=True): \"\"\" Print the", "os.path.join(path, file) with open(file_path, 'r') as fp: lines = fp.readlines()", "import matplotlib.pyplot as plt from eazy.filters import FilterFile res =", "# last one # new_filter = FilterDefinition() # new_filter.name =", "Integrate over the bandpass, flam dlam num = np.trapz(vega_full*thru_full, full_x)", "interp = utils.interp_conserve # Union of throughput and Vega spectrum", "in line: if len(wave) > 0: # Make filter from", "the filter names. 
\"\"\" if verbose: for i in range(len(self.filters)):", "return len(self.filters) def __getitem__(self, i1): \"\"\" Return unit-indexed filter, e.g.,", "(self.wave.min() < 910) | (self.wave.max() > 6.e4): Alambda = 0.", "= np.diff(self.wave) filt = np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 = np.interp([0.025, 0.975], filt/filt.max(),", "the list \"\"\" return len(self.filters) def __getitem__(self, i1): \"\"\" Return", "numpy as np import os from astropy.table import Table from", "import re if not case: search_string = search_string.upper() matched =", "search_string = search_string.upper() matched = [] for i in range(len(self.filters)):", "= np.sqrt(num/den) return pivot @property def equivwidth(self): \"\"\" Filter equivalent", "if path is None: file_path = os.path.join(os.getenv('EAZYCODE'), 'filters', file) else:", "filters in the list \"\"\" return len(self.filters) def __getitem__(self, i1):", "def pivot(self): \"\"\" Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator = np.trapz", "= np.trapz num = integrator(self.wave, self.wave*self.throughput) den = integrator(self.wave, self.throughput/self.wave)", "file_path = os.path.join(path, file) with open(file_path, 'r') as fp: lines", "\"\"\" if verbose: for i in range(len(self.filters)): print('{0:5d} {1}'.format(i+1, self.filters[i].name))", "is True, then match case. 
\"\"\" import re if not", "import grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve", "interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'], left=0, right=0) thru_full = interp(full_x, self.wave, self.throughput,", "ax.grid() fig.tight_layout(pad=0.5) \"\"\" if path is None: file_path = os.path.join(os.getenv('EAZYCODE'),", "pivot = np.sqrt(num/den) return pivot @property def equivwidth(self): \"\"\" Filter", "for_filter_file(self, row_str='{i:6} {wave:.5e} {thru:.5e}'): \"\"\" Return a string that can", "source_lam, source_flux, left=0, right=0) if (self.wave.min() < 910) | (self.wave.max()", "in self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name)) for i in range(len(filter.wave)): fp.write('{0:6d}", "pivot(self): \"\"\" Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator = np.trapz num", "= 0. else: f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda = f99(self.wave)", "= integrator(self.wave, self.wave*self.throughput) den = integrator(self.wave, self.throughput/self.wave) pivot = np.sqrt(num/den)", "return rect @property def ctw95(self): \"\"\" 95% cumulative throughput width", "plot:: :include-source: import matplotlib.pyplot as plt from eazy.filters import FilterFile", "= plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput') ax.legend() ax.grid()", "http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\" dl = np.diff(self.wave) filt = np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 =", "that can be put in the EAZY filter file \"\"\"", "= search_string.upper() matched = [] for i in range(len(self.filters)): filt_name", "res = FilterFile(path=None) print(len(res.filters)) bp = res[205] print(bp) fig, ax", "from lines already read in new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), 
throughput=np.cast[float](trans))", "filters @property def NFILT(self): \"\"\" Number of filters in the", "def rectwidth(self): \"\"\" Filter rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect =", "Number of filters in the list \"\"\" return len(self.filters) def", "import numpy as np import os from astropy.table import Table", "open(file_path, 'r') as fp: lines = fp.readlines() self.filename = file_path", "from astropy.constants import c import astropy.units as u try: import", "def __getitem__(self, i1): \"\"\" Return unit-indexed filter, e.g., 161 =", "\"\"\" Compute AB-Vega conversion \"\"\" from astropy.constants import c import", "= self.names(verbose=False) fp = open(file+'.info', 'w') fp.writelines(string_list) fp.close() if verbose:", "FilterFile res = FilterFile(path=None) print(len(res.filters)) bp = res[205] print(bp) fig,", "\"\"\" import astropy.units as u f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda", "names. \"\"\" if verbose: for i in range(len(self.filters)): print('{0:5d} {1}'.format(i+1,", "self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name)) for i in range(len(filter.wave)): fp.write('{0:6d} {1:.5e}", "import utils __all__ = [\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"] VEGA_FILE = os.path.join(utils.path_to_eazy_data(),", "else: source_flux = interp(self.wave, source_lam, source_flux, left=0, right=0) if (self.wave.min()", "throughput=np.cast[float](trans)) filters.append(new_filter) self.filters = filters @property def NFILT(self): \"\"\" Number", "case. \"\"\" import re if not case: search_string = search_string.upper()", "i in range(len(self.filters)): filt_name = self.filters[i].name if not case: filt_name", "\"\"\" Read a EAZY filter file. .. 
plot:: :include-source: import", "file) else: file_path = os.path.join(path, file) with open(file_path, 'r') as", "RES#78: COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'): self.lambda_c = float(line.split('lambda_c=')[1]) self.name = line.split()[4]", "filter.wave[i], filter.throughput[i])) fp.close() string_list = self.names(verbose=False) fp = open(file+'.info', 'w')", "if self.throughput is not None: self.norm = np.trapz(self.throughput/self.wave, self.wave) def", "0. else: f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda = f99(self.wave) delta", "'' for line in lines: if 'lambda_c' in line: if", "= np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2]) # last one # new_filter =", "self.wave[1:]) return np.diff(ctw95)[0] def for_filter_file(self, row_str='{i:6} {wave:.5e} {thru:.5e}'): \"\"\" Return", "open(file,'w') for filter in self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name)) for i", "EBV=0, Rv=3.1): \"\"\" Extinction factor \"\"\" import astropy.units as u", "= grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve # Union of", "verbose: for i in range(len(self.filters)): print('{0:5d} {1}'.format(i+1, self.filters[i].name)) else: string_list", "#20, RES#78: COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'): self.lambda_c = float(line.split('lambda_c=')[1]) self.name =", "# Initialize filter header = ' '.join(line.split()[1:]) wave = []", "header = '' for line in lines: if 'lambda_c' in", "np.diff(self.wave) filt = np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 = np.interp([0.025, 0.975], filt/filt.max(), self.wave[1:])", "grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve #", "= os.path.join(path, file) with open(file_path, 'r') as fp: lines =", "utils.interp_conserve # Union of throughput and Vega spectrum arrays full_x", "verbose=True): \"\"\" Print the filter names. 
\"\"\" if verbose: for", "throughput=None, bp=None): \"\"\" Bandpass object Parameters ---------- name : str", "file. \"\"\" fp = open(file,'w') for filter in self.filters: fp.write('{0:6d}", "= bp.name self.norm = 1. if self.throughput is not None:", "in lines: if 'lambda_c' in line: if len(wave) > 0:", "is not None: if verbose: print('{0:5d} {1}'.format(i+1, self.filters[i].name)) matched.append(i) return", "(self.wave.max() > 6.e4): Alambda = 0. else: f99 = utils.GalacticExtinction(EBV=EBV,", "new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) # new_filter.name = header #", "the filter. Optionally supply a source spectrum. \"\"\" import astropy.units", "ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput') ax.legend() ax.grid() fig.tight_layout(pad=0.5) \"\"\" if path is", "try: import grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c except ImportError: interp =", "len(self.wave) lines = [header.format(N, self.name.split('lambda_c')[0], self.pivot, self.ABVega, self.ctw95)] lines +=", "header # new_filter.wave = np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) filters.append(new_filter)", "None: self.wave = np.cast[np.double](bp.wave) self.throughput = np.cast[np.double](bp.throughput) self.name = bp.name", "i in range(len(self.filters))] return string_list def write(self, file='xxx.res', verbose=True): \"\"\"", "\"\"\" Filter equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return np.trapz(self.throughput, self.wave) @property", "EAZY filter file. .. 
plot:: :include-source: import matplotlib.pyplot as plt", "Table.read(VEGA_FILE) for c in VEGA.colnames: VEGA[c] = VEGA[c].astype(float) class FilterDefinition:", "# new_filter.name = header # new_filter.wave = np.cast[float](wave) # new_filter.throughput", "ax = plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput') ax.legend()", "in `astropy.units.Angstrom`. throughput : array Throughput, arbitrary normalization bp :", "return pivot @property def equivwidth(self): \"\"\" Filter equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html", "rectwidth(self): \"\"\" Filter rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect = self.equivwidth", "{1} lambda_c= {2:.4e} AB-Vega= {3:.3f} w95={4:.1f}' N = len(self.wave) lines", "ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput') ax.legend() ax.grid() fig.tight_layout(pad=0.5) \"\"\"", "return np.diff(ctw95)[0] def for_filter_file(self, row_str='{i:6} {wave:.5e} {thru:.5e}'): \"\"\" Return a", "line in lines: if 'lambda_c' in line: if len(wave) >", "unit-indexed filter, e.g., 161 = 2mass-j \"\"\" return self.filters[i1-1] def", "COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'): self.lambda_c = float(line.split('lambda_c=')[1]) self.name = line.split()[4] self.fnumber", "self.name.split('lambda_c')[0], self.pivot, self.ABVega, self.ctw95)] lines += [row_str.format(i=i+1, wave=w, thru=t) for", "\"\"\" Dump the filter information to a filter file. \"\"\"", "fp = open(file+'.info', 'w') fp.writelines(string_list) fp.close() if verbose: print('Wrote <{0}[.info]>'.format(file))", "Read a EAZY filter file. .. plot:: :include-source: import matplotlib.pyplot", "Compute AB-Vega conversion \"\"\" from astropy.constants import c import astropy.units", "a filter file. 
\"\"\" fp = open(file,'w') for filter in", "throughput : array Throughput, arbitrary normalization bp : optional, `pysynphot.obsbandpass`", "f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda = f99(self.wave) self.Aflux = 10**(-0.4*self.Alambda)", "= interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'], left=0, right=0) thru_full = interp(full_x, self.wave,", "@property def equivwidth(self): \"\"\" Filter equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return", "'{0} {1} lambda_c= {2:.4e} AB-Vega= {3:.3f} w95={4:.1f}' N = len(self.wave)", "= [] header = '' for line in lines: if", "def __str__(self): return self.name.__str__() def get_extinction(self, EBV=0, Rv=3.1): \"\"\" Extinction", "[] wave = [] trans = [] header = ''", "= [header.format(N, self.name.split('lambda_c')[0], self.pivot, self.ABVega, self.ctw95)] lines += [row_str.format(i=i+1, wave=w,", "full_x = np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x = full_x[np.argsort(full_x)] # Vega spectrum,", "= ' '.join(line.split()[1:]) wave = [] trans = [] else:", "[] trans = [] header = '' for line in", "spectrum, units of f-lambda flux density, cgs # Interpolate to", "self.throughput.max() return rect @property def ctw95(self): \"\"\" 95% cumulative throughput", "http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return np.trapz(self.throughput, self.wave) @property def rectwidth(self): \"\"\" Filter", "lspl = np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2]) # last one # new_filter", "conversion \"\"\" from astropy.constants import c import astropy.units as u", "name self.wave = wave self.throughput = throughput self.Aflux = 1.", "np.trapz(self.throughput/self.wave, self.wave) def __repr__(self): return self.name.__repr__() def __str__(self): return self.name.__str__()", "import FilterFile res = FilterFile(path=None) print(len(res.filters)) bp = res[205] print(bp)", "string_list def write(self, file='xxx.res', verbose=True): \"\"\" 
Dump the filter information", "\"\"\" self.name = name self.wave = wave self.throughput = throughput", "ImportError: interp = utils.interp_conserve # Union of throughput and Vega", "def __repr__(self): return self.name.__repr__() def __str__(self): return self.name.__str__() def get_extinction(self,", "over the bandpass, flam dlam num = np.trapz(vega_full*thru_full, full_x) den", "self.throughput = throughput self.Aflux = 1. # pysynphot Bandpass if", "None: self.norm = np.trapz(self.throughput/self.wave, self.wave) def __repr__(self): return self.name.__repr__() def", "interp = grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve if self.wave", "filt_name = filt_name.upper() if re.search(search_string, filt_name) is not None: if", "to a filter file. \"\"\" fp = open(file,'w') for filter", "wave=None, throughput=None, bp=None): \"\"\" Bandpass object Parameters ---------- name :", "factor \"\"\" import astropy.units as u f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv)", "Make filter from lines already read in new_filter = FilterDefinition(name=header,", "np.interp([0.025, 0.975], filt/filt.max(), self.wave[1:]) return np.diff(ctw95)[0] def for_filter_file(self, row_str='{i:6} {wave:.5e}", "{thru:.5e}'): \"\"\" Return a string that can be put in", "= os.path.join(os.getenv('EAZYCODE'), 'filters', file) else: file_path = os.path.join(path, file) with", "self.wave) / np.trapz(self.throughput*source_flux, self.wave) if mag: return 2.5*np.log10(delta) else: return", "\"\"\" Extinction factor \"\"\" import astropy.units as u f99 =", "np.cast[float](trans) new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter) self.filters = filters", "\"\"\" integrator = np.trapz num = integrator(self.wave, self.wave*self.throughput) den =", "left=0, right=0) thru_full = interp(full_x, self.wave, self.throughput, left=0, right=0) #", "= np.cast[float](wave) # 
new_filter.throughput = np.cast[float](trans) filters.append(new_filter) # Initialize filter", "u f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda = f99(self.wave) self.Aflux =", "`pysynphot` filter bandpass \"\"\" self.name = name self.wave = wave", "if not case: filt_name = filt_name.upper() if re.search(search_string, filt_name) is", "self.name.__str__() def get_extinction(self, EBV=0, Rv=3.1): \"\"\" Extinction factor \"\"\" import", "filters.append(new_filter) self.filters = filters @property def NFILT(self): \"\"\" Number of", "return '\\n'.join(lines) class FilterFile: def __init__(self, file='FILTER.RES.latest', path='./'): \"\"\" Read", "filter names for ``search_string``. If ``case`` is True, then match", "if re.search(search_string, filt_name) is not None: if verbose: print('{0:5d} {1}'.format(i+1,", "bp is not None: self.wave = np.cast[np.double](bp.wave) self.throughput = np.cast[np.double](bp.throughput)", "self.lambda_c = float(line.split('lambda_c=')[1]) self.name = line.split()[4] self.fnumber = int(line.split('RES#')[1].split(':')[0]) self.cnumber", "Initialize filter header = ' '.join(line.split()[1:]) wave = [] trans", "# Vega spectrum, units of f-lambda flux density, cgs #", "lines already read in new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) #", "= VEGA[c].astype(float) class FilterDefinition: def __init__(self, name=None, wave=None, throughput=None, bp=None):", "VEGA = Table.read(VEGA_FILE) for c in VEGA.colnames: VEGA[c] = VEGA[c].astype(float)", "object `pysynphot` filter bandpass \"\"\" self.name = name self.wave =", "and Vega spectrum arrays full_x = np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x =", "self.norm = np.trapz(self.throughput/self.wave, self.wave) def __repr__(self): return self.name.__repr__() def __str__(self):", "= utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda = f99(self.wave) delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), 
self.wave)", "thru=t) for i, (w, t) in enumerate(zip(self.wave, self.throughput))] return '\\n'.join(lines)", "Parameters ---------- name : str Label name wave : array", "new_filter.wave = np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) new_filter = FilterDefinition(name=header,", "in VEGA.colnames: VEGA[c] = VEGA[c].astype(float) class FilterDefinition: def __init__(self, name=None,", "def NFILT(self): \"\"\" Number of filters in the list \"\"\"", "source_flux, left=0, right=0) if (self.wave.min() < 910) | (self.wave.max() >", "= [] trans = [] header = '' for line", "= np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 = np.interp([0.025, 0.975], filt/filt.max(), self.wave[1:]) return np.diff(ctw95)[0]", "def get_extinction(self, EBV=0, Rv=3.1): \"\"\" Extinction factor \"\"\" import astropy.units", "\"\"\" fp = open(file,'w') for filter in self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave),", "AB-Vega conversion \"\"\" from astropy.constants import c import astropy.units as", "len(wave) > 0: # Make filter from lines already read", "Bandpass if bp is not None: self.wave = np.cast[np.double](bp.wave) self.throughput", "source_lam=None, source_flux=None): \"\"\" Get the MW extinction correction within the", "a EAZY filter file. .. plot:: :include-source: import matplotlib.pyplot as", "plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput') ax.legend() ax.grid() fig.tight_layout(pad=0.5)", "Bandpass object Parameters ---------- name : str Label name wave", "re.search(search_string, filt_name) is not None: if verbose: print('{0:5d} {1}'.format(i+1, self.filters[i].name))", "astropy.table import Table from . import utils __all__ = [\"FilterDefinition\",", "supply a source spectrum. 
\"\"\" import astropy.units as u try:", "for i in range(len(filter.wave)): fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i])) fp.close()", "fp.readlines() self.filename = file_path filters = [] wave = []", "__all__ = [\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"] VEGA_FILE = os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA", "@property def ABVega(self): \"\"\" Compute AB-Vega conversion \"\"\" from astropy.constants", "self.filename = file_path filters = [] wave = [] trans", "(w, t) in enumerate(zip(self.wave, self.throughput))] return '\\n'.join(lines) class FilterFile: def", "self.filters[i].name)) matched.append(i) return np.array(matched) class ParamFilter(FilterDefinition): def __init__(self, line='# Filter", "print('Filter not defined.') return False if source_flux is None: source_flux", "<{0}[.info]>'.format(file)) def search(self, search_string, case=False, verbose=True): \"\"\" Search filter names", "matched.append(i) return np.array(matched) class ParamFilter(FilterDefinition): def __init__(self, line='# Filter #20,", "wave.append(lspl[1]) trans.append(lspl[2]) # last one # new_filter = FilterDefinition() #", "grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve if self.wave is None:", "case: filt_name = filt_name.upper() if re.search(search_string, filt_name) is not None:", "= np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x = full_x[np.argsort(full_x)] # Vega spectrum, units", "the EAZY filter file \"\"\" header = '{0} {1} lambda_c=", "object Parameters ---------- name : str Label name wave :", "names(self, verbose=True): \"\"\" Print the filter names. \"\"\" if verbose:", "| (self.wave.max() > 6.e4): Alambda = 0. else: f99 =", "``search_string``. If ``case`` is True, then match case. \"\"\" import", ": array Wavelength array, in `astropy.units.Angstrom`. 
throughput : array Throughput,", "rect = self.equivwidth / self.throughput.max() return rect @property def ctw95(self):", "name : str Label name wave : array Wavelength array,", "'lambda_c' in line: if len(wave) > 0: # Make filter", "= full_x[np.argsort(full_x)] # Vega spectrum, units of f-lambda flux density,", "FilterFile: def __init__(self, file='FILTER.RES.latest', path='./'): \"\"\" Read a EAZY filter", "# new_filter.throughput = np.cast[float](trans) filters.append(new_filter) # Initialize filter header =", "trans = [] else: lspl = np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2]) #", "= self.filters[i].name if not case: filt_name = filt_name.upper() if re.search(search_string,", "= float(line.split('lambda_c=')[1]) self.name = line.split()[4] self.fnumber = int(line.split('RES#')[1].split(':')[0]) self.cnumber =", "f99 = utils.GalacticExtinction(EBV=EBV, Rv=Rv) Alambda = f99(self.wave) delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda),", "@property def NFILT(self): \"\"\" Number of filters in the list", "as u try: import grizli.utils_c interp = grizli.utils_c.interp.interp_conserve_c except ImportError:", "= open(file+'.info', 'w') fp.writelines(string_list) fp.close() if verbose: print('Wrote <{0}[.info]>'.format(file)) def", "path is None: file_path = os.path.join(os.getenv('EAZYCODE'), 'filters', file) else: file_path", "fp = open(file,'w') for filter in self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name))", "last one # new_filter = FilterDefinition() # new_filter.name = header", "\"\"\" Bandpass object Parameters ---------- name : str Label name", "Optionally supply a source spectrum. \"\"\" import astropy.units as u", "If ``case`` is True, then match case. \"\"\" import re", "< 910) | (self.wave.max() > 6.e4): Alambda = 0. else:", "left=0, right=0) if (self.wave.min() < 910) | (self.wave.max() > 6.e4):", "= '' for line in lines: if 'lambda_c' in line:", "information to a filter file. 
\"\"\" fp = open(file,'w') for", "self.name = bp.name self.norm = 1. if self.throughput is not", "{2:.4e} AB-Vega= {3:.3f} w95={4:.1f}' N = len(self.wave) lines = [header.format(N,", "Search filter names for ``search_string``. If ``case`` is True, then", "= grizli.utils_c.interp.interp_conserve_c except ImportError: interp = utils.interp_conserve if self.wave is", "def equivwidth(self): \"\"\" Filter equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return np.trapz(self.throughput,", "{1}\\n'.format(i+1, self.filters[i].name) for i in range(len(self.filters))] return string_list def write(self,", "file. .. plot:: :include-source: import matplotlib.pyplot as plt from eazy.filters", "re if not case: search_string = search_string.upper() matched = []", "self.name = name self.wave = wave self.throughput = throughput self.Aflux", "2.5*np.log10(delta) else: return 1./delta @property def ABVega(self): \"\"\" Compute AB-Vega", "= np.trapz(vega_full*thru_full, full_x) den = np.trapz(absp*thru_full, full_x) return -2.5*np.log10(num/den) @property", "string_list = ['{0:5d} {1}\\n'.format(i+1, self.filters[i].name) for i in range(len(self.filters))] return", "= FilterFile(path=None) print(len(res.filters)) bp = res[205] print(bp) fig, ax =", "self.equivwidth / self.throughput.max() return rect @property def ctw95(self): \"\"\" 95%", "\"\"\" Return unit-indexed filter, e.g., 161 = 2mass-j \"\"\" return", "VEGA_FILE = os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA = Table.read(VEGA_FILE) for c in", "not defined.') return False if source_flux is None: source_flux =", "for i in range(len(self.filters)): filt_name = self.filters[i].name if not case:", "False if source_flux is None: source_flux = self.throughput*0.+1 else: source_flux", "filter from lines already read in new_filter = FilterDefinition(name=header, wave=np.cast[float](wave),", "EBV, Rv=3.1, mag=True, source_lam=None, source_flux=None): \"\"\" Get the MW 
extinction", "enumerate(zip(self.wave, self.throughput))] return '\\n'.join(lines) class FilterFile: def __init__(self, file='FILTER.RES.latest', path='./'):", "file_path = os.path.join(os.getenv('EAZYCODE'), 'filters', file) else: file_path = os.path.join(path, file)", "search(self, search_string, case=False, verbose=True): \"\"\" Search filter names for ``search_string``.", "0, same units absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate over the", "+= [row_str.format(i=i+1, wave=w, thru=t) for i, (w, t) in enumerate(zip(self.wave,", "f99(self.wave) self.Aflux = 10**(-0.4*self.Alambda) def extinction_correction(self, EBV, Rv=3.1, mag=True, source_lam=None,", "astropy.constants import c import astropy.units as u try: import grizli.utils_c", "in the EAZY filter file \"\"\" header = '{0} {1}", "in range(len(self.filters)): filt_name = self.filters[i].name if not case: filt_name =", "Print the filter names. \"\"\" if verbose: for i in", "'.join(line.split()[1:]) wave = [] trans = [] else: lspl =", "= utils.GalacticExtinction(EBV=EBV, Rv=Rv) self.Alambda = f99(self.wave) self.Aflux = 10**(-0.4*self.Alambda) def", "= np.cast[float](trans) new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) filters.append(new_filter) self.filters =", "import c import astropy.units as u try: import grizli.utils_c interp", "throughput self.Aflux = 1. 
# pysynphot Bandpass if bp is", "\"FilterFile\", \"ParamFilter\"] VEGA_FILE = os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA = Table.read(VEGA_FILE) for", "is not None: self.wave = np.cast[np.double](bp.wave) self.throughput = np.cast[np.double](bp.throughput) self.name", "self.wave is None: print('Filter not defined.') return False if source_flux", "filt/filt.max(), self.wave[1:]) return np.diff(ctw95)[0] def for_filter_file(self, row_str='{i:6} {wave:.5e} {thru:.5e}'): \"\"\"", "return 1./delta @property def ABVega(self): \"\"\" Compute AB-Vega conversion \"\"\"", "# Integrate over the bandpass, flam dlam num = np.trapz(vega_full*thru_full,", "rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect = self.equivwidth / self.throughput.max() return", "filter in self.filters: fp.write('{0:6d} {1}\\n'.format(len(filter.wave), filter.name)) for i in range(len(filter.wave)):", "self.name = line.split()[4] self.fnumber = int(line.split('RES#')[1].split(':')[0]) self.cnumber = int(line.split('Filter #')[1].split(',')[0])", "FilterFile(path=None) print(len(res.filters)) bp = res[205] print(bp) fig, ax = plt.subplots(1,1,figsize=(6,4))", "EAZY filter file \"\"\" header = '{0} {1} lambda_c= {2:.4e}", "return 2.5*np.log10(delta) else: return 1./delta @property def ABVega(self): \"\"\" Compute", "source_flux = interp(self.wave, source_lam, source_flux, left=0, right=0) if (self.wave.min() <", "new_filter = FilterDefinition() # new_filter.name = header # new_filter.wave =", "= np.cast[np.double](bp.throughput) self.name = bp.name self.norm = 1. 
if self.throughput", "for i in range(len(self.filters))] return string_list def write(self, file='xxx.res', verbose=True):", "self.throughput*0.+1 else: source_flux = interp(self.wave, source_lam, source_flux, left=0, right=0) if", "mag: return 2.5*np.log10(delta) else: return 1./delta @property def ABVega(self): \"\"\"", "def __init__(self, file='FILTER.RES.latest', path='./'): \"\"\" Read a EAZY filter file.", "in the list \"\"\" return len(self.filters) def __getitem__(self, i1): \"\"\"", "in new_filter = FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) # new_filter.name = header", "interp = utils.interp_conserve if self.wave is None: print('Filter not defined.')", "\"\"\" if path is None: file_path = os.path.join(os.getenv('EAZYCODE'), 'filters', file)", "filter.name)) for i in range(len(filter.wave)): fp.write('{0:6d} {1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i]))", "= [\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"] VEGA_FILE = os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA =", "utils.interp_conserve if self.wave is None: print('Filter not defined.') return False", "return np.array(matched) class ParamFilter(FilterDefinition): def __init__(self, line='# Filter #20, RES#78:", "filt_name.upper() if re.search(search_string, filt_name) is not None: if verbose: print('{0:5d}", "source_flux is None: source_flux = self.throughput*0.+1 else: source_flux = interp(self.wave,", "> 0: # Make filter from lines already read in", "self.filters[i].name) for i in range(len(self.filters))] return string_list def write(self, file='xxx.res',", "case: search_string = search_string.upper() matched = [] for i in", "equivwidth(self): \"\"\" Filter equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return np.trapz(self.throughput, self.wave)", "line: if len(wave) > 0: # Make filter from lines", "filter. Optionally supply a source spectrum. 
\"\"\" import astropy.units as", "Rv=Rv) Alambda = f99(self.wave) delta = np.trapz(self.throughput*source_flux*10**(-0.4*Alambda), self.wave) / np.trapz(self.throughput*source_flux,", "float(line.split('lambda_c=')[1]) self.name = line.split()[4] self.fnumber = int(line.split('RES#')[1].split(':')[0]) self.cnumber = int(line.split('Filter", "self.throughput is not None: self.norm = np.trapz(self.throughput/self.wave, self.wave) def __repr__(self):", "= 10**(-0.4*self.Alambda) def extinction_correction(self, EBV, Rv=3.1, mag=True, source_lam=None, source_flux=None): \"\"\"", "@property def ctw95(self): \"\"\" 95% cumulative throughput width http://www.stsci.edu/hst/acs/analysis/bandwidths/#keywords \"\"\"", "lambda_c=4458.276253'): self.lambda_c = float(line.split('lambda_c=')[1]) self.name = line.split()[4] self.fnumber = int(line.split('RES#')[1].split(':')[0])", "density, cgs # Interpolate to wavelength grid, no extrapolation vega_full", "for ``search_string``. If ``case`` is True, then match case. 
\"\"\"", "np.trapz(self.throughput*source_flux, self.wave) if mag: return 2.5*np.log10(delta) else: return 1./delta @property", "bp = res[205] print(bp) fig, ax = plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput,", "{1}'.format(i+1, self.filters[i].name)) else: string_list = ['{0:5d} {1}\\n'.format(i+1, self.filters[i].name) for i", "dl = np.diff(self.wave) filt = np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 = np.interp([0.025, 0.975],", "throughput=np.cast[float](trans)) # new_filter.name = header # new_filter.wave = np.cast[float](wave) #", "FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) # new_filter.name = header # new_filter.wave =", "lines: if 'lambda_c' in line: if len(wave) > 0: #", "list \"\"\" return len(self.filters) def __getitem__(self, i1): \"\"\" Return unit-indexed", "arrays full_x = np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x = full_x[np.argsort(full_x)] # Vega", "units absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate over the bandpass, flam", "Return unit-indexed filter, e.g., 161 = 2mass-j \"\"\" return self.filters[i1-1]", "\"\"\" return len(self.filters) def __getitem__(self, i1): \"\"\" Return unit-indexed filter,", "self.filters = filters @property def NFILT(self): \"\"\" Number of filters", "\"\"\" Filter rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect = self.equivwidth /", "e.g., 161 = 2mass-j \"\"\" return self.filters[i1-1] def names(self, verbose=True):", "= FilterDefinition(name=header, wave=np.cast[float](wave), throughput=np.cast[float](trans)) # new_filter.name = header # new_filter.wave", "# new_filter = FilterDefinition() # new_filter.name = header # new_filter.wave", "= throughput self.Aflux = 1. 
# pysynphot Bandpass if bp", "np.sqrt(num/den) return pivot @property def equivwidth(self): \"\"\" Filter equivalent width", "`pysynphot.obsbandpass` object `pysynphot` filter bandpass \"\"\" self.name = name self.wave", "pysynphot Bandpass if bp is not None: self.wave = np.cast[np.double](bp.wave)", "np.trapz(vega_full*thru_full, full_x) den = np.trapz(absp*thru_full, full_x) return -2.5*np.log10(num/den) @property def", "\"\"\" Return a string that can be put in the", "verbose=True): \"\"\" Dump the filter information to a filter file.", "# AB = 0, same units absp = 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 #", "{2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i])) fp.close() string_list = self.names(verbose=False) fp = open(file+'.info',", "self.wave = np.cast[np.double](bp.wave) self.throughput = np.cast[np.double](bp.throughput) self.name = bp.name self.norm", "else: lspl = np.cast[float](line.split()) wave.append(lspl[1]) trans.append(lspl[2]) # last one #", "= 3631*1e-23*c.to(u.m/u.s).value*1.e10/full_x**2 # Integrate over the bandpass, flam dlam num", "plt from eazy.filters import FilterFile res = FilterFile(path=None) print(len(res.filters)) bp", "class FilterDefinition: def __init__(self, name=None, wave=None, throughput=None, bp=None): \"\"\" Bandpass", "eazy.filters import FilterFile res = FilterFile(path=None) print(len(res.filters)) bp = res[205]", "161 = 2mass-j \"\"\" return self.filters[i1-1] def names(self, verbose=True): \"\"\"", "case=False, verbose=True): \"\"\" Search filter names for ``search_string``. 
If ``case``", "if source_flux is None: source_flux = self.throughput*0.+1 else: source_flux =", "FilterDefinition() # new_filter.name = header # new_filter.wave = np.cast[float](wave) #", "= interp(self.wave, source_lam, source_flux, left=0, right=0) if (self.wave.min() < 910)", "\"\"\" from astropy.constants import c import astropy.units as u try:", "wave=np.cast[float](wave), throughput=np.cast[float](trans)) # new_filter.name = header # new_filter.wave = np.cast[float](wave)", "'alpha_lyr_stis_008.fits') VEGA = Table.read(VEGA_FILE) for c in VEGA.colnames: VEGA[c] =", "den = integrator(self.wave, self.throughput/self.wave) pivot = np.sqrt(num/den) return pivot @property", "fig, ax = plt.subplots(1,1,figsize=(6,4)) ax.plot(bp.wave, bp.throughput, label=bp.name.split()[0]) ax.set_xlabel('wavelength, Angstroms') ax.set_ylabel('throughput')", "ABVega(self): \"\"\" Compute AB-Vega conversion \"\"\" from astropy.constants import c", "['{0:5d} {1}\\n'.format(i+1, self.filters[i].name) for i in range(len(self.filters))] return string_list def", "0.975], filt/filt.max(), self.wave[1:]) return np.diff(ctw95)[0] def for_filter_file(self, row_str='{i:6} {wave:.5e} {thru:.5e}'):", "verbose: print('{0:5d} {1}'.format(i+1, self.filters[i].name)) matched.append(i) return np.array(matched) class ParamFilter(FilterDefinition): def", "np.diff(ctw95)[0] def for_filter_file(self, row_str='{i:6} {wave:.5e} {thru:.5e}'): \"\"\" Return a string", "fig.tight_layout(pad=0.5) \"\"\" if path is None: file_path = os.path.join(os.getenv('EAZYCODE'), 'filters',", "bandpass, flam dlam num = np.trapz(vega_full*thru_full, full_x) den = np.trapz(absp*thru_full,", "flux density, cgs # Interpolate to wavelength grid, no extrapolation", "wavelength grid, no extrapolation vega_full = interp(full_x, VEGA['WAVELENGTH'], VEGA['FLUX'], left=0,", "pivot @property def equivwidth(self): \"\"\" Filter equivalent width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\"", "def 
for_filter_file(self, row_str='{i:6} {wave:.5e} {thru:.5e}'): \"\"\" Return a string that", "Vega spectrum arrays full_x = np.hstack([self.wave, VEGA['WAVELENGTH']]) full_x = full_x[np.argsort(full_x)]", "names for ``search_string``. If ``case`` is True, then match case.", "string_list = self.names(verbose=False) fp = open(file+'.info', 'w') fp.writelines(string_list) fp.close() if", "http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" rect = self.equivwidth / self.throughput.max() return rect @property", "width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" return np.trapz(self.throughput, self.wave) @property def rectwidth(self): \"\"\"", "self.Aflux = 10**(-0.4*self.Alambda) def extinction_correction(self, EBV, Rv=3.1, mag=True, source_lam=None, source_flux=None):", ".. plot:: :include-source: import matplotlib.pyplot as plt from eazy.filters import", "def extinction_correction(self, EBV, Rv=3.1, mag=True, source_lam=None, source_flux=None): \"\"\" Get the", "= filt_name.upper() if re.search(search_string, filt_name) is not None: if verbose:", "self.wave) @property def rectwidth(self): \"\"\" Filter rectangular width http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\"", "mag=True, source_lam=None, source_flux=None): \"\"\" Get the MW extinction correction within", "http://pysynphot.readthedocs.io/en/latest/properties.html \"\"\" integrator = np.trapz num = integrator(self.wave, self.wave*self.throughput) den", "self.filters[i].name)) else: string_list = ['{0:5d} {1}\\n'.format(i+1, self.filters[i].name) for i in", "return False if source_flux is None: source_flux = self.throughput*0.+1 else:", "write(self, file='xxx.res', verbose=True): \"\"\" Dump the filter information to a", "self.wave, self.throughput, left=0, right=0) # AB = 0, same units", "filt = np.cumsum((self.wave*self.throughput)[1:]*dl) ctw95 = np.interp([0.025, 0.975], filt/filt.max(), self.wave[1:]) return", "= np.trapz(absp*thru_full, full_x) return 
-2.5*np.log10(num/den) @property def pivot(self): \"\"\" Pivot", "self.filters[i1-1] def names(self, verbose=True): \"\"\" Print the filter names. \"\"\"", "self.names(verbose=False) fp = open(file+'.info', 'w') fp.writelines(string_list) fp.close() if verbose: print('Wrote", "from . import utils __all__ = [\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"] VEGA_FILE", "line='# Filter #20, RES#78: COSMOS/SUBARU_filter_B.txt - lambda_c=4458.276253'): self.lambda_c = float(line.split('lambda_c=')[1])", ":include-source: import matplotlib.pyplot as plt from eazy.filters import FilterFile res", "utils __all__ = [\"FilterDefinition\", \"FilterFile\", \"ParamFilter\"] VEGA_FILE = os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits')", "= os.path.join(utils.path_to_eazy_data(), 'alpha_lyr_stis_008.fits') VEGA = Table.read(VEGA_FILE) for c in VEGA.colnames:", "is None: print('Filter not defined.') return False if source_flux is", "Interpolate to wavelength grid, no extrapolation vega_full = interp(full_x, VEGA['WAVELENGTH'],", "header # new_filter.wave = np.cast[float](wave) # new_filter.throughput = np.cast[float](trans) new_filter", "[row_str.format(i=i+1, wave=w, thru=t) for i, (w, t) in enumerate(zip(self.wave, self.throughput))]", "np import os from astropy.table import Table from . import", "full_x) return -2.5*np.log10(num/den) @property def pivot(self): \"\"\" Pivot wavelength http://pysynphot.readthedocs.io/en/latest/properties.html", "{1:.5e} {2:.5e}\\n'.format(i+1, filter.wave[i], filter.throughput[i])) fp.close() string_list = self.names(verbose=False) fp =", "= Table.read(VEGA_FILE) for c in VEGA.colnames: VEGA[c] = VEGA[c].astype(float) class", "file) with open(file_path, 'r') as fp: lines = fp.readlines() self.filename" ]
[ "# self.right = right class Solution: def buildTree(self, inorder: List[int],", "-> TreeNode: if not inorder: return None r = postorder.pop()", "Problem - 106 # URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # # Definition", "right=None): # self.val = val # self.left = left #", "Definition for a binary tree node. # class TreeNode: #", "# # Definition for a binary tree node. # class", "postorder.pop() root = TreeNode(r) index = inorder.index(r) root.right = self.buildTree(inorder[index+1:],", "# self.val = val # self.left = left # self.right", "inorder: List[int], postorder: List[int]) -> TreeNode: if not inorder: return", "val # self.left = left # self.right = right class", "= right class Solution: def buildTree(self, inorder: List[int], postorder: List[int])", "buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode: if not inorder:", "inorder.index(r) root.right = self.buildTree(inorder[index+1:], postorder) root.left = self.buildTree(inorder[:index], postorder) return", "# class TreeNode: # def __init__(self, val=0, left=None, right=None): #", "left # self.right = right class Solution: def buildTree(self, inorder:", "left=None, right=None): # self.val = val # self.left = left", "TreeNode: if not inorder: return None r = postorder.pop() root", "= inorder.index(r) root.right = self.buildTree(inorder[index+1:], postorder) root.left = self.buildTree(inorder[:index], postorder)", "https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # # Definition for a binary tree node. #", "= val # self.left = left # self.right = right", "# Problem - 106 # URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # #", "self.right = right class Solution: def buildTree(self, inorder: List[int], postorder:", "None r = postorder.pop() root = TreeNode(r) index = inorder.index(r)", "a binary tree node. 
# class TreeNode: # def __init__(self,", "TreeNode(r) index = inorder.index(r) root.right = self.buildTree(inorder[index+1:], postorder) root.left =", "binary tree node. # class TreeNode: # def __init__(self, val=0,", "class Solution: def buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode:", "class TreeNode: # def __init__(self, val=0, left=None, right=None): # self.val", "root.right = self.buildTree(inorder[index+1:], postorder) root.left = self.buildTree(inorder[:index], postorder) return root", "# def __init__(self, val=0, left=None, right=None): # self.val = val", "= left # self.right = right class Solution: def buildTree(self,", "for a binary tree node. # class TreeNode: # def", "tree node. # class TreeNode: # def __init__(self, val=0, left=None,", "__init__(self, val=0, left=None, right=None): # self.val = val # self.left", "self.left = left # self.right = right class Solution: def", "= TreeNode(r) index = inorder.index(r) root.right = self.buildTree(inorder[index+1:], postorder) root.left", "- 106 # URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # # Definition for", "postorder: List[int]) -> TreeNode: if not inorder: return None r", "return None r = postorder.pop() root = TreeNode(r) index =", "right class Solution: def buildTree(self, inorder: List[int], postorder: List[int]) ->", "List[int]) -> TreeNode: if not inorder: return None r =", "# Definition for a binary tree node. 
# class TreeNode:", "not inorder: return None r = postorder.pop() root = TreeNode(r)", "if not inorder: return None r = postorder.pop() root =", "r = postorder.pop() root = TreeNode(r) index = inorder.index(r) root.right", "LeetCode # # Problem - 106 # URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/", "val=0, left=None, right=None): # self.val = val # self.left =", "inorder: return None r = postorder.pop() root = TreeNode(r) index", "TreeNode: # def __init__(self, val=0, left=None, right=None): # self.val =", "def buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode: if not", "# URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # # Definition for a binary", "def __init__(self, val=0, left=None, right=None): # self.val = val #", "URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # # Definition for a binary tree", "# self.left = left # self.right = right class Solution:", "106 # URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # # Definition for a", "index = inorder.index(r) root.right = self.buildTree(inorder[index+1:], postorder) root.left = self.buildTree(inorder[:index],", "- https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ # # Definition for a binary tree node.", "List[int], postorder: List[int]) -> TreeNode: if not inorder: return None", "node. 
# class TreeNode: # def __init__(self, val=0, left=None, right=None):", "self.val = val # self.left = left # self.right =", "= postorder.pop() root = TreeNode(r) index = inorder.index(r) root.right =", "<reponame>KevinTMtz/CompetitiveProgramming # # LeetCode # # Problem - 106 #", "# LeetCode # # Problem - 106 # URL -", "root = TreeNode(r) index = inorder.index(r) root.right = self.buildTree(inorder[index+1:], postorder)", "# # LeetCode # # Problem - 106 # URL", "Solution: def buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode: if", "# # Problem - 106 # URL - https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/ #" ]
[ "with pd.option_context('display.float_format', '{:.3f}'.format, 'expand_frame_repr', False): logger.info(all_objective_scores) if return_dict: return pipeline_results", "Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see options. Change `binary` to `multiclass` or", "[\"id\", \"pipeline_name\", \"score\", \"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"] if not self._results['pipeline_results']:", "in the search. The default of None indicates all pipelines", "is not None and self._automl_algorithm.batch_number > 0: batch_number = self._automl_algorithm.batch_number", "search (y/n)? \").strip().lower() if choice == \"y\": logger.info(\"Exiting AutoMLSearch.\") return", "`multiclass` or `regression` depending on the problem type. Note that", "function takes three positional parameters: The pipeline class, the pipeline", "small, so ensembling will not run. Set max_iterations >= {first_ensembling_iteration}", "self._results['search_order'][0] best_score = self._results['pipeline_results'][first_id]['score'] num_without_improvement = 0 for id in", "= pipeline_results.get('pipeline_class') parameters = pipeline_results.get('parameters') if pipeline_class is None or", "time (including CV): %.1f seconds\" % pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross Validation\",", "all pipelines searched\"\"\" ascending = True if self.objective.greater_is_better: ascending =", "ID of a pipeline training result, returns an untrained instance", "ValueError(\"Given pipeline {} is not compatible with problem_type {}.\".format(pipeline.name, self.problem_type.value))", "for ensembler are pipelines with IDs: \" + str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger,", "is set to False, returns an untrained pipeline instance. 
\"\"\"", "TimeSeriesBaselineRegressionPipeline ) from evalml.pipelines.components.utils import get_estimators from evalml.pipelines.utils import make_pipeline", "X_train, y_train, self.optimize_thresholds, self.objective) self._best_pipeline = best_pipeline def _num_pipelines(self): \"\"\"Return", "= self.results['pipeline_results'].get(pipeline_id) if pipeline_results is None: raise PipelineNotFoundError(\"Pipeline not found", "to False, returns an untrained pipeline instance. \"\"\" if not", "(list(PipelineBase)): List of pipelines to train. X_holdout (ww.DataTable, pd.DataFrame): Holdout", "optimize the binary pipeline threshold. Defaults to True. start_iteration_callback (callable):", "not None: additional_objectives.remove(existing_main_objective) else: additional_objectives = [get_objective(o) for o in", "Received {max_iterations}.\") self.max_time = convert_to_seconds(max_time) if isinstance(max_time, str) else max_time", "already been run and will not run again on the", "train for every batch after the first one. The first", "to %s pipelines. \" % self.max_iterations) if self.max_time is not", "objectives (list(str), list(ObjectiveBase)): Objectives used for scoring. Returns: Dict[str, Dict[str,", "@property def full_rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results from", "every {ensemble_nth_batch} batches.\") self.max_iterations = (1 + len(self.allowed_pipelines) + self._pipelines_per_batch", "AutoMLSearch object. 
add_result_callback (callable): Function called after each pipeline training", "for: {}\".format(self.objective)) logger.info(\"Total training time (including CV): %.1f seconds\" %", "self.problem_type.value)) for pipeline in self.allowed_pipelines or []: if pipeline.problem_type !=", "in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set to {self.allowed_model_families}\") if len(self.problem_configuration): pipeline_params =", "patience and (not isinstance(patience, int) or patience < 0): raise", "[])}\\n\" f\"Random Seed: {self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize Thresholds: {self.optimize_thresholds}\\n\" )", "Must also accepts kwargs, so AutoMLSearch is able to pass", "of objectives to score on. Will override default objectives for", "len(self.allowed_pipelines) + len(self.allowed_pipelines) * self._pipelines_per_batch + 1) if self.max_iterations <", "self.max_batches = max_batches self._pipelines_per_batch = _pipelines_per_batch if not self.max_iterations and", "in the search \"\"\" return len(self._results['pipeline_results']) def _should_continue(self): \"\"\"Given the", "< 0.0): raise ValueError(\"tolerance value must be a float between", "automl search. Arguments: pipeline_id (int): pipeline to retrieve Returns: PipelineBase:", "batch after the first one. The first batch will train", "self.rankings.iloc[0] if not (self._best_pipeline and self._best_pipeline == self.get_pipeline(best_pipeline['id'])): best_pipeline =", "a positive integer. 
Received {} instead\".format(patience)) if tolerance and (tolerance", "if self._interrupted: return False # for add_to_rankings if self._searched: return", "Baseline + first batch + each pipeline iteration + 1", "{len(self.allowed_pipelines) * self._pipelines_per_batch} iterations after that.\") if self.max_batches and self.max_iterations", "for model in self.allowed_model_families])) self.search_iteration_plot = None if self.plot: self.search_iteration_plot", "objective_name_to_class): scores = defaultdict(int) n_folds = len(cv_data) for fold_data in", "with a {} problem.\".format(self.objective.name, self.problem_type.value)) if additional_objectives is None: additional_objectives", "if pipeline.problem_type != self.problem_type: raise ValueError(\"Given pipeline {} is not", "= 1 + len(self.allowed_pipelines) + (self._pipelines_per_batch * (self.max_batches - 1))", "self.plot: self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start = time.time() try: self._add_baseline_pipelines() except", "search. Arguments: pipeline_id (int): pipeline to retrieve Returns: PipelineBase: untrained", "ensembling will not run.\") run_ensembling = False if run_ensembling and", "indicating the pipelines allowed in the search. The default of", "# Necessary for \"Plotting\" documentation, since Sphinx does not work", "+ len(self.allowed_pipelines) * self._pipelines_per_batch + 1) if self.max_iterations < first_ensembling_iteration:", "= pd.DataFrame(all_objective_scores) for c in all_objective_scores: if c in [\"#", "= error_callback or log_error_callback self.data_splitter = data_splitter self.optimize_thresholds = optimize_thresholds", "else: X_train = self.X_train y_train = self.y_train if hasattr(self.data_splitter, \"transform_sample\"):", "file to load Returns: AutoSearchBase object \"\"\" with open(file_path, 'rb')", "during scoring will not be included in the dictionary but", "pipeline training iteration. 
Callback function takes three positional parameters: The", "variables. train_best_pipeline (boolean): Whether or not to train the best", "with results from each pipeline, and `search_order`: a list describing", "best_pipeline def _num_pipelines(self): \"\"\"Return the number of pipeline evaluations which", "ww from sklearn.model_selection import BaseCrossValidator from .pipeline_search_plots import PipelineSearchPlots from", "The pipeline class, the pipeline parameters, and the AutoMLSearch object.", "call `log_error_callback`. additional_objectives (list): Custom set of objectives to score", "convert_to_seconds, infer_feature_types from evalml.utils.logger import ( get_logger, log_subtitle, log_title, time_elapsed,", "import plotly; skipping pipeline search plotting\\n\") self.allowed_pipelines = allowed_pipelines self.allowed_model_families", "n_jobs) are used. ensembling (boolean): If True, runs ensembling in", "max_batches (int): The maximum number of batches of pipelines to", "allowed in the search. \"\"\" if X_train is None: raise", "self.data_splitter = self.data_splitter or default_data_splitter self.pipeline_parameters = pipeline_parameters if pipeline_parameters", "a {} problem.\".format(self.objective.name, self.problem_type.value)) if additional_objectives is None: additional_objectives =", "return False elif self.max_iterations and num_pipelines >= self.max_iterations: return False", "in [\"# Training\", \"# Validation\"]: all_objective_scores[c] = all_objective_scores[c].astype(\"object\") continue mean", "self.patience: logger.info(\"\\n\\n{} iterations without improvement. 
Stopping search early...\".format(self.patience)) return False", "pipelines to search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling = self.ensembling if run_ensembling and", "\"high_variance_cv\": high_variance_cv, \"training_time\": training_time, \"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name],", "specify training data target values as a 1d vector using", "at the {first_ensembling_iteration} iteration and every {len(self.allowed_pipelines) * self._pipelines_per_batch} iterations", "max_iterations elapsed = time.time() - self._start if self.max_time and elapsed", "in pipeline_results[\"cv_data\"]] all_objective_scores = pd.DataFrame(all_objective_scores) for c in all_objective_scores: if", "\"\"\" return len(self._results['pipeline_results']) def _should_continue(self): \"\"\"Given the original stopping criterion", "found during automl search. 
If `train_best_pipeline` is set to False,", "= self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start = time.time() try: self._add_baseline_pipelines() except KeyboardInterrupt: if", "is not None: logger.info(f\"Searching up to {self.max_batches} batches for a", "search over per batch is one, ensembling will not run.", "if obj.name == self.objective.name), None) if existing_main_objective is not None:", "if additional_objectives is None: additional_objectives = get_core_objectives(self.problem_type) # if our", "{[pipeline.name for pipeline in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set to {self.allowed_model_families}\") if", "\"\"\"Given the ID of a pipeline training result, returns an", "return rankings_df @property def best_pipeline(self): \"\"\"Returns a trained instance of", "\", \".join([model.value for model in self.allowed_model_families])) self.search_iteration_plot = None if", "len(desc) > AutoMLSearch._MAX_NAME_LEN: desc = desc[:AutoMLSearch._MAX_NAME_LEN - 3] + \"...\"", "bool: If True, search should terminate early \"\"\" leading_char =", "= get_objective(objective, return_instance=False) self.objective = self._validate_objective(objective) if self.data_splitter is not", "isinstance(patience, int) or patience < 0): raise ValueError(\"patience value must", "max_time, and max_iterations have precedence over stopping the search. problem_configuration", "[] current_batch_pipeline_scores = [] new_pipeline_ids = [] loop_interrupted = False", "to True. start_iteration_callback (callable): Function called before each pipeline training", "0. 
n_jobs (int or None): Non-negative integer describing level of", "ID \"\"\" pipeline_results = self.results['pipeline_results'].get(pipeline_id) if pipeline_results is None: raise", "the original stopping criterion and current state, should the search", "that the time in this loop does not count towards", "in allowed_estimators] if self.allowed_pipelines == []: raise ValueError(\"No allowed pipelines", "yes, False if no. \"\"\" if self._interrupted: return False #", "object \"\"\" with open(file_path, 'rb') as f: return cloudpickle.load(f) def", "in non_core_objectives: raise ValueError(f\"{objective.name.lower()} is not allowed in AutoML! \"", "with the parameters used to train that pipeline during automl", "full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores = full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All", "1 - num_ensemble_batches) + num_ensemble_batches) else: self.max_iterations = 1 +", "ValueError: raise ValueError('choose one of (binary, multiclass, regression) as problem_type')", "for supervised learning tasks. problem_type (str or ProblemTypes): type of", "def describe_pipeline(self, pipeline_id, return_dict=False): \"\"\"Describe a pipeline Arguments: pipeline_id (int):", "and self._best_pipeline == self.get_pipeline(best_pipeline['id'])): best_pipeline = self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline: if", "file path Arguments: file_path (str): location to save file pickle_protocol", "= 40 # Necessary for \"Plotting\" documentation, since Sphinx does", "run_ensembling = False if run_ensembling and self.max_iterations is not None:", "(n_cpus + 1 + n_jobs) are used. ensembling (boolean): If", "data. 
This can be helpful for training pipelines once the", "pipeline_results = self._results['pipeline_results'][pipeline_id] pipeline.describe() if pipeline.model_family == ModelFamily.ENSEMBLE: logger.info(\"Input for", "1) if self.max_iterations < first_ensembling_iteration: run_ensembling = False logger.warning(f\"Ensembling is", "training data we'll set aside for training ensemble metalearners. Only", "is too small, so ensembling will not run. Set max_iterations", "y_train = y_train.iloc[train_indices] best_pipeline = self._engine.train_pipeline(best_pipeline, X_train, y_train, self.optimize_thresholds, self.objective)", "self.max_iterations and num_pipelines >= self.max_iterations: return False # check for", "ensembling.\") else: logger.info(f\"Ensembling will run at the {first_ensembling_iteration} iteration and", "(ww.DataTable, pd.DataFrame): Holdout features. y_holdout (ww.DataTable, pd.DataFrame): Holdout targets for", "score pipelines.\") if self.max_batches is not None: logger.info(f\"Searching up to", "self._best_pipeline = best_pipeline def _num_pipelines(self): \"\"\"Return the number of pipeline", "pipeline to describe return_dict (bool): If True, return dictionary of", "location to find file to load Returns: AutoSearchBase object \"\"\"", "open(file_path, 'rb') as f: return cloudpickle.load(f) def train_pipelines(self, pipelines): \"\"\"Train", "obj in additional_objectives] self.additional_objectives = additional_objectives self.objective_name_to_class = {o.name: o", "if abs(mean) > 0 else np.inf all_objective_scores = all_objective_scores.fillna(\"-\") with", "also accepts kwargs, so AutoMLSearch is able to pass along", "in the search. \"\"\" if X_train is None: raise ValueError('Must", "np.nan on the primary objective {self.objective}.\") self.search_duration = time.time() -", "containing the training results for the new pipeline, an untrained_pipeline", "-1, (n_cpus + 1 + n_jobs) are used. 
ensembling (boolean):", "at file path Arguments: file_path (str): location to save file", "for o in obj_list]) return '\\n'.join(lines) def _get_funct_name(function): if callable(function):", "as pd import woodwork as ww from sklearn.model_selection import BaseCrossValidator", "if pipeline_class is None or parameters is None: raise PipelineNotFoundError(\"Pipeline", "self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property def full_rankings(self): \"\"\"Returns a pandas.DataFrame with scoring", "rankings_df.reset_index(drop=True, inplace=True) return rankings_df @property def best_pipeline(self): \"\"\"Returns a trained", "else: logger.info(f\"Ensembling will run every {ensemble_nth_batch} batches.\") self.max_iterations = (1", "make sure it is different from the current best pipeline", "None: # Baseline + first batch + each pipeline iteration", "allowed. Setting this field will cause allowed_model_families to be ignored.", "_print_list(obj_list): lines = sorted(['\\t{}'.format(o.name) for o in obj_list]) return '\\n'.join(lines)", "before search _baseline_cv_scores will be empty so we will return", "Set max_iterations >= {first_ensembling_iteration} to run ensembling.\") else: logger.info(f\"Ensembling will", "main objective is part of default set of objectives for", "max_iterations have precedence over stopping the search. problem_configuration (dict, None):", "of the training data we'll set aside for training ensemble", "except ImportError: logger.warning(\"Unable to import plotly; skipping pipeline search plotting\\n\")", "= self.data_splitter or default_data_splitter self.pipeline_parameters = pipeline_parameters if pipeline_parameters is", "get_objective(objective, return_instance=False) self.objective = self._validate_objective(objective) if self.data_splitter is not None", "plot = PipelineSearchPlots def __init__(self, X_train=None, y_train=None, problem_type=None, objective='auto', max_iterations=None,", "number generator. 
Defaults to 0. n_jobs (int or None): Non-negative", "be None or non-negative. Received {max_time}.\") if max_batches is not", "default set of objectives for problem_type, remove it existing_main_objective =", "for multiclass classification problems, and - R2 for regression problems.", "accepts kwargs, so AutoMLSearch is able to pass along other", "that error out during training will not be included in", "== 1: logger.warning(\"Ensembling is set to True, but the number", "\" + str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\") logger.info(\"Training for {} problems.\".format(pipeline.problem_type)) if", "return pipeline_results def add_to_rankings(self, pipeline): \"\"\"Fits and evaluates a given", "run_ensembling = False logger.warning(f\"Ensembling is set to True, but max_batches", "{self.tuner_class.__name__}\\n\" f\"Start Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional", "= self._check_for_high_variance(pipeline, cv_scores) pipeline_id = len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] = { \"id\":", "== ModelFamily.ENSEMBLE: logger.info(\"Input for ensembler are pipelines with IDs: \"", "not None else {} self.search_iteration_plot = None self._interrupted = False", "for obj in self.additional_objectives: if not obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional objective", "pipeline_class is None or parameters is None: raise PipelineNotFoundError(\"Pipeline class", "to load Returns: AutoSearchBase object \"\"\" with open(file_path, 'rb') as", "int) or patience < 0): raise ValueError(\"patience value must be", "None: raise PipelineNotFoundError(\"Pipeline not found in automl results\") pipeline_class =", "0 else np.inf all_objective_scores = all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format', '{:.3f}'.format, 
'expand_frame_repr',", "results from the highest-scoring set of parameters used with each", "to qualify as score improvement for early stopping. Only applicable", "c in [\"# Training\", \"# Validation\"]: all_objective_scores[c] = all_objective_scores[c].astype(\"object\") continue", "Exception. Callback function takes three positional parameters: the Exception raised,", "self.objective_name_to_class) if is_baseline: self._baseline_cv_scores = mean_cv_all_objectives for obj_name in mean_cv_all_objectives:", "self.problem_type == ProblemTypes.REGRESSION: baseline = MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION:", "{self._engine.__class__.__name__} to train and score pipelines.\") if self.max_batches is not", "an untrained_pipeline containing the parameters used during training, and the", "else 'Lower')) logger.info(f\"Using {self._engine.__class__.__name__} to train and score pipelines.\") if", "`binary` to `multiclass` or `regression` depending on the problem type.", "object at file path Arguments: file_path (str): location to save", "False return True def _validate_problem_type(self): for obj in self.additional_objectives: if", "will not run. Defaults to False. max_batches (int): The maximum", "if run_ensembling and len(self.allowed_pipelines) == 1: logger.warning(\"Ensembling is set to", "self._start, batch_number, self.show_batch_output) def _validate_objective(self, objective): non_core_objectives = get_non_core_objectives() if", "towards the time budget (if set) time_in_loop = time.time() -", "or categorical. 
Categorical features will automatically be encoded show_iteration_plot (boolean,", "= self._validate_objective(objective) if self.data_splitter is not None and not issubclass(self.data_splitter.__class__,", "\"\"\"Returns a pandas.DataFrame with scoring results from all pipelines searched\"\"\"", "AutoMLSearch.\") return True elif choice == \"n\": # So that", "if self.search_iteration_plot: self.search_iteration_plot.update() if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self) return pipeline_id", "so ensembling will not run. Set max_batches >= {ensemble_nth_batch +", "Necessary for \"Plotting\" documentation, since Sphinx does not work well", "type of pipeline components, problem, training time, cross validation, etc.", "sure it is different from the current best pipeline before", "parameters, and the AutoMLSearch object. add_result_callback (callable): Function called after", "be empty so we will return # nan for the", "X_holdout, y_holdout, objectives): \"\"\"Score a list of pipelines on the", "three positional parameters: the Exception raised, the traceback, and the", "empty. random_seed (int): Seed for the random number generator. Defaults", "parameter will be ignored. data_splitter (sklearn.model_selection.BaseCrossValidator): Data splitting method to", "\"\"\"Returns a pandas.DataFrame with scoring results from the highest-scoring set", "to -1, all CPUs are used. For n_jobs below -1,", "have precedence over stopping the search. 
problem_configuration (dict, None): Additional", "\"Cross Validation\", underline=\"-\") all_objective_scores = [fold[\"all_objective_scores\"] for fold in pipeline_results[\"cv_data\"]]", "input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids", "problem_configuration (dict, None): Additional parameters needed to configure the search.", "additional_objectives if obj.name == self.objective.name), None) if existing_main_objective is not", "+= 1 if num_without_improvement >= self.patience: logger.info(\"\\n\\n{} iterations without improvement.", "return {objective: float(score) / n_folds for objective, score in scores.items()}", "pipeline to the data. This is the first pipeline fit", "equivalent. If set to -1, all CPUs are used. For", "requirement that automl search has been run. Arguments: pipeline (PipelineBase):", "Set max_batches >= {ensemble_nth_batch + 1} to run ensembling.\") else:", "between 0.0 and 1.0 inclusive. Received {} instead\".format(tolerance)) self.patience =", "\"id\": pipeline_id, \"pipeline_name\": pipeline.name, \"pipeline_class\": type(pipeline), \"pipeline_summary\": pipeline.summary, \"parameters\": pipeline.parameters,", "called when `search()` errors and raises an Exception. Callback function", "one, ensembling will not run. Defaults to False. max_batches (int):", "random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params ) def _pre_evaluation_callback(self, pipeline):", "for all objectives # but also fields like \"# Training\"", "class has been iterated over. If the number of unique", "1 are equivalent. If set to -1, all CPUs are", "pipeline during automl search. 
Arguments: pipeline_id (int): pipeline to retrieve", "path Arguments: file_path (str): location to save file pickle_protocol (int):", "path Arguments: file_path (str): location to find file to load", "not objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given objective {} is not compatible with", "The default of None indicates all pipelines for this problem", "None: self.show_batch_output = True if run_ensembling: ensemble_nth_batch = len(self.allowed_pipelines) +", "0: run_ensembling = False logger.warning(f\"Ensembling is set to True, but", "= None default_data_splitter = make_data_splitter(self.X_train, self.y_train, self.problem_type, self.problem_configuration, n_splits=3, shuffle=True,", "\" f\"parameters. Received {problem_configuration}.\") return problem_configuration or {} def _handle_keyboard_interrupt(self):", "max_batches is too small, so ensembling will not run. Set", "# they are not scores if field in objective_name_to_class: scores[field]", "seconds\" % pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross Validation\", underline=\"-\") all_objective_scores = [fold[\"all_objective_scores\"]", "to False. Returns: Description of specified pipeline. Includes information such", "self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if is_baseline: self._baseline_cv_scores = mean_cv_all_objectives for obj_name in", "# Baseline + first batch + each pipeline iteration +", "= self.y_train if hasattr(self.data_splitter, \"transform_sample\"): train_indices = self.data_splitter.transform_sample(X_train, y_train) X_train", "are equivalent. If set to -1, all CPUs are used.", "return None search_desc = ( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\" f\"Objective:", "is None: additional_objectives = get_core_objectives(self.problem_type) # if our main objective", "not run. 
Set max_iterations >= {first_ensembling_iteration} to run ensembling.\") else:", "allows access to a copy of the results from `automl_search`.", "given pipeline then adds the results to the automl rankings", "desc[:AutoMLSearch._MAX_NAME_LEN - 3] + \"...\" desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number =", "untrained_pipeline containing the parameters used during training, and the AutoMLSearch", "run.\") run_ensembling = False if run_ensembling and self.max_iterations is not", "list of feature types, either numerical or categorical. Categorical features", "[] new_pipeline_ids = [] loop_interrupted = False while self._should_continue(): try:", "logger.warning(f\"Ensembling is set to True, but max_iterations is too small,", "problem.\".format(self.objective.name, self.problem_type.value)) if additional_objectives is None: additional_objectives = get_core_objectives(self.problem_type) #", "from evalml.automl.automl_algorithm import IterativeAlgorithm from evalml.automl.callbacks import log_error_callback from evalml.automl.engine", "stopping the search. problem_configuration (dict, None): Additional parameters needed to", "model in self.allowed_model_families])) self.search_iteration_plot = None if self.plot: self.search_iteration_plot =", "{self.n_jobs}\\n\" f\"Optimize Thresholds: {self.optimize_thresholds}\\n\" ) rankings_desc = \"\" if not", "family allowed in the search. \"\"\" if X_train is None:", "a jupyter notebook if show_iteration_plot: try: get_ipython except NameError: show_iteration_plot", "a warning if variance is higher than specified threshhold.\"\"\" pipeline_name", "not run. Defaults to False. max_batches (int): The maximum number", "pipelines (list(PipelineBase)): List of pipelines to train. X_holdout (ww.DataTable, pd.DataFrame):", "pipeline training iteration. Callback function takes three positional parameters: A", "the best pipeline in the rankings If self._best_pipeline already exists,", "run again on the same instance. 
Re-initialize AutoMLSearch to search", "\"\"\"Find the best pipeline for the data set. Arguments: feature_types", "self.allowed_pipelines == []: raise ValueError(\"No allowed pipelines to search\") check_all_pipeline_names_unique(self.allowed_pipelines)", "self.problem_type = handle_problem_types(problem_type) except ValueError: raise ValueError('choose one of (binary,", "additional_objectives = [self._validate_objective(obj) for obj in additional_objectives] self.additional_objectives = additional_objectives", "be a positive integer. Received {} instead\".format(patience)) if tolerance and", "f\"{pipeline.name}\" if len(desc) > AutoMLSearch._MAX_NAME_LEN: desc = desc[:AutoMLSearch._MAX_NAME_LEN - 3]", "self._interrupted = True current_batch_pipelines = [] current_batch_pipeline_scores = [] new_pipeline_ids", "current AutoML batch produced a score of np.nan on the", "produced a score of np.nan on the primary objective {self.objective}.\")", "len(self.rankings) == 0: return best_pipeline = self.rankings.iloc[0] if not (self._best_pipeline", "float(score) / n_folds for objective, score in scores.items()} def _post_evaluation_callback(self,", "split_data(X_shape, self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist() self._engine =", "None and 1 are equivalent. If set to -1, all", "must be None or non-negative. Received {max_iterations}.\") self.max_time = convert_to_seconds(max_time)", "num_pipelines = self._num_pipelines() if num_pipelines == 0: return True #", "`search()` errors and raises an Exception. Callback function takes three", "self.show_batch_output = True if run_ensembling: ensemble_nth_batch = len(self.allowed_pipelines) + 1", "training ensemble metalearners. Only used when ensembling is True. Must", "will be displayed in the log. 
\"\"\" return self._engine.train_batch(pipelines) def", "\"training_time\": training_time, \"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0]", "isinstance(max_time, (int, float, str, type(None))): raise TypeError(f\"Parameter max_time must be", "for new pipelines after %d seconds.\\n\" % self.max_time) logger.info(\"Allowed model", "for at least the gap and max_delay \" f\"parameters. Received", "\"Do you really want to exit search (y/n)? \").strip().lower() if", "pipeline search\") logger.info(\"Optimizing for %s. \" % self.objective.name) logger.info(\"{} score", "(int, float, str, type(None))): raise TypeError(f\"Parameter max_time must be a", "significant_change = abs((curr_score - best_score) / best_score) > self.tolerance score_improved", "for this problem type are allowed. Setting this field will", "- LogLossMulticlass for multiclass classification problems, and - R2 for", "= {o.name: o for o in [self.objective] + self.additional_objectives} if", "on the training data. 
This can be helpful for training", "error_callback or log_error_callback self.data_splitter = data_splitter self.optimize_thresholds = optimize_thresholds self.ensembling", "in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters = {'gap', 'max_delay'} if not problem_configuration or", "(boolean): If True, runs ensembling in a separate batch after", "best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline: {best_pipeline_name}\") logger.info(f\"Best pipeline {self.objective.name}: {best_pipeline['score']:3f}\") self._searched =", "logger.warning(\"Unable to import plotly; skipping pipeline search plotting\\n\") self.allowed_pipelines =", "error out during training will not be included in the", "0.2 _pipelines_per_batch (int): The number of pipelines to train for", "If max_iterations and max_time is not set, then max_iterations will", "except PipelineNotFoundError: pass if self.search_iteration_plot: self.search_iteration_plot.update() if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline,", "for %s. \" % self.objective.name) logger.info(\"{} score is better.\\n\".format('Greater' if", "raise ValueError(\"Additional objective {} is not compatible with a {}", "pipeline family allowed in the search. \"\"\" if X_train is", "self.y_train if hasattr(self.data_splitter, \"transform_sample\"): train_indices = self.data_splitter.transform_sample(X_train, y_train) X_train =", "the search is complete. Arguments: pipelines (list(PipelineBase)): List of pipelines", "run. 
Set max_iterations >= {first_ensembling_iteration} to run ensembling.\") else: logger.info(f\"Ensembling", "\"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0] } if", "found in automl results\") return pipeline_class(parameters, random_seed=self.random_seed) def describe_pipeline(self, pipeline_id,", "from all pipelines searched\"\"\" ascending = True if self.objective.greater_is_better: ascending", "\"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0] } if pipeline.model_family ==", "ValueError(f\"{objective.name.lower()} is not allowed in AutoML! \" \"Use evalml.objectives.utils.get_core_objective_names() \"", "ensembler are pipelines with IDs: \" + str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\")", "(str): location to save file pickle_protocol (int): the pickle data", "at least one pipeline for every search num_pipelines = self._num_pipelines()", "early stopping is disabled. Defaults to None. tolerance (float): Minimum", "train_best_pipeline self._best_pipeline = None self._searched = False self.X_train = infer_feature_types(X_train)", "max_iterations (int): Maximum number of iterations to search. If max_iterations", "all CPUs are used. For n_jobs below -1, (n_cpus +", "ModelFamily)): The model families to search. The default of None", "or ProblemTypes): type of supervised learning problem. 
See evalml.problem_types.ProblemType.all_problem_types for", "score in scores.items()} def _post_evaluation_callback(self, pipeline, evaluation_results): training_time = evaluation_results['training_time']", "len(self.allowed_pipelines) + self._pipelines_per_batch * (self.max_batches - 1 - num_ensemble_batches) +", "high_variance_cv, \"training_time\": training_time, \"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\":", "The tuner class to use. Defaults to SKOptTuner. optimize_thresholds (bool):", "has already been run and will not run again on", "= sorted(['\\t{}'.format(o.name) for o in obj_list]) return '\\n'.join(lines) def _get_funct_name(function):", "= cv_scores.mean() percent_better_than_baseline = {} mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if", "= self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if is_baseline: self._baseline_cv_scores = mean_cv_all_objectives for obj_name", "In the event add_to_rankings is called before search _baseline_cv_scores will", "base score. percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name] = percent_better", "self.allowed_pipelines or []: if pipeline.problem_type != self.problem_type: raise ValueError(\"Given pipeline", "= [make_pipeline(self.X_train, self.y_train, estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters) for estimator in allowed_estimators]", "evalml.preprocessing import split_data from evalml.problem_types import ProblemTypes, handle_problem_types from evalml.tuners", "True, but max_batches is too small, so ensembling will not", "applicable if patience is not None. Defaults to None. 
allowed_pipelines", "Pipeline search.\"\"\" _MAX_NAME_LEN = 40 # Necessary for \"Plotting\" documentation,", "so we want to exclude them since # they are", "= time.time() - self._start elapsed_time = time_elapsed(self._start) desc = f\"\\nSearch", "search. \"\"\" if self.problem_type == ProblemTypes.BINARY: baseline = ModeBaselineBinaryPipeline(parameters={}) elif", "self.problem_type, self.problem_configuration, n_splits=3, shuffle=True, random_seed=self.random_seed) self.data_splitter = self.data_splitter or default_data_splitter", "for every search num_pipelines = self._num_pipelines() if num_pipelines == 0:", "they want to stop the search. Returns: bool: If True,", "but max_iterations is too small, so ensembling will not run.", "/ mean if abs(mean) > 0 else np.inf all_objective_scores =", "{o.name: o for o in [self.objective] + self.additional_objectives} if not", "self._results['pipeline_results'][id]['score'] significant_change = abs((curr_score - best_score) / best_score) > self.tolerance", "the duration has elapsed. If it is an integer, then", "is True. Must be between 0 and 1, exclusive. Defaults", "set of objectives to score on. 
Will override default objectives", "= infer_feature_types(y_train) self.ensembling_indices = None default_data_splitter = make_data_splitter(self.X_train, self.y_train, self.problem_type,", "new_pipeline_ids = [] loop_interrupted = False while self._should_continue(): try: if", "_pipelines_per_batch (int): The number of pipelines to train for every", "problems, - LogLossMulticlass for multiclass classification problems, and - R2", "and (not isinstance(patience, int) or patience < 0): raise ValueError(\"patience", "\"\\n\" start_of_loop = time.time() while True: choice = input(leading_char +", "hasattr(self.data_splitter, \"transform_sample\"): train_indices = self.data_splitter.transform_sample(X_train, y_train) X_train = X_train.iloc[train_indices] y_train", "Returns: int: the number of pipeline evaluations made in the", "best_score if score_improved and significant_change: best_score = curr_score num_without_improvement =", "classification pipeline thresholds for: {}\".format(self.objective)) logger.info(\"Total training time (including CV):", "values should be passed in for the gap and max_delay", "= all_objective_scores[c].mean(axis=0) std = all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c] = mean all_objective_scores.loc[\"std\",", "best_pipeline = self.rankings.iloc[0] best_pipeline_name = best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline: {best_pipeline_name}\") logger.info(f\"Best", "if allowed_pipelines is provided, this parameter will be ignored. data_splitter", "best pipeline for the data set. 
Arguments: feature_types (list, optional):", "self.max_time) logger.info(\"Allowed model families: %s\\n\" % \", \".join([model.value for model", "self.ensembling if run_ensembling and len(self.allowed_pipelines) == 1: logger.warning(\"Ensembling is set", "\"high_variance_cv\", \"parameters\"] if not self._results['pipeline_results']: return pd.DataFrame(columns=full_rankings_cols) rankings_df = pd.DataFrame(self._results['pipeline_results'].values())", "pipeline_params = {**{'pipeline': self.problem_configuration}, **self.pipeline_parameters} else: pipeline_params = self.pipeline_parameters self._automl_algorithm", "the number of unique pipelines to search over per batch", "traceback, and the AutoMLSearch object. Must also accepts kwargs, so", "set. Arguments: feature_types (list, optional): list of feature types, either", "ensemble_nth_batch if num_ensemble_batches == 0: run_ensembling = False logger.warning(f\"Ensembling is", "if patience is not None. Defaults to None. allowed_pipelines (list(class)):", "not found\") pipeline = self.get_pipeline(pipeline_id) pipeline_results = self._results['pipeline_results'][pipeline_id] pipeline.describe() if", "if self._searched: logger.info(\"AutoMLSearch.search() has already been run and will not", "ModelFamily.BASELINE cv_score = cv_scores.mean() percent_better_than_baseline = {} mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data,", "from the current best pipeline before training and thresholding\"\"\" if", "get_default_primary_search_objective(self.problem_type.value) objective = get_objective(objective, return_instance=False) self.objective = self._validate_objective(objective) if self.data_splitter", "if high_variance_cv: logger.warning(f\"High coefficient of variation (cv >= {threshold}) within", "+= time_in_loop return False else: leading_char = \"\" def search(self,", "`automl_search`. 
Returns: dict containing `pipeline_results`: a dict with results from", "pickle_protocol (int): the pickle data stream format. Returns: None \"\"\"", "results\") pipeline_class = pipeline_results.get('pipeline_class') parameters = pipeline_results.get('parameters') if pipeline_class is", "self._best_pipeline def save(self, file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML object at file", "is disabled. Defaults to None. tolerance (float): Minimum percentage difference", "%s pipelines. \" % self.max_iterations) if self.max_time is not None:", "if pipeline.model_family == ModelFamily.ENSEMBLE: logger.info(\"Input for ensembler are pipelines with", "self.max_iterations < first_ensembling_iteration: run_ensembling = False logger.warning(f\"Ensembling is set to", "self._results['search_order'][1:]: curr_score = self._results['pipeline_results'][id]['score'] significant_change = abs((curr_score - best_score) /", "{ \"id\": pipeline_id, \"pipeline_name\": pipeline.name, \"pipeline_class\": type(pipeline), \"pipeline_summary\": pipeline.summary, \"parameters\":", "not allowed in AutoML! \" \"Use evalml.objectives.utils.get_core_objective_names() \" \"to get", "{_print_list(self.additional_objectives or [])}\\n\" f\"Random Seed: {self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize Thresholds:", "= evaluation_results['training_time'] cv_data = evaluation_results['cv_data'] cv_scores = evaluation_results['cv_scores'] is_baseline =", "pipelines, but not for optimizing each pipeline during fit-time. 
When", "> 0 else np.inf all_objective_scores = all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format', '{:.3f}'.format,", "rankings_desc = \"\" if not self.rankings.empty: rankings_str = self.rankings.drop(['parameters'], axis='columns').to_string()", "elapsed = time.time() - self._start if self.max_time and elapsed >=", "Validation\", underline=\"-\") all_objective_scores = [fold[\"all_objective_scores\"] for fold in pipeline_results[\"cv_data\"]] all_objective_scores", "scores. Note that the any pipelines that error out during", "self.allowed_model_families])) self.search_iteration_plot = None if self.plot: self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start", "None else {} self.search_iteration_plot = None self._interrupted = False if", "vs. score plot in Jupyter notebook. Disabled by default in", "be None or non-negative. Received {max_iterations}.\") self.max_time = convert_to_seconds(max_time) if", "used for scoring. Returns: Dict[str, Dict[str, float]]: Dictionary keyed by", "and logs a warning if variance is higher than specified", "save file pickle_protocol (int): the pickle data stream format. Returns:", "self._automl_algorithm.batch_number > 0: batch_number = self._automl_algorithm.batch_number update_pipeline(logger, desc, len(self._results['pipeline_results']) +", "start_of_loop = time.time() while True: choice = input(leading_char + \"Do", "< best_score if score_improved and significant_change: best_score = curr_score num_without_improvement", "or SKOptTuner self.start_iteration_callback = start_iteration_callback self.add_result_callback = add_result_callback self.error_callback =", "results from each pipeline, and `search_order`: a list describing the", "will be displayed in the log. 
\"\"\" return self._engine.score_batch(pipelines, X_holdout,", "def __init__(self, X_train=None, y_train=None, problem_type=None, objective='auto', max_iterations=None, max_time=None, patience=None, tolerance=None,", "number of pipeline evaluations which have been made Returns: int:", "of {self.max_iterations} pipelines. \") elif self.max_iterations is not None: logger.info(\"Searching", "prompt to the user asking if they want to stop", "the search. Returns: bool: If True, search should terminate early", "import ProblemTypes, handle_problem_types from evalml.tuners import SKOptTuner from evalml.utils import", "Callback function takes three positional parameters: the Exception raised, the", "class or parameters not found in automl results\") return pipeline_class(parameters,", "not (self._best_pipeline and self._best_pipeline == self.get_pipeline(best_pipeline['id'])): best_pipeline = self.get_pipeline(best_pipeline['id']) if", "split_data from evalml.problem_types import ProblemTypes, handle_problem_types from evalml.tuners import SKOptTuner", "tolerance (float): Minimum percentage difference to qualify as score improvement", "= False logger.warning(f\"Ensembling is set to True, but max_iterations is", "self._train_best_pipeline: if best_pipeline.model_family == ModelFamily.ENSEMBLE: X_train, y_train = self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices]", "pipeline for every search num_pipelines = self._num_pipelines() if num_pipelines ==", "on. Will override default objectives for problem type if not", "untrained pipeline instance. 
Returns: PipelineBase: A trained instance of the", "None: raise PipelineNotFoundError(\"Pipeline class or parameters not found in automl", "{ensemble_nth_batch} batches.\") self.max_iterations = (1 + len(self.allowed_pipelines) + self._pipelines_per_batch *", "'wb') as f: cloudpickle.dump(self, f, protocol=pickle_protocol) @staticmethod def load(file_path): \"\"\"Loads", "raise AutoMLSearchException(f\"All pipelines in the current AutoML batch produced a", "training time, cross validation, etc. \"\"\" if pipeline_id not in", "instance. Re-initialize AutoMLSearch to search again.\") return # don't show", "time can be specified as seconds, minutes, or hours. patience", "improvement to stop search early. Must be positive. If None,", "train. X_holdout (ww.DataTable, pd.DataFrame): Holdout features. y_holdout (ww.DataTable, pd.DataFrame): Holdout", "evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see options. Change `binary` to `multiclass` or `regression`", "Required for supervised learning tasks. problem_type (str or ProblemTypes): type", "duration has elapsed. If it is an integer, then the", "TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline ) from evalml.pipelines.components.utils import get_estimators from evalml.pipelines.utils import", "seconds. For strings, time can be specified as seconds, minutes,", "0): raise ValueError(\"patience value must be a positive integer. 
Received", "PipelineNotFoundError(\"Pipeline not found in automl results\") pipeline_class = pipeline_results.get('pipeline_class') parameters", "logger.info(f\"Best pipeline: {best_pipeline_name}\") logger.info(f\"Best pipeline {self.objective.name}: {best_pipeline['score']:3f}\") self._searched = True", "rankings_str = self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc = f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\" return", "self._automl_algorithm = IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch,", "raised, the traceback, and the AutoMLSearch object. Must also accepts", "objective in non_core_objectives: raise ValueError(f\"{objective.name.lower()} is not allowed in AutoML!", "for \"Plotting\" documentation, since Sphinx does not work well with", "problem_configuration=None): if self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters = {'gap', 'max_delay'} if", "must be a positive integer. 
Received {} instead\".format(patience)) if tolerance", "> 0: batch_number = self._automl_algorithm.batch_number update_pipeline(logger, desc, len(self._results['pipeline_results']) + 1,", "raise PipelineNotFoundError(\"Pipeline class or parameters not found in automl results\")", "results to the automl rankings with the requirement that automl", "len(self.problem_configuration): pipeline_params = {**{'pipeline': self.problem_configuration}, **self.pipeline_parameters} else: pipeline_params = self.pipeline_parameters", "perform as estimated on unseen data.\") return high_variance_cv def get_pipeline(self,", "self.allowed_model_families = list(set([p.model_family for p in (self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set to", "isinstance(max_time, str) else max_time self.max_iterations = max_iterations self.max_batches = max_batches", "pipeline = self.get_pipeline(pipeline_id) pipeline_results = self._results['pipeline_results'][pipeline_id] pipeline.describe() if pipeline.model_family ==", "self.full_rankings current_batch_idx = full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores = full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores) and", "(list(str), list(ObjectiveBase)): Objectives used for scoring. 
Returns: Dict[str, Dict[str, float]]:", "= desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline() if self._best_pipeline is not None: best_pipeline", "random_seed=0, n_jobs=-1, tuner_class=None, optimize_thresholds=True, ensembling=False, max_batches=None, problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2,", "= { 'pipeline_results': {}, 'search_order': [], 'errors': [] } self.random_seed", "patience=None, tolerance=None, data_splitter=None, allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None, add_result_callback=None, error_callback=None, additional_objectives=None, random_seed=0,", "self.additional_objectives = additional_objectives self.objective_name_to_class = {o.name: o for o in", "self.objective.name) logger.info(\"{} score is better.\\n\".format('Greater' if self.objective.greater_is_better else 'Lower')) logger.info(f\"Using", "after %d seconds.\\n\" % self.max_time) logger.info(\"Allowed model families: %s\\n\" %", "a prompt to the user asking if they want to", "self.objective.greater_is_better else curr_score < best_score if score_improved and significant_change: best_score", "== ModelFamily.ENSEMBLE: X_train, y_train = self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else: X_train =", "self.problem_configuration['gap'] max_delay = self.problem_configuration['max_delay'] baseline = pipeline_class(parameters={\"pipeline\": {\"gap\": gap, \"max_delay\":", "== \"y\": logger.info(\"Exiting AutoMLSearch.\") return True elif choice == \"n\":", "Training\" and \"# Testing\", so we want to exclude them", "variation (cv >= {threshold}) within cross validation scores. 
{pipeline_name} may", "infer_feature_types(X_train) self.y_train = infer_feature_types(y_train) self.ensembling_indices = None default_data_splitter = make_data_splitter(self.X_train,", "len(self.allowed_pipelines) + (self._pipelines_per_batch * (self.max_batches - 1)) if run_ensembling: if", "f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize Thresholds: {self.optimize_thresholds}\\n\" ) rankings_desc = \"\" if", "multiclass, regression) as problem_type') self.tuner_class = tuner_class or SKOptTuner self.start_iteration_callback", "\"\"\"Presents a prompt to the user asking if they want", "to SKOptTuner. optimize_thresholds (bool): Whether or not to optimize the", "len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All pipelines in the current AutoML", "data_splitter (sklearn.model_selection.BaseCrossValidator): Data splitting method to use. Defaults to StratifiedKFold.", "CV): %.1f seconds\" % pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross Validation\", underline=\"-\") all_objective_scores", "not None: logger.info(\"Will stop searching for new pipelines after %d", "Training\", \"# Validation\"]: all_objective_scores[c] = all_objective_scores[c].astype(\"object\") continue mean = all_objective_scores[c].mean(axis=0)", "parameters found during automl search. If `train_best_pipeline` is set to", "validation scores. 
{pipeline_name} may not perform as estimated on unseen", "so we will return # nan for the base score.", "self.ensembling_indices = None default_data_splitter = make_data_splitter(self.X_train, self.y_train, self.problem_type, self.problem_configuration, n_splits=3,", "self.ensembling = ensembling if objective == 'auto': objective = get_default_primary_search_objective(self.problem_type.value)", "takes three positional parameters: The pipeline class, the pipeline parameters,", "obj.name == self.objective.name), None) if existing_main_objective is not None: additional_objectives.remove(existing_main_objective)", "self.tolerance = tolerance or 0.0 self._results = { 'pipeline_results': {},", "and max_time is not set, then max_iterations will default to", "logger.debug(f\"allowed_pipelines set to {[pipeline.name for pipeline in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set", "'\\n'.join(lines) def _get_funct_name(function): if callable(function): return function.__name__ else: return None", "the best pipeline before returning it. Defaults to True. pipeline_parameters", "def _handle_keyboard_interrupt(self): \"\"\"Presents a prompt to the user asking if", "is None: raise PipelineNotFoundError(\"Pipeline class or parameters not found in", "None) if existing_main_objective is not None: additional_objectives.remove(existing_main_objective) else: additional_objectives =", "scoring. 
Returns: Dict[str, Dict[str, float]]: Dictionary keyed by pipeline name", "True if self._handle_keyboard_interrupt(): break full_rankings = self.full_rankings current_batch_idx = full_rankings['id'].isin(new_pipeline_ids)", "the dictionary but the exception and stacktrace will be displayed", "again.\") return # don't show iteration plot outside of a", "\"pipeline_name\", \"score\", \"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"] if not self._results['pipeline_results']: return", "from evalml.utils import convert_to_seconds, infer_feature_types from evalml.utils.logger import ( get_logger,", "0.0): raise ValueError(\"tolerance value must be a float between 0.0", "not perform as estimated on unseen data.\") return high_variance_cv def", "abs(mean) > 0 else np.inf all_objective_scores = all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format',", "all_objective_scores[c].astype(\"object\") continue mean = all_objective_scores[c].mean(axis=0) std = all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c]", "set to True, but max_iterations is too small, so ensembling", "defaultdict(int) n_folds = len(cv_data) for fold_data in cv_data: for field,", "be ignored. data_splitter (sklearn.model_selection.BaseCrossValidator): Data splitting method to use. Defaults", "= self.objective_name_to_class[obj_name] # In the event add_to_rankings is called before", "the time in this loop does not count towards the", "to optimize for. 
Used to propose and rank pipelines, but", "score is better.\\n\".format('Greater' if self.objective.greater_is_better else 'Lower')) logger.info(f\"Using {self._engine.__class__.__name__} to", "pipeline_results def add_to_rankings(self, pipeline): \"\"\"Fits and evaluates a given pipeline", "such as type of pipeline components, problem, training time, cross", "best_score = curr_score num_without_improvement = 0 else: num_without_improvement += 1", "{max_time}.\") if max_batches is not None and max_batches < 0:", "import PipelineSearchPlots from evalml.automl.automl_algorithm import IterativeAlgorithm from evalml.automl.callbacks import log_error_callback", "evalml.utils.logger import ( get_logger, log_subtitle, log_title, time_elapsed, update_pipeline ) logger", "the parameters used during training, and the AutoMLSearch object. error_callback", "objective == 'auto': objective = get_default_primary_search_objective(self.problem_type.value) objective = get_objective(objective, return_instance=False)", "= None self._searched = False self.X_train = infer_feature_types(X_train) self.y_train =", "training_time, \"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0] }", "if choice == \"y\": logger.info(\"Exiting AutoMLSearch.\") return True elif choice", "def full_rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results from all", "Arguments: X_train (pd.DataFrame, ww.DataTable): The input training data of shape", "= best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline: {best_pipeline_name}\") logger.info(f\"Best pipeline {self.objective.name}: {best_pipeline['score']:3f}\") self._searched", "woodwork as ww from sklearn.model_selection import BaseCrossValidator from .pipeline_search_plots import", "positional parameters: the Exception raised, the traceback, and the AutoMLSearch", 
"pipeline_results[\"cv_data\"]] all_objective_scores = pd.DataFrame(all_objective_scores) for c in all_objective_scores: if c", "non-negative. Received {max_iterations}.\") self.max_time = convert_to_seconds(max_time) if isinstance(max_time, str) else", "= tuner_class or SKOptTuner self.start_iteration_callback = start_iteration_callback self.add_result_callback = add_result_callback", "None. Defaults to None. allowed_pipelines (list(class)): A list of PipelineBase", "cloudpickle import numpy as np import pandas as pd import", "default of None indicates all pipelines for this problem type", "to True. pipeline_parameters (dict): A dict of the parameters used", "R2 for regression problems. max_iterations (int): Maximum number of iterations", "if run_ensembling: ensemble_nth_batch = len(self.allowed_pipelines) + 1 num_ensemble_batches = (self.max_batches", "been run and will not run again on the same", "with open(file_path, 'rb') as f: return cloudpickle.load(f) def train_pipelines(self, pipelines):", "keyed by pipeline name that maps to a dictionary of", "self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self) return pipeline_id def _check_for_high_variance(self, pipeline, cv_scores, threshold=0.2):", "not all(p in problem_configuration for p in required_parameters): raise ValueError(\"user_parameters", "convert_to_seconds(max_time) if isinstance(max_time, str) else max_time self.max_iterations = max_iterations self.max_batches", "`regression` depending on the problem type. Note that if allowed_pipelines", "(callable): Function called before each pipeline training iteration. Callback function", "must be a float between 0.0 and 1.0 inclusive. 
Received", "value in fold_data['all_objective_scores'].items(): # The 'all_objective_scores' field contains scores for", "cv_scores.mean() percent_better_than_baseline = {} mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if is_baseline:", "a float, int, string or None. Received {type(max_time)} with value", "Validation\"]: all_objective_scores[c] = all_objective_scores[c].astype(\"object\") continue mean = all_objective_scores[c].mean(axis=0) std =", "ascending = False full_rankings_cols = [\"id\", \"pipeline_name\", \"score\", \"validation_score\", \"percent_better_than_baseline\",", "= evaluation_results['cv_scores'] is_baseline = pipeline.model_family == ModelFamily.BASELINE cv_score = cv_scores.mean()", "\"score\": cv_score, \"high_variance_cv\": high_variance_cv, \"training_time\": training_time, \"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline,", "self.problem_configuration, n_splits=3, shuffle=True, random_seed=self.random_seed) self.data_splitter = self.data_splitter or default_data_splitter self.pipeline_parameters", "better.\\n\".format('Greater' if self.objective.greater_is_better else 'Lower')) logger.info(f\"Using {self._engine.__class__.__name__} to train and", "a 1d vector using the y_train argument') try: self.problem_type =", "be ignored. 
allowed_model_families (list(str, ModelFamily)): The model families to search.", "Received {} instead\".format(patience)) if tolerance and (tolerance > 1.0 or", "self.search_iteration_plot = None if self.plot: self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start =", "from evalml.automl.callbacks import log_error_callback from evalml.automl.engine import SequentialEngine from evalml.automl.utils", "a dict with results from each pipeline, and `search_order`: a", "pipeline.parameters == parameter: return self._engine.evaluate_batch([pipeline]) self._find_best_pipeline() @property def results(self): \"\"\"Class", "disabled. Defaults to None. tolerance (float): Minimum percentage difference to", "with problem_type {}.\".format(pipeline.name, self.problem_type.value)) def _add_baseline_pipelines(self): \"\"\"Fits a baseline pipeline", "pipeline.name] for parameter in pipeline_rows['parameters']: if pipeline.parameters == parameter: return", "self.get_pipeline(best_pipeline['id'])): best_pipeline = self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline: if best_pipeline.model_family == ModelFamily.ENSEMBLE:", "and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold: logger.info(\"Objective to optimize binary classification pipeline", "self._num_pipelines() if num_pipelines == 0: return True # check max_time", "{self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result Callback:", "self._handle_keyboard_interrupt(): self._interrupted = True current_batch_pipelines = [] current_batch_pipeline_scores = []", "logger.info(\"Will stop searching for new pipelines after %d seconds.\\n\" %", "of pipelines to train. 
Returns: Dict[str, PipelineBase]: Dictionary keyed by", "TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap = self.problem_configuration['gap'] max_delay = self.problem_configuration['max_delay'] baseline = pipeline_class(parameters={\"pipeline\":", "file_path (str): location to save file pickle_protocol (int): the pickle", "initalize a pipeline with. _ensembling_split_size (float): The amount of the", "collections import defaultdict import cloudpickle import numpy as np import", "_ensembling_split_size < 1): raise ValueError(f\"Ensembling split size must be between", "random_seed=self.random_seed) self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist() self._engine = SequentialEngine(self.X_train, self.y_train, self.ensembling_indices, self,", "or parameters not found in automl results\") return pipeline_class(parameters, random_seed=self.random_seed)", "the AutoMLSearch object. error_callback (callable): Function called when `search()` errors", "allowed_pipelines is provided, this parameter will be ignored. data_splitter (sklearn.model_selection.BaseCrossValidator):", "to exit search (y/n)? \").strip().lower() if choice == \"y\": logger.info(\"Exiting", "(self._pipelines_per_batch * (self.max_batches - 1)) if run_ensembling: if not (0", "import numpy as np import pandas as pd import woodwork", "original stopping criterion and current state, should the search continue?", "searched\"\"\" ascending = True if self.objective.greater_is_better: ascending = False full_rankings_cols", "= add_result_callback self.error_callback = error_callback or log_error_callback self.data_splitter = data_splitter", "type. 
Note that if allowed_pipelines is provided, this parameter will", "full_rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results from all pipelines", "results\") return pipeline_class(parameters, random_seed=self.random_seed) def describe_pipeline(self, pipeline_id, return_dict=False): \"\"\"Describe a", "shape [n_samples, n_features]. Required. y_train (pd.Series, ww.DataColumn): The target training", "{self.max_iterations}\\n\" f\"Max Batches: {self.max_batches}\\n\" f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines or [])}\\n\" f\"Patience:", "The default of None searches over all model families. Run", "pipeline {} is not compatible with problem_type {}.\".format(pipeline.name, self.problem_type.value)) def", "self._add_baseline_pipelines() except KeyboardInterrupt: if self._handle_keyboard_interrupt(): self._interrupted = True current_batch_pipelines =", "time_in_loop return False else: leading_char = \"\" def search(self, show_iteration_plot=True):", "from evalml.pipelines.utils import make_pipeline from evalml.preprocessing import split_data from evalml.problem_types", "splitting method to use. Defaults to StratifiedKFold. tuner_class: The tuner", "the training data. This can be helpful for training pipelines", "will be ignored. data_splitter (sklearn.model_selection.BaseCrossValidator): Data splitting method to use.", "self.objective.greater_is_better: ascending = False full_rankings_cols = [\"id\", \"pipeline_name\", \"score\", \"validation_score\",", "of None searches over all model families. Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to", "get_estimators from evalml.pipelines.utils import make_pipeline from evalml.preprocessing import split_data from", "(callable): Function called when `search()` errors and raises an Exception.", "associated with the provided ID \"\"\" pipeline_results = self.results['pipeline_results'].get(pipeline_id) if", "not to optimize the binary pipeline threshold. 
Defaults to True.", "given holdout data. Arguments: pipelines (list(PipelineBase)): List of pipelines to", "this problem type are allowed. Setting this field will cause", "search. The default of None searches over all model families.", "argument') try: self.problem_type = handle_problem_types(problem_type) except ValueError: raise ValueError('choose one", "data we'll set aside for training ensemble metalearners. Only used", "> 1.0 or tolerance < 0.0): raise ValueError(\"tolerance value must", "max_delay = self.problem_configuration['max_delay'] baseline = pipeline_class(parameters={\"pipeline\": {\"gap\": gap, \"max_delay\": max_delay},", "run. Arguments: pipeline (PipelineBase): pipeline to train and evaluate. \"\"\"", "a score of np.nan on the primary objective {self.objective}.\") self.search_duration", "for obj_name in mean_cv_all_objectives: objective_class = self.objective_name_to_class[obj_name] # In the", "in required_parameters): raise ValueError(\"user_parameters must be a dict containing values", "sorted(['\\t{}'.format(o.name) for o in obj_list]) return '\\n'.join(lines) def _get_funct_name(function): if", "Received {problem_configuration}.\") return problem_configuration or {} def _handle_keyboard_interrupt(self): \"\"\"Presents a", "AutoMLSearch is able to pass along other appropriate parameters by", "number of batches of pipelines to search. Parameters max_time, and", "Dictionary keyed by pipeline name that maps to the fitted", "n_folds = len(cv_data) for fold_data in cv_data: for field, value", "to make sure it is different from the current best", "in [self.objective] + self.additional_objectives} if not isinstance(max_time, (int, float, str,", "the pipelines allowed in the search. The default of None", "num_pipelines >= self.max_iterations: return False # check for early stopping", "to exclude them since # they are not scores if", "score improvement for early stopping. 
Only applicable if patience is", "1 exclusive, received {_ensembling_split_size}\") X_shape = ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices, _,", "of feature types, either numerical or categorical. Categorical features will", "up to {self.max_batches} batches for a total of {self.max_iterations} pipelines.", "recommendations, ending') break try: new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted = False", "in self._results['search_order'][1:]: curr_score = self._results['pipeline_results'][id]['score'] significant_change = abs((curr_score - best_score)", "ValueError(\"No allowed pipelines to search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling = self.ensembling if", "= pipeline.model_family == ModelFamily.BASELINE cv_score = cv_scores.mean() percent_better_than_baseline = {}", "self.y_train, self.ensembling_indices, self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families = list(set([p.model_family for", "False logger.warning(f\"Ensembling is set to True, but max_iterations is too", "out of recommendations, ending') break try: new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted", "get_core_objectives, get_non_core_objectives, get_objective ) from evalml.pipelines import ( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline,", "too small, so ensembling will not run. Set max_batches >=", "current_batch_pipeline_scores = full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All pipelines", "pipeline. Note that the any pipelines that error out during", "self.random_seed = random_seed self.n_jobs = n_jobs self.plot = None try:", "were searched. 
\"\"\" return copy.deepcopy(self._results) @property def rankings(self): \"\"\"Returns a", "best_score = self._results['pipeline_results'][first_id]['score'] num_without_improvement = 0 for id in self._results['search_order'][1:]:", "separate batch after every allowed pipeline class has been iterated", "set to 'auto', chooses: - LogLossBinary for binary classification problems,", "\"\"\" if pipeline_id not in self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline not found\")", "pipeline_parameters if pipeline_parameters is not None else {} self.search_iteration_plot =", "= next((obj for obj in additional_objectives if obj.name == self.objective.name),", "plotly; skipping pipeline search plotting\\n\") self.allowed_pipelines = allowed_pipelines self.allowed_model_families =", "non_core_objectives = get_non_core_objectives() if isinstance(objective, type): if objective in non_core_objectives:", "will call `log_error_callback`. additional_objectives (list): Custom set of objectives to", "self.max_iterations is not None: # Baseline + first batch +", "full_rankings = self.full_rankings current_batch_idx = full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores = full_rankings[current_batch_idx]['score'] if", "None): Non-negative integer describing level of parallelism used for pipelines.", "of unique pipelines to search over per batch is one,", "training result, returns an untrained instance of the specified pipeline", "True): Shows an iteration vs. 
score plot in Jupyter notebook.", "fold_data in cv_data: for field, value in fold_data['all_objective_scores'].items(): # The", "{objective: float(score) / n_folds for objective, score in scores.items()} def", "in self.additional_objectives: if not obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional objective {} is", "self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start = time.time() try: self._add_baseline_pipelines() except KeyboardInterrupt:", "objectives for problem type if not empty. random_seed (int): Seed", "seconds, minutes, or hours. patience (int): Number of iterations without", "an Exception. Callback function takes three positional parameters: the Exception", "= 0.0 self._baseline_cv_scores = {} self.show_batch_output = False self._validate_problem_type() self.problem_configuration", "the requirement that automl search has been run. Arguments: pipeline", "Arguments: file_path (str): location to find file to load Returns:", "(int): Number of iterations without improvement to stop search early.", "ProblemTypes.REGRESSION: baseline = MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS:", "Whether or not to train the best pipeline before returning", "= False if self.allowed_pipelines is None: logger.info(\"Generating pipelines to search", "allowed_pipelines (list(class)): A list of PipelineBase subclasses indicating the pipelines", "dictionary containing the training results for the new pipeline, an", "\"Plotting\" documentation, since Sphinx does not work well with instance", "pipeline evaluations made in the search \"\"\" return len(self._results['pipeline_results']) def", "to search over per batch is one, ensembling will not", "> self.tolerance score_improved = curr_score > best_score if self.objective.greater_is_better else", 
"all_objective_scores.loc[\"coef of var\", c] = std / mean if abs(mean)", "raise ValueError('choose one of (binary, multiclass, regression) as problem_type') self.tuner_class", "= self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name] for parameter in pipeline_rows['parameters']: if pipeline.parameters", "`search_order`: a list describing the order the pipelines were searched.", "class, the pipeline parameters, and the AutoMLSearch object. add_result_callback (callable):", "splitter\") if not objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given objective {} is not", "the search. For example, in time series problems, values should", "# don't show iteration plot outside of a jupyter notebook", "= pipeline_parameters if pipeline_parameters is not None else {} self.search_iteration_plot", "= self._results['pipeline_results'][id]['score'] significant_change = abs((curr_score - best_score) / best_score) >", "set to True, but the number of unique pipelines is", "Used to propose and rank pipelines, but not for optimizing", "if self._searched: return True # Run at least one pipeline", "positional parameters: The pipeline class, the pipeline parameters, and the", "every {len(self.allowed_pipelines) * self._pipelines_per_batch} iterations after that.\") if self.max_batches and", "(str, ObjectiveBase): The objective to optimize for. Used to propose", "curr_score = self._results['pipeline_results'][id]['score'] significant_change = abs((curr_score - best_score) / best_score)", "during automl search. Arguments: pipeline_id (int): pipeline to retrieve Returns:", "of iterations to search. If max_iterations and max_time is not", "of scores. Note that the any pipelines that error out", "exit search (y/n)? 
\").strip().lower() if choice == \"y\": logger.info(\"Exiting AutoMLSearch.\")", "self.tuner_class = tuner_class or SKOptTuner self.start_iteration_callback = start_iteration_callback self.add_result_callback =", "if self.max_batches is not None: logger.info(f\"Searching up to {self.max_batches} batches", "pipelines. \") elif self.max_iterations is not None: logger.info(\"Searching up to", "loop_interrupted = False except KeyboardInterrupt: loop_interrupted = True if self._handle_keyboard_interrupt():", "{} is not compatible with problem_type {}.\".format(pipeline.name, self.problem_type.value)) def _add_baseline_pipelines(self):", "import AutoMLSearchException, PipelineNotFoundError from evalml.model_family import ModelFamily from evalml.objectives import", ") from evalml.pipelines import ( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline,", "list of PipelineBase subclasses indicating the pipelines allowed in the", "== pipeline.name] for parameter in pipeline_rows['parameters']: if pipeline.parameters == parameter:", "= full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores = full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all(): raise", "of PipelineBase subclasses indicating the pipelines allowed in the search.", "to train the best pipeline before returning it. Defaults to", "self.tolerance is None: return True first_id = self._results['search_order'][0] best_score =", "inplace=True) return rankings_df @property def best_pipeline(self): \"\"\"Returns a trained instance", "0 and 1, exclusive. 
Defaults to 0.2 _pipelines_per_batch (int): The", "or not to train the best pipeline before returning it.", "is_baseline: score_to_minimize = -cv_score if self.objective.greater_is_better else cv_score try: self._automl_algorithm.add_result(score_to_minimize,", "is not None and max_batches < 0: raise ValueError(f\"Parameter max_batches", "will not run.\") run_ensembling = False if run_ensembling and self.max_iterations", "SKOptTuner. optimize_thresholds (bool): Whether or not to optimize the binary", "to False, returns an untrained pipeline instance. Returns: PipelineBase: A", "to optimize the binary pipeline threshold. Defaults to True. start_iteration_callback", "if len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All pipelines in the current", "if objective == 'auto': objective = get_default_primary_search_objective(self.problem_type.value) objective = get_objective(objective,", "mean_cv_all_objectives: objective_class = self.objective_name_to_class[obj_name] # In the event add_to_rankings is", "pipeline to retrieve Returns: PipelineBase: untrained pipeline instance associated with", "it is different from the current best pipeline before training", "and self.max_iterations is None: self.show_batch_output = True if run_ensembling: ensemble_nth_batch", "the ID of a pipeline training result, returns an untrained", "for a full list. objective (str, ObjectiveBase): The objective to", "parameters used during training, and the AutoMLSearch object. error_callback (callable):", "iterations without improvement. 
Stopping search early...\".format(self.patience)) return False return True", "train a baseline pipline + one of each pipeline family", "logger.warning(\"Ensembling is set to True, but the number of unique", "get_non_core_objectives() if isinstance(objective, type): if objective in non_core_objectives: raise ValueError(f\"{objective.name.lower()}", "ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices, _, _ = split_data(X_shape, self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size,", "search has been run. Arguments: pipeline (PipelineBase): pipeline to train", "then adds the results to the automl rankings with the", "objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name] = percent_better high_variance_cv = self._check_for_high_variance(pipeline, cv_scores)", "pipeline.parameters, \"score\": cv_score, \"high_variance_cv\": high_variance_cv, \"training_time\": training_time, \"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\":", "= convert_to_seconds(max_time) if isinstance(max_time, str) else max_time self.max_iterations = max_iterations", "the binary pipeline threshold. Defaults to True. start_iteration_callback (callable): Function", "of unique pipelines is one, so ensembling will not run.\")", "is not None: best_pipeline = self.rankings.iloc[0] best_pipeline_name = best_pipeline[\"pipeline_name\"] logger.info(f\"Best", "pipelines to search. 
Parameters max_time, and max_iterations have precedence over", "get_non_core_objectives, get_objective ) from evalml.pipelines import ( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline,", "f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max Time: {self.max_time}\\n\" f\"Max Iterations: {self.max_iterations}\\n\" f\"Max Batches:", "desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline() if self._best_pipeline is not None: best_pipeline =", "if max_iterations is not None and max_iterations < 0: raise", "ValueError(\"patience value must be a positive integer. Received {} instead\".format(patience))", "if self.max_time and elapsed >= self.max_time: return False elif self.max_iterations", "learning problem. See evalml.problem_types.ProblemType.all_problem_types for a full list. objective (str,", "ValueError('choose one of (binary, multiclass, regression) as problem_type') self.tuner_class =", "one. The first batch will train a baseline pipline +", "batch after every allowed pipeline class has been iterated over.", "tolerance and (tolerance > 1.0 or tolerance < 0.0): raise", "= False log_title(logger, \"Beginning pipeline search\") logger.info(\"Optimizing for %s. \"", "set of parameters used with each pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\")", "plot in Jupyter notebook. Disabled by default in non-Jupyter enviroments.", "logger.info(\"Allowed model families: %s\\n\" % \", \".join([model.value for model in", "self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids self._results['search_order'].append(pipeline_id) if not is_baseline: score_to_minimize = -cv_score", "add_to_rankings(self, pipeline): \"\"\"Fits and evaluates a given pipeline then adds", "to a dictionary of scores. 
Note that the any pipelines", "self.show_batch_output = False self._validate_problem_type() self.problem_configuration = self._validate_problem_configuration(problem_configuration) self._train_best_pipeline = train_best_pipeline", "@staticmethod def load(file_path): \"\"\"Loads AutoML object at file path Arguments:", "def _get_funct_name(function): if callable(function): return function.__name__ else: return None search_desc", "\" % self.max_iterations) if self.max_time is not None: logger.info(\"Will stop", "is not None else {} self.search_iteration_plot = None self._interrupted =", "the number of unique pipelines is one, so ensembling will", "self.optimize_thresholds = optimize_thresholds self.ensembling = ensembling if objective == 'auto':", "pipeline.model_family == ModelFamily.BASELINE cv_score = cv_scores.mean() percent_better_than_baseline = {} mean_cv_all_objectives", "self.objective.can_optimize_threshold: logger.info(\"Objective to optimize binary classification pipeline thresholds for: {}\".format(self.objective))", "is set to False, returns an untrained pipeline instance. Returns:", "This can be helpful for training pipelines once the search", "pipelines (list(PipelineBase)): List of pipelines to train. Returns: Dict[str, PipelineBase]:", "This will not start a new pipeline search after the", "{} instead\".format(tolerance)) self.patience = patience self.tolerance = tolerance or 0.0", "to the user asking if they want to stop the", "during fit-time. When set to 'auto', chooses: - LogLossBinary for", "ensembling_indices, _, _ = split_data(X_shape, self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices", "not to train the best pipeline before returning it. 
Defaults", "first batch will train a baseline pipline + one of", "log_error_callback from evalml.automl.engine import SequentialEngine from evalml.automl.utils import ( check_all_pipeline_names_unique,", "for fold in pipeline_results[\"cv_data\"]] all_objective_scores = pd.DataFrame(all_objective_scores) for c in", "max_batches=None, problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated pipeline search Arguments:", "train_indices = self.data_splitter.transform_sample(X_train, y_train) X_train = X_train.iloc[train_indices] y_train = y_train.iloc[train_indices]", "if self.objective.greater_is_better: ascending = False full_rankings_cols = [\"id\", \"pipeline_name\", \"score\",", "False self._validate_problem_type() self.problem_configuration = self._validate_problem_configuration(problem_configuration) self._train_best_pipeline = train_best_pipeline self._best_pipeline =", "None try: self.plot = PipelineSearchPlots(self) except ImportError: logger.warning(\"Unable to import", "really want to exit search (y/n)? \").strip().lower() if choice ==", "None: best_pipeline = self.rankings.iloc[0] best_pipeline_name = best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline: {best_pipeline_name}\")", "ww.DataTable): The input training data of shape [n_samples, n_features]. 
Required.", "mean all_objective_scores.loc[\"std\", c] = std all_objective_scores.loc[\"coef of var\", c] =", "Dict[str, PipelineBase]: Dictionary keyed by pipeline name that maps to", "the exception and stacktrace will be displayed in the log.", "3] + \"...\" desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number = 1 if", "allowed pipelines to search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling = self.ensembling if run_ensembling", "them since # they are not scores if field in", "'max_delay'} if not problem_configuration or not all(p in problem_configuration for", "self.max_iterations) if self.max_time is not None: logger.info(\"Will stop searching for", "1 if num_without_improvement >= self.patience: logger.info(\"\\n\\n{} iterations without improvement. Stopping", "parameters needed to configure the search. For example, in time", "leading_char = \"\" def search(self, show_iteration_plot=True): \"\"\"Find the best pipeline", "the AutoMLSearch object. Must also accepts kwargs, so AutoMLSearch is", "either numerical or categorical. 
Categorical features will automatically be encoded", "runs ensembling in a separate batch after every allowed pipeline", "= all_objective_scores[c].astype(\"object\") continue mean = all_objective_scores[c].mean(axis=0) std = all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\",", "self._engine.train_batch(pipelines) def score_pipelines(self, pipelines, X_holdout, y_holdout, objectives): \"\"\"Score a list", "try: if not loop_interrupted: current_batch_pipelines = self._automl_algorithm.next_batch() except StopIteration: logger.info('AutoML", "to propose and rank pipelines, but not for optimizing each", "binary classification problems, - LogLossMulticlass for multiclass classification problems, and", "1 logger.info(\"Using default limit of max_batches=1.\\n\") if patience and (not", "= n_jobs self.plot = None try: self.plot = PipelineSearchPlots(self) except", "AutoMLSearch._MAX_NAME_LEN: desc = desc[:AutoMLSearch._MAX_NAME_LEN - 3] + \"...\" desc =", "%d seconds.\\n\" % self.max_time) logger.info(\"Allowed model families: %s\\n\" % \",", "see options. 
Change `binary` to `multiclass` or `regression` depending on", "results for the new pipeline, an untrained_pipeline containing the parameters", "instance associated with the provided ID \"\"\" pipeline_results = self.results['pipeline_results'].get(pipeline_id)", "search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling = self.ensembling if run_ensembling and len(self.allowed_pipelines) ==", "\"\"\" leading_char = \"\\n\" start_of_loop = time.time() while True: choice", "logger.info(f\"Ensembling will run every {ensemble_nth_batch} batches.\") self.max_iterations = (1 +", "called before search _baseline_cv_scores will be empty so we will", "o in [self.objective] + self.additional_objectives} if not isinstance(max_time, (int, float,", "or tolerance < 0.0): raise ValueError(\"tolerance value must be a", "pipeline_id def _check_for_high_variance(self, pipeline, cv_scores, threshold=0.2): \"\"\"Checks cross-validation scores and", "self.problem_type: raise ValueError(\"Given pipeline {} is not compatible with problem_type", "= False if run_ensembling and self.max_iterations is not None: #", "is None: logger.info(\"Generating pipelines to search over...\") allowed_estimators = get_estimators(self.problem_type,", "= self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else: X_train = self.X_train y_train = self.y_train", "self.error_callback = error_callback or log_error_callback self.data_splitter = data_splitter self.optimize_thresholds =", "KeyboardInterrupt: if self._handle_keyboard_interrupt(): self._interrupted = True current_batch_pipelines = [] current_batch_pipeline_scores", "of shape [n_samples, n_features]. Required. 
y_train (pd.Series, ww.DataColumn): The target", "if self.patience is None or self.tolerance is None: return True", "num_without_improvement = 0 for id in self._results['search_order'][1:]: curr_score = self._results['pipeline_results'][id]['score']", "pipeline_id (int): pipeline to describe return_dict (bool): If True, return", "best_pipeline(self): \"\"\"Returns a trained instance of the best pipeline and", "over stopping the search. problem_configuration (dict, None): Additional parameters needed", "significant_change: best_score = curr_score num_without_improvement = 0 else: num_without_improvement +=", "= self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted = False except KeyboardInterrupt: loop_interrupted = True", "self._handle_keyboard_interrupt(): break full_rankings = self.full_rankings current_batch_idx = full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores =", "a list of pipelines on the training data. This can", "make_data_splitter ) from evalml.exceptions import AutoMLSearchException, PipelineNotFoundError from evalml.model_family import", "max_iterations is too small, so ensembling will not run. Set", "if self._train_best_pipeline: if best_pipeline.model_family == ModelFamily.ENSEMBLE: X_train, y_train = self.X_train.iloc[self.ensembling_indices],", "Note that the any pipelines that error out during training", "\".join([model.value for model in self.allowed_model_families])) self.search_iteration_plot = None if self.plot:", "\"\"\"Return the number of pipeline evaluations which have been made", "if self.allowed_pipelines is None: logger.info(\"Generating pipelines to search over...\") allowed_estimators", "_pre_evaluation_callback(self, pipeline): if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self) desc = f\"{pipeline.name}\"", "list describing the order the pipelines were searched. 
\"\"\" return", "pipeline_id (int): pipeline to retrieve Returns: PipelineBase: untrained pipeline instance", "chooses: - LogLossBinary for binary classification problems, - LogLossMulticlass for", "iteration. Callback function takes three positional parameters: A dictionary containing", "Minimum percentage difference to qualify as score improvement for early", "valid data splitter\") if not objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given objective {}", "float between 0.0 and 1.0 inclusive. Received {} instead\".format(tolerance)) self.patience", "tuner_class or SKOptTuner self.start_iteration_callback = start_iteration_callback self.add_result_callback = add_result_callback self.error_callback", "pipelines is one, so ensembling will not run.\") run_ensembling =", "= time.time() - self._start if self.max_time and elapsed >= self.max_time:", "StratifiedKFold. tuner_class: The tuner class to use. Defaults to SKOptTuner.", "supervised learning tasks. problem_type (str or ProblemTypes): type of supervised", "initialized with the parameters used to train that pipeline during", "= pipeline.name high_variance_cv = bool(abs(cv_scores.std() / cv_scores.mean()) > threshold) if", "pipeline name that maps to the fitted pipeline. 
Note that", "baseline = ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type == ProblemTypes.REGRESSION: baseline = MeanBaselineRegressionPipeline(parameters={})", "import ( get_logger, log_subtitle, log_title, time_elapsed, update_pipeline ) logger =", "= all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format', '{:.3f}'.format, 'expand_frame_repr', False): logger.info(all_objective_scores) if return_dict:", "in mean_cv_all_objectives: objective_class = self.objective_name_to_class[obj_name] # In the event add_to_rankings", "not obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional objective {} is not compatible with", "after each pipeline training iteration. Callback function takes three positional", "be a float between 0.0 and 1.0 inclusive. Received {}", "total of {self.max_iterations} pipelines. \") elif self.max_iterations is not None:", "allowed_pipelines self.allowed_model_families = allowed_model_families self._automl_algorithm = None self._start = 0.0", "= len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] = { \"id\": pipeline_id, \"pipeline_name\": pipeline.name, \"pipeline_class\":", "ImportError: logger.warning(\"Unable to import plotly; skipping pipeline search plotting\\n\") self.allowed_pipelines", "problem type. 
Note that if allowed_pipelines is provided, this parameter", "\\n{'='*20}\\n{rankings_str}\" return search_desc + rankings_desc def _validate_problem_configuration(self, problem_configuration=None): if self.problem_type", "callable(function): return function.__name__ else: return None search_desc = ( f\"{handle_problem_types(self.problem_type).name}", "run_ensembling = self.ensembling if run_ensembling and len(self.allowed_pipelines) == 1: logger.warning(\"Ensembling", "of objectives for problem_type, remove it existing_main_objective = next((obj for", "mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if is_baseline: self._baseline_cv_scores = mean_cv_all_objectives for", "leading_char = \"\\n\" start_of_loop = time.time() while True: choice =", "problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist() self._engine = SequentialEngine(self.X_train, self.y_train,", "= train_best_pipeline self._best_pipeline = None self._searched = False self.X_train =", "and \"# Testing\", so we want to exclude them since", "= get_logger(__file__) class AutoMLSearch: \"\"\"Automated Pipeline search.\"\"\" _MAX_NAME_LEN = 40", "used when ensembling is True. Must be between 0 and", "unique pipelines is one, so ensembling will not run.\") run_ensembling", "be passed in for the gap and max_delay variables. 
train_best_pipeline", "- 1) // ensemble_nth_batch if num_ensemble_batches == 0: run_ensembling =", "self.y_train, estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters) for estimator in allowed_estimators] if self.allowed_pipelines", "/ cv_scores.mean()) > threshold) if high_variance_cv: logger.warning(f\"High coefficient of variation", "rankings with the requirement that automl search has been run.", "not None and not issubclass(self.data_splitter.__class__, BaseCrossValidator): raise ValueError(\"Not a valid", "jupyter notebook if show_iteration_plot: try: get_ipython except NameError: show_iteration_plot =", "else: pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap", "start a new pipeline search after the duration has elapsed.", "desc = f\"{pipeline.name}\" if len(desc) > AutoMLSearch._MAX_NAME_LEN: desc = desc[:AutoMLSearch._MAX_NAME_LEN", "max_batches >= {ensemble_nth_batch + 1} to run ensembling.\") else: logger.info(f\"Ensembling", "True if yes, False if no. \"\"\" if self._interrupted: return", "pipeline class has been iterated over. If the number of", "as ww from sklearn.model_selection import BaseCrossValidator from .pipeline_search_plots import PipelineSearchPlots", "max_delay \" f\"parameters. 
Received {problem_configuration}.\") return problem_configuration or {} def", "self._validate_problem_type() self.problem_configuration = self._validate_problem_configuration(problem_configuration) self._train_best_pipeline = train_best_pipeline self._best_pipeline = None", "True, but the number of unique pipelines is one, so", "{first_ensembling_iteration} iteration and every {len(self.allowed_pipelines) * self._pipelines_per_batch} iterations after that.\")", "patience self.tolerance = tolerance or 0.0 self._results = { 'pipeline_results':", "input(leading_char + \"Do you really want to exit search (y/n)?", "0.0 and 1.0 inclusive. Received {} instead\".format(tolerance)) self.patience = patience", "search should terminate early \"\"\" leading_char = \"\\n\" start_of_loop =", "for scoring. objectives (list(str), list(ObjectiveBase)): Objectives used for scoring. Returns:", "cloudpickle.load(f) def train_pipelines(self, pipelines): \"\"\"Train a list of pipelines on", "ValueError(f\"Parameter max_iterations must be None or non-negative. 
Received {max_iterations}.\") self.max_time", "percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0] } if pipeline.model_family == ModelFamily.ENSEMBLE: input_pipeline_ids =", "lines = sorted(['\\t{}'.format(o.name) for o in obj_list]) return '\\n'.join(lines) def", "data splitter\") if not objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given objective {} is", "import get_estimators from evalml.pipelines.utils import make_pipeline from evalml.preprocessing import split_data", "if X_train is None: raise ValueError('Must specify training data as", "gap = self.problem_configuration['gap'] max_delay = self.problem_configuration['max_delay'] baseline = pipeline_class(parameters={\"pipeline\": {\"gap\":", "= mean_cv_all_objectives for obj_name in mean_cv_all_objectives: objective_class = self.objective_name_to_class[obj_name] #", "= [] current_batch_pipeline_scores = [] new_pipeline_ids = [] loop_interrupted =", "learning tasks. problem_type (str or ProblemTypes): type of supervised learning", "using the X_train argument') if y_train is None: raise ValueError('Must", "= std / mean if abs(mean) > 0 else np.inf", "desc = f\"\\nSearch finished after {elapsed_time}\" desc = desc.ljust(self._MAX_NAME_LEN) logger.info(desc)", "objective to optimize for. Used to propose and rank pipelines,", "other appropriate parameters by default. Defaults to None, which will", "parameters: the Exception raised, the traceback, and the AutoMLSearch object.", "rankings_desc = f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\" return search_desc + rankings_desc def", "and max_delay \" f\"parameters. Received {problem_configuration}.\") return problem_configuration or {}", "elapsed_time = time_elapsed(self._start) desc = f\"\\nSearch finished after {elapsed_time}\" desc", "a baseline pipeline to the data. This is the first", "an integer, then the time will be in seconds. 
For", "break try: new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted = False except KeyboardInterrupt:", "= ensembling if objective == 'auto': objective = get_default_primary_search_objective(self.problem_type.value) objective", "tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params ) def _pre_evaluation_callback(self,", "self._searched: logger.info(\"AutoMLSearch.search() has already been run and will not run", "example, in time series problems, values should be passed in", "- self._start if self.max_time and elapsed >= self.max_time: return False", "ProblemTypes.MULTICLASS: baseline = ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type == ProblemTypes.REGRESSION: baseline =", "be displayed in the log. \"\"\" return self._engine.train_batch(pipelines) def score_pipelines(self,", "data as a 2d array using the X_train argument') if", "< 0: raise ValueError(f\"Parameter max_batches must be None or non-negative.", "bool: True if yes, False if no. \"\"\" if self._interrupted:", "> threshold) if high_variance_cv: logger.warning(f\"High coefficient of variation (cv >=", "float, int, string or None. Received {type(max_time)} with value {str(max_time)}..\")", "on the problem type. Note that if allowed_pipelines is provided,", "and raises an Exception. Callback function takes three positional parameters:", "True first_id = self._results['search_order'][0] best_score = self._results['pipeline_results'][first_id]['score'] num_without_improvement = 0", "to 0. 
n_jobs (int or None): Non-negative integer describing level", "pd import woodwork as ww from sklearn.model_selection import BaseCrossValidator from", "Splitting: {self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result", "f\"Additional Objectives: {_print_list(self.additional_objectives or [])}\\n\" f\"Random Seed: {self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\"", "every search num_pipelines = self._num_pipelines() if num_pipelines == 0: return", "try: self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError: pass if self.search_iteration_plot: self.search_iteration_plot.update()", "are used. ensembling (boolean): If True, runs ensembling in a", "data.\") return high_variance_cv def get_pipeline(self, pipeline_id): \"\"\"Given the ID of", "self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self) desc = f\"{pipeline.name}\" if len(desc) > AutoMLSearch._MAX_NAME_LEN:", "mean if abs(mean) > 0 else np.inf all_objective_scores = all_objective_scores.fillna(\"-\")", "problem_type, remove it existing_main_objective = next((obj for obj in additional_objectives", "y_holdout (ww.DataTable, pd.DataFrame): Holdout targets for scoring. objectives (list(str), list(ObjectiveBase)):", "fold in pipeline_results[\"cv_data\"]] all_objective_scores = pd.DataFrame(all_objective_scores) for c in all_objective_scores:", "retrieve Returns: PipelineBase: untrained pipeline instance associated with the provided", "pipeline instance. Returns: PipelineBase: A trained instance of the best", "Exception raised, the traceback, and the AutoMLSearch object. Must also", "and the AutoMLSearch object. Must also accepts kwargs, so AutoMLSearch", "stop search early. Must be positive. 
If None, early stopping", "is None: return True first_id = self._results['search_order'][0] best_score = self._results['pipeline_results'][first_id]['score']", "PipelineNotFoundError(\"automl search must be run before selecting `best_pipeline`.\") return self._best_pipeline", "called after each pipeline training iteration. Callback function takes three", "ensemble_nth_batch = len(self.allowed_pipelines) + 1 num_ensemble_batches = (self.max_batches - 1)", "def _find_best_pipeline(self): \"\"\"Finds the best pipeline in the rankings If", "best pipeline before returning it. Defaults to True. pipeline_parameters (dict):", "pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap =", "logger.info(\"Exiting AutoMLSearch.\") return True elif choice == \"n\": # So", "cv_scores) pipeline_id = len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] = { \"id\": pipeline_id, \"pipeline_name\":", "and self.max_iterations is not None: # Baseline + first batch", "terminate early \"\"\" leading_char = \"\\n\" start_of_loop = time.time() while", "= {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap = self.problem_configuration['gap']", "existing_main_objective = next((obj for obj in additional_objectives if obj.name ==", "{self.max_batches}\\n\" f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines or [])}\\n\" f\"Patience: {self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\"", "'all_objective_scores' field contains scores for all objectives # but also", "'{:.3f}'.format, 'expand_frame_repr', False): logger.info(all_objective_scores) if return_dict: return 
pipeline_results def add_to_rankings(self,", "a new pipeline search after the duration has elapsed. If", "be specified as seconds, minutes, or hours. patience (int): Number", "default limit of max_batches=1.\\n\") if patience and (not isinstance(patience, int)", "instance of the best pipeline and parameters found during automl", "raise ValueError('Must specify training data target values as a 1d", "self._train_best_pipeline = train_best_pipeline self._best_pipeline = None self._searched = False self.X_train", "make_data_splitter(self.X_train, self.y_train, self.problem_type, self.problem_configuration, n_splits=3, shuffle=True, random_seed=self.random_seed) self.data_splitter = self.data_splitter", "data set. Arguments: feature_types (list, optional): list of feature types,", "= self.problem_configuration['max_delay'] baseline = pipeline_class(parameters={\"pipeline\": {\"gap\": gap, \"max_delay\": max_delay}, \"Time", "pipeline thresholds for: {}\".format(self.objective)) logger.info(\"Total training time (including CV): %.1f", "objectives # but also fields like \"# Training\" and \"#", "function takes three positional parameters: the Exception raised, the traceback,", "{str(max_time)}..\") if isinstance(max_time, (int, float)) and max_time < 0: raise", "abs((curr_score - best_score) / best_score) > self.tolerance score_improved = curr_score", "5. max_time (int, str): Maximum time to search for pipelines.", "baseline pipline + one of each pipeline family allowed in", "displayed in the log. 
\"\"\" return self._engine.train_batch(pipelines) def score_pipelines(self, pipelines,", "max_time=None, patience=None, tolerance=None, data_splitter=None, allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None, add_result_callback=None, error_callback=None, additional_objectives=None,", "best_score if self.objective.greater_is_better else curr_score < best_score if score_improved and", "if not self._best_pipeline: raise PipelineNotFoundError(\"automl search must be run before", "all_objective_scores[c] = all_objective_scores[c].astype(\"object\") continue mean = all_objective_scores[c].mean(axis=0) std = all_objective_scores[c].std(axis=0)", "self._results['pipeline_results'][pipeline_id] = { \"id\": pipeline_id, \"pipeline_name\": pipeline.name, \"pipeline_class\": type(pipeline), \"pipeline_summary\":", "use. Defaults to StratifiedKFold. tuner_class: The tuner class to use.", "if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self) desc = f\"{pipeline.name}\" if len(desc)", "(self._best_pipeline and self._best_pipeline == self.get_pipeline(best_pipeline['id'])): best_pipeline = self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline:", "= False except KeyboardInterrupt: loop_interrupted = True if self._handle_keyboard_interrupt(): break", "training data of length [n_samples]. 
Required for supervised learning tasks.", "( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max Time: {self.max_time}\\n\"", "= \"\" if not self.rankings.empty: rankings_str = self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc", "remove it existing_main_objective = next((obj for obj in additional_objectives if", "+ 1 first_ensembling_iteration = (1 + len(self.allowed_pipelines) + len(self.allowed_pipelines) *", "cv_scores, threshold=0.2): \"\"\"Checks cross-validation scores and logs a warning if", "return self._best_pipeline def save(self, file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML object at", "dict of the parameters used to initalize a pipeline with.", "= optimize_thresholds self.ensembling = ensembling if objective == 'auto': objective", "pickle data stream format. Returns: None \"\"\" with open(file_path, 'wb')", "\" \"Use evalml.objectives.utils.get_core_objective_names() \" \"to get all objective names allowed", "_ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated pipeline search Arguments: X_train (pd.DataFrame, ww.DataTable): The", "Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives: {_print_list(self.additional_objectives", "curr_score num_without_improvement = 0 else: num_without_improvement += 1 if num_without_improvement", "objective_class = self.objective_name_to_class[obj_name] # In the event add_to_rankings is called", "import IterativeAlgorithm from evalml.automl.callbacks import log_error_callback from evalml.automl.engine import SequentialEngine", "Jupyter notebook. Disabled by default in non-Jupyter enviroments. 
\"\"\" if", "X_train = X_train.iloc[train_indices] y_train = y_train.iloc[train_indices] best_pipeline = self._engine.train_pipeline(best_pipeline, X_train,", "pipeline: {best_pipeline_name}\") logger.info(f\"Best pipeline {self.objective.name}: {best_pipeline['score']:3f}\") self._searched = True def", "not compatible with a {} problem.\".format(self.objective.name, self.problem_type.value)) if additional_objectives is", "objective (str, ObjectiveBase): The objective to optimize for. Used to", "f\"Max Time: {self.max_time}\\n\" f\"Max Iterations: {self.max_iterations}\\n\" f\"Max Batches: {self.max_batches}\\n\" f\"Allowed", "not None: best_pipeline = self.rankings.iloc[0] best_pipeline_name = best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline:", "pipeline search after the duration has elapsed. If it is", "not for optimizing each pipeline during fit-time. When set to", "= time_elapsed(self._start) desc = f\"\\nSearch finished after {elapsed_time}\" desc =", "if field in objective_name_to_class: scores[field] += value return {objective: float(score)", "a pipeline with. _ensembling_split_size (float): The amount of the training", "in obj_list]) return '\\n'.join(lines) def _get_funct_name(function): if callable(function): return function.__name__", "to be ignored. allowed_model_families (list(str, ModelFamily)): The model families to", "maps to a dictionary of scores. Note that the any", "with a {} problem.\".format(obj.name, self.problem_type.value)) for pipeline in self.allowed_pipelines or", "run_ensembling and self.max_iterations is not None: # Baseline + first", "dictionary of information about pipeline. Defaults to False. Returns: Description", "optimize_thresholds (bool): Whether or not to optimize the binary pipeline", "the gap and max_delay variables. 
train_best_pipeline (boolean): Whether or not", "= {'gap', 'max_delay'} if not problem_configuration or not all(p in", "o in obj_list]) return '\\n'.join(lines) def _get_funct_name(function): if callable(function): return", "error_callback (callable): Function called when `search()` errors and raises an", "- R2 for regression problems. max_iterations (int): Maximum number of", "they are not scores if field in objective_name_to_class: scores[field] +=", "will default to max_iterations of 5. max_time (int, str): Maximum", "self._should_continue(): try: if not loop_interrupted: current_batch_pipelines = self._automl_algorithm.next_batch() except StopIteration:", "self.data_splitter is not None and not issubclass(self.data_splitter.__class__, BaseCrossValidator): raise ValueError(\"Not", "len(self._results['pipeline_results']) def _should_continue(self): \"\"\"Given the original stopping criterion and current", "False # for add_to_rankings if self._searched: return True # Run", "that pipeline during automl search. Arguments: pipeline_id (int): pipeline to", "parameters by default. Defaults to None, which will call `log_error_callback`.", "over...\") allowed_estimators = get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators set to {[estimator.name for", "split size must be between 0 and 1 exclusive, received", "The amount of the training data we'll set aside for", "Arguments: pipelines (list(PipelineBase)): List of pipelines to train. X_holdout (ww.DataTable,", "pipeline, an untrained_pipeline containing the parameters used during training, and", "then the time will be in seconds. 
For strings, time", "choice == \"n\": # So that the time in this", "False): logger.info(all_objective_scores) if return_dict: return pipeline_results def add_to_rankings(self, pipeline): \"\"\"Fits", "= desc[:AutoMLSearch._MAX_NAME_LEN - 3] + \"...\" desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number", "_validate_problem_configuration(self, problem_configuration=None): if self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters = {'gap', 'max_delay'}", "rankings_df = pd.DataFrame(self._results['pipeline_results'].values()) rankings_df = rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending, inplace=True) rankings_df.reset_index(drop=True,", "not None and max_batches < 0: raise ValueError(f\"Parameter max_batches must", "best pipeline and parameters found during automl search. If `train_best_pipeline`", "for the data set. Arguments: feature_types (list, optional): list of", "Returns: Dict[str, PipelineBase]: Dictionary keyed by pipeline name that maps", "is not None: logger.info(\"Searching up to %s pipelines. \" %", "current_batch_pipelines = [] current_batch_pipeline_scores = [] new_pipeline_ids = [] loop_interrupted", "best pipeline before training and thresholding\"\"\" if len(self.rankings) == 0:", "primary objective {self.objective}.\") self.search_duration = time.time() - self._start elapsed_time =", "the primary objective {self.objective}.\") self.search_duration = time.time() - self._start elapsed_time", "in additional_objectives] self.additional_objectives = additional_objectives self.objective_name_to_class = {o.name: o for", "searches over all model families. 
Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see options.", "is None: raise ValueError('Must specify training data as a 2d", "== 'auto': objective = get_default_primary_search_objective(self.problem_type.value) objective = get_objective(objective, return_instance=False) self.objective", "train and score pipelines.\") if self.max_batches is not None: logger.info(f\"Searching", "If True, return dictionary of information about pipeline. Defaults to", "self.pipeline_parameters self._automl_algorithm = IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1],", "def save(self, file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML object at file path", "pipeline in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set to {self.allowed_model_families}\") if len(self.problem_configuration): pipeline_params", "is one, ensembling will not run. Defaults to False. max_batches", "max_batches self._pipelines_per_batch = _pipelines_per_batch if not self.max_iterations and not self.max_time", "obj_list]) return '\\n'.join(lines) def _get_funct_name(function): if callable(function): return function.__name__ else:", "save(self, file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML object at file path Arguments:", "self.objective.name), None) if existing_main_objective is not None: additional_objectives.remove(existing_main_objective) else: additional_objectives", "raises an Exception. 
Callback function takes three positional parameters: the", "self.objective_name_to_class = {o.name: o for o in [self.objective] + self.additional_objectives}", "self._searched = False self.X_train = infer_feature_types(X_train) self.y_train = infer_feature_types(y_train) self.ensembling_indices", "self.plot = PipelineSearchPlots(self) except ImportError: logger.warning(\"Unable to import plotly; skipping", "iterations after that.\") if self.max_batches and self.max_iterations is None: self.show_batch_output", "(1 + len(self.allowed_pipelines) + self._pipelines_per_batch * (self.max_batches - 1 -", "fit-time. When set to 'auto', chooses: - LogLossBinary for binary", "if max_batches is not None and max_batches < 0: raise", "{best_pipeline['score']:3f}\") self._searched = True def _find_best_pipeline(self): \"\"\"Finds the best pipeline", "continue? Returns: bool: True if yes, False if no. \"\"\"", "self.additional_objectives} if not isinstance(max_time, (int, float, str, type(None))): raise TypeError(f\"Parameter", ") from evalml.exceptions import AutoMLSearchException, PipelineNotFoundError from evalml.model_family import ModelFamily", "but not for optimizing each pipeline during fit-time. When set", "None search_desc = ( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\"", "self._pipelines_per_batch = _pipelines_per_batch if not self.max_iterations and not self.max_time and", "specified pipeline. Includes information such as type of pipeline components,", "The number of pipelines to train for every batch after", "1.0 inclusive. Received {} instead\".format(tolerance)) self.patience = patience self.tolerance =", "used. For n_jobs below -1, (n_cpus + 1 + n_jobs)", "below -1, (n_cpus + 1 + n_jobs) are used. 
ensembling", "= allowed_model_families self._automl_algorithm = None self._start = 0.0 self._baseline_cv_scores =", "2d array using the X_train argument') if y_train is None:", "of information about pipeline. Defaults to False. Returns: Description of", "if num_ensemble_batches == 0: run_ensembling = False logger.warning(f\"Ensembling is set", "= {} mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if is_baseline: self._baseline_cv_scores =", "not None: # Baseline + first batch + each pipeline", "can be specified as seconds, minutes, or hours. patience (int):", "o for o in [self.objective] + self.additional_objectives} if not isinstance(max_time,", "stopping criterion and current state, should the search continue? Returns:", "(callable): Function called after each pipeline training iteration. Callback function", "if pipeline_id not in self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline not found\") pipeline", "a dictionary of scores. Note that the any pipelines that", "Defaults to StratifiedKFold. tuner_class: The tuner class to use. Defaults", "been made Returns: int: the number of pipeline evaluations made", "threshold=0.2): \"\"\"Checks cross-validation scores and logs a warning if variance", "= self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline: if best_pipeline.model_family == ModelFamily.ENSEMBLE: X_train, y_train", "string or None. Received {type(max_time)} with value {str(max_time)}..\") if isinstance(max_time,", "evaluations made in the search \"\"\" return len(self._results['pipeline_results']) def _should_continue(self):", "with scoring results from the highest-scoring set of parameters used", "improvement for early stopping. 
Only applicable if patience is not", "return function.__name__ else: return None search_desc = ( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\"", "additional_objectives=None, random_seed=0, n_jobs=-1, tuner_class=None, optimize_thresholds=True, ensembling=False, max_batches=None, problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None,", "if no. \"\"\" if self._interrupted: return False # for add_to_rankings", "will run at the {first_ensembling_iteration} iteration and every {len(self.allowed_pipelines) *", "so ensembling will not run. Set max_iterations >= {first_ensembling_iteration} to", "to {self.max_batches} batches for a total of {self.max_iterations} pipelines. \")", "with the requirement that automl search has been run. Arguments:", "to a copy of the results from `automl_search`. Returns: dict", "event add_to_rankings is called before search _baseline_cv_scores will be empty", "optimize_thresholds=True, ensembling=False, max_batches=None, problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated pipeline", "_pipelines_per_batch if not self.max_iterations and not self.max_time and not self.max_batches:", "iterations without improvement to stop search early. Must be positive.", "well with instance attributes. plot = PipelineSearchPlots def __init__(self, X_train=None,", "cv_data = evaluation_results['cv_data'] cv_scores = evaluation_results['cv_scores'] is_baseline = pipeline.model_family ==", "The target training data of length [n_samples]. 
Required for supervised", "X_train=None, y_train=None, problem_type=None, objective='auto', max_iterations=None, max_time=None, patience=None, tolerance=None, data_splitter=None, allowed_pipelines=None,", "len(self.allowed_pipelines) == 1: logger.warning(\"Ensembling is set to True, but the", "`pipeline_results`: a dict with results from each pipeline, and `search_order`:", "\"\"\"Score a list of pipelines on the given holdout data.", "Function called after each pipeline training iteration. Callback function takes", "that allows access to a copy of the results from", "\" \"to get all objective names allowed in automl.\") return", "pipeline parameters, and the AutoMLSearch object. add_result_callback (callable): Function called", "in fold_data['all_objective_scores'].items(): # The 'all_objective_scores' field contains scores for all", "train the best pipeline before returning it. Defaults to True.", "If set to -1, all CPUs are used. For n_jobs", "= [] new_pipeline_ids = [] loop_interrupted = False while self._should_continue():", "objectives to score on. Will override default objectives for problem", "not self._results['pipeline_results']: return pd.DataFrame(columns=full_rankings_cols) rankings_df = pd.DataFrame(self._results['pipeline_results'].values()) rankings_df = rankings_df[full_rankings_cols]", "(int, float)) and max_time < 0: raise ValueError(f\"Parameter max_time must", "and not issubclass(self.data_splitter.__class__, BaseCrossValidator): raise ValueError(\"Not a valid data splitter\")", "one, so ensembling will not run.\") run_ensembling = False if", "= pipeline_results.get('parameters') if pipeline_class is None or parameters is None:", "/ n_folds for objective, score in scores.items()} def _post_evaluation_callback(self, pipeline,", "data of shape [n_samples, n_features]. Required. y_train (pd.Series, ww.DataColumn): The", "Data splitting method to use. Defaults to StratifiedKFold. 
tuner_class: The", "the search \"\"\" return len(self._results['pipeline_results']) def _should_continue(self): \"\"\"Given the original", "score_improved = curr_score > best_score if self.objective.greater_is_better else curr_score <", "num_pipelines == 0: return True # check max_time and max_iterations", "X_train.iloc[train_indices] y_train = y_train.iloc[train_indices] best_pipeline = self._engine.train_pipeline(best_pipeline, X_train, y_train, self.optimize_thresholds,", "used to train that pipeline during automl search. Arguments: pipeline_id", "raise ValueError(f\"{objective.name.lower()} is not allowed in AutoML! \" \"Use evalml.objectives.utils.get_core_objective_names()", "be positive. If None, early stopping is disabled. Defaults to", "= curr_score > best_score if self.objective.greater_is_better else curr_score < best_score", "max_time must be None or non-negative. Received {max_time}.\") if max_batches", "[n_samples]. Required for supervised learning tasks. problem_type (str or ProblemTypes):", "pipeline (PipelineBase): pipeline to train and evaluate. \"\"\" pipeline_rows =", "be run before selecting `best_pipeline`.\") return self._best_pipeline def save(self, file_path,", "n_jobs below -1, (n_cpus + 1 + n_jobs) are used.", "return False # check for early stopping if self.patience is", "positive integer. Received {} instead\".format(patience)) if tolerance and (tolerance >", "{self.max_iterations} pipelines. \") elif self.max_iterations is not None: logger.info(\"Searching up", "Arguments: pipeline (PipelineBase): pipeline to train and evaluate. 
\"\"\" pipeline_rows", "None self._interrupted = False if self.allowed_pipelines is None: logger.info(\"Generating pipelines", "pipeline.summary, \"parameters\": pipeline.parameters, \"score\": cv_score, \"high_variance_cv\": high_variance_cv, \"training_time\": training_time, \"cv_data\":", "return cloudpickle.load(f) def train_pipelines(self, pipelines): \"\"\"Train a list of pipelines", "for obj in additional_objectives if obj.name == self.objective.name), None) if", "self.problem_type.value)) def _add_baseline_pipelines(self): \"\"\"Fits a baseline pipeline to the data.", "self.results['pipeline_results'].get(pipeline_id) if pipeline_results is None: raise PipelineNotFoundError(\"Pipeline not found in", "loop_interrupted = True if self._handle_keyboard_interrupt(): break full_rankings = self.full_rankings current_batch_idx", "For n_jobs below -1, (n_cpus + 1 + n_jobs) are", "id in self._results['search_order'][1:]: curr_score = self._results['pipeline_results'][id]['score'] significant_change = abs((curr_score -", "of var\", c] = std / mean if abs(mean) >", "be helpful for training pipelines once the search is complete.", "{best_pipeline_name}\") logger.info(f\"Best pipeline {self.objective.name}: {best_pipeline['score']:3f}\") self._searched = True def _find_best_pipeline(self):", "is not None: # Baseline + first batch + each", "\"score\", \"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"] if not self._results['pipeline_results']: return pd.DataFrame(columns=full_rankings_cols)", "the current AutoML batch produced a score of np.nan on", "@property def results(self): \"\"\"Class that allows access to a copy", "if self._handle_keyboard_interrupt(): self._interrupted = True current_batch_pipelines = [] current_batch_pipeline_scores =", "search\") logger.info(\"Optimizing for %s. \" % self.objective.name) logger.info(\"{} score is", "Must be between 0 and 1, exclusive. 
Defaults to 0.2", "= [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids self._results['search_order'].append(pipeline_id)", "time.time() - start_of_loop self._start += time_in_loop return False else: leading_char", "evalml.utils import convert_to_seconds, infer_feature_types from evalml.utils.logger import ( get_logger, log_subtitle,", "not problem_configuration or not all(p in problem_configuration for p in", "if self._handle_keyboard_interrupt(): break full_rankings = self.full_rankings current_batch_idx = full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores", "(float): Minimum percentage difference to qualify as score improvement for", "# In the event add_to_rankings is called before search _baseline_cv_scores", "in seconds. For strings, time can be specified as seconds,", "not in self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline not found\") pipeline = self.get_pipeline(pipeline_id)", "batch_number = self._automl_algorithm.batch_number update_pipeline(logger, desc, len(self._results['pipeline_results']) + 1, self.max_iterations, self._start,", "def get_pipeline(self, pipeline_id): \"\"\"Given the ID of a pipeline training", "logger.info(\"{} score is better.\\n\".format('Greater' if self.objective.greater_is_better else 'Lower')) logger.info(f\"Using {self._engine.__class__.__name__}", "= SequentialEngine(self.X_train, self.y_train, self.ensembling_indices, self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families =", "self.data_splitter or default_data_splitter self.pipeline_parameters = pipeline_parameters if pipeline_parameters is not", "\"parameters\"] if not self._results['pipeline_results']: return 
pd.DataFrame(columns=full_rankings_cols) rankings_df = pd.DataFrame(self._results['pipeline_results'].values()) rankings_df", "first_id = self._results['search_order'][0] best_score = self._results['pipeline_results'][first_id]['score'] num_without_improvement = 0 for", "iteration vs. score plot in Jupyter notebook. Disabled by default", "PipelineNotFoundError from evalml.model_family import ModelFamily from evalml.objectives import ( get_core_objectives,", "used for pipelines. None and 1 are equivalent. If set", "allowed in automl.\") return objective() return objective def __str__(self): def", "in objective_name_to_class: scores[field] += value return {objective: float(score) / n_folds", "scoring. objectives (list(str), list(ObjectiveBase)): Objectives used for scoring. Returns: Dict[str,", "if isinstance(max_time, str) else max_time self.max_iterations = max_iterations self.max_batches =", "raise ValueError(\"Given objective {} is not compatible with a {}", "additional_objectives] additional_objectives = [self._validate_objective(obj) for obj in additional_objectives] self.additional_objectives =", "\"\"\"Returns a trained instance of the best pipeline and parameters", ">= self.max_iterations: return False # check for early stopping if", "`best_pipeline`.\") return self._best_pipeline def save(self, file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML object", "var\", c] = std / mean if abs(mean) > 0", "+ 1, self.max_iterations, self._start, batch_number, self.show_batch_output) def _validate_objective(self, objective): non_core_objectives", "if not (self._best_pipeline and self._best_pipeline == self.get_pipeline(best_pipeline['id'])): best_pipeline = self.get_pipeline(best_pipeline['id'])", "pipeline, and `search_order`: a list describing the order the pipelines", "If True, runs ensembling in a separate batch after every", "try: self.plot = PipelineSearchPlots(self) except ImportError: logger.warning(\"Unable to import plotly;", "show 
iteration plot outside of a jupyter notebook if show_iteration_plot:", "return copy.deepcopy(self._results) @property def rankings(self): \"\"\"Returns a pandas.DataFrame with scoring", "score_pipelines(self, pipelines, X_holdout, y_holdout, objectives): \"\"\"Score a list of pipelines", "as a 1d vector using the y_train argument') try: self.problem_type", "optimize_thresholds self.ensembling = ensembling if objective == 'auto': objective =", "default. Defaults to None, which will call `log_error_callback`. additional_objectives (list):", "= True if run_ensembling: ensemble_nth_batch = len(self.allowed_pipelines) + 1 num_ensemble_batches", "= self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc = f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\" return search_desc", "except KeyboardInterrupt: loop_interrupted = True if self._handle_keyboard_interrupt(): break full_rankings =", "Callback function takes three positional parameters: A dictionary containing the", "type(pipeline), \"pipeline_summary\": pipeline.summary, \"parameters\": pipeline.parameters, \"score\": cv_score, \"high_variance_cv\": high_variance_cv, \"training_time\":", "self.objective) self._best_pipeline = best_pipeline def _num_pipelines(self): \"\"\"Return the number of", "of a pipeline training result, returns an untrained instance of", "best_pipeline = self.rankings.iloc[0] if not (self._best_pipeline and self._best_pipeline == self.get_pipeline(best_pipeline['id'])):", "None or parameters is None: raise PipelineNotFoundError(\"Pipeline class or parameters", "True if run_ensembling: ensemble_nth_batch = len(self.allowed_pipelines) + 1 num_ensemble_batches =", "Callback function takes three positional parameters: The pipeline class, the", "c] = std all_objective_scores.loc[\"coef of var\", c] = std /", "def add_to_rankings(self, pipeline): \"\"\"Fits and evaluates a given pipeline then", "if self.optimize_thresholds and 
self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold: logger.info(\"Objective to optimize binary", "run_ensembling = False logger.warning(f\"Ensembling is set to True, but max_iterations", "set, then max_iterations will default to max_iterations of 5. max_time", "import convert_to_seconds, infer_feature_types from evalml.utils.logger import ( get_logger, log_subtitle, log_title,", "= input_pipeline_ids self._results['search_order'].append(pipeline_id) if not is_baseline: score_to_minimize = -cv_score if", "self.allowed_pipelines = [make_pipeline(self.X_train, self.y_train, estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters) for estimator in", "the search continue? Returns: bool: True if yes, False if", "_validate_problem_type(self): for obj in self.additional_objectives: if not obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional", "self.show_batch_output) def _validate_objective(self, objective): non_core_objectives = get_non_core_objectives() if isinstance(objective, type):", "the current best pipeline before training and thresholding\"\"\" if len(self.rankings)", "when `search()` errors and raises an Exception. Callback function takes", "of pipelines to train for every batch after the first", "higher than specified threshhold.\"\"\" pipeline_name = pipeline.name high_variance_cv = bool(abs(cv_scores.std()", "an untrained pipeline instance. \"\"\" if not self._best_pipeline: raise PipelineNotFoundError(\"automl", "evalml.pipelines.components.utils import get_estimators from evalml.pipelines.utils import make_pipeline from evalml.preprocessing import", "self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start = time.time() try: self._add_baseline_pipelines() except KeyboardInterrupt: if self._handle_keyboard_interrupt():", "start_iteration_callback (callable): Function called before each pipeline training iteration. 
Callback", "of pipeline evaluations made in the search \"\"\" return len(self._results['pipeline_results'])", "is None: raise ValueError('Must specify training data target values as", "type of supervised learning problem. See evalml.problem_types.ProblemType.all_problem_types for a full", "pipeline fit during search. \"\"\" if self.problem_type == ProblemTypes.BINARY: baseline", "'pipeline_results': {}, 'search_order': [], 'errors': [] } self.random_seed = random_seed", "and evaluate. \"\"\" pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name] for parameter", "{self.tolerance}\\n\" f\"Data Splitting: {self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\"", "= max_batches self._pipelines_per_batch = _pipelines_per_batch if not self.max_iterations and not", "without improvement to stop search early. Must be positive. If", "{first_ensembling_iteration} to run ensembling.\") else: logger.info(f\"Ensembling will run at the", "to save file pickle_protocol (int): the pickle data stream format.", "once the search is complete. 
Arguments: pipelines (list(PipelineBase)): List of", "in automl results\") pipeline_class = pipeline_results.get('pipeline_class') parameters = pipeline_results.get('parameters') if", "logger.info(all_objective_scores) if return_dict: return pipeline_results def add_to_rankings(self, pipeline): \"\"\"Fits and", "full_rankings_cols = [\"id\", \"pipeline_name\", \"score\", \"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"] if", "else: leading_char = \"\" def search(self, show_iteration_plot=True): \"\"\"Find the best", "in self.allowed_model_families])) self.search_iteration_plot = None if self.plot: self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot)", "evalml.automl.callbacks import log_error_callback from evalml.automl.engine import SequentialEngine from evalml.automl.utils import", "self.optimize_thresholds and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold: logger.info(\"Objective to optimize binary classification", "object. Must also accepts kwargs, so AutoMLSearch is able to", "if self.max_iterations < first_ensembling_iteration: run_ensembling = False logger.warning(f\"Ensembling is set", "objective_name_to_class: scores[field] += value return {objective: float(score) / n_folds for", "used to initalize a pipeline with. _ensembling_split_size (float): The amount", "- best_score) / best_score) > self.tolerance score_improved = curr_score >", "training iteration. Callback function takes three positional parameters: A dictionary", "return # don't show iteration plot outside of a jupyter", "1) // ensemble_nth_batch if num_ensemble_batches == 0: run_ensembling = False", "1 + n_jobs) are used. ensembling (boolean): If True, runs", "run every {ensemble_nth_batch} batches.\") self.max_iterations = (1 + len(self.allowed_pipelines) +", "holdout data. 
Arguments: pipelines (list(PipelineBase)): List of pipelines to train.", "not issubclass(self.data_splitter.__class__, BaseCrossValidator): raise ValueError(\"Not a valid data splitter\") if", "cross validation scores. {pipeline_name} may not perform as estimated on", "or 0.0 self._results = { 'pipeline_results': {}, 'search_order': [], 'errors':", "is part of default set of objectives for problem_type, remove", "will not be included in the dictionary but the exception", "input training data of shape [n_samples, n_features]. Required. y_train (pd.Series,", "for obj in additional_objectives] self.additional_objectives = additional_objectives self.objective_name_to_class = {o.name:", "ensembling will not run. Set max_iterations >= {first_ensembling_iteration} to run", "= percent_better high_variance_cv = self._check_for_high_variance(pipeline, cv_scores) pipeline_id = len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id]", "self._start if self.max_time and elapsed >= self.max_time: return False elif", "get_ipython except NameError: show_iteration_plot = False log_title(logger, \"Beginning pipeline search\")", "compatible with problem_type {}.\".format(pipeline.name, self.problem_type.value)) def _add_baseline_pipelines(self): \"\"\"Fits a baseline", "should terminate early \"\"\" leading_char = \"\\n\" start_of_loop = time.time()", "cloudpickle.dump(self, f, protocol=pickle_protocol) @staticmethod def load(file_path): \"\"\"Loads AutoML object at", "Returns: bool: If True, search should terminate early \"\"\" leading_char", "else: num_without_improvement += 1 if num_without_improvement >= self.patience: logger.info(\"\\n\\n{} iterations", "return best_pipeline = self.rankings.iloc[0] if not (self._best_pipeline and self._best_pipeline ==", "True, but max_iterations is too small, so ensembling will not", "problem type are allowed. 
Setting this field will cause allowed_model_families", "to find file to load Returns: AutoSearchBase object \"\"\" with", "current_batch_idx = full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores = full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all():", "the same instance. Re-initialize AutoMLSearch to search again.\") return #", "of the results from `automl_search`. Returns: dict containing `pipeline_results`: a", "not loop_interrupted: current_batch_pipelines = self._automl_algorithm.next_batch() except StopIteration: logger.info('AutoML Algorithm out", "True. start_iteration_callback (callable): Function called before each pipeline training iteration.", "self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids self._results['search_order'].append(pipeline_id) if not is_baseline: score_to_minimize =", "pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross Validation\", underline=\"-\") all_objective_scores = [fold[\"all_objective_scores\"] for fold", "and every {len(self.allowed_pipelines) * self._pipelines_per_batch} iterations after that.\") if self.max_batches", "np import pandas as pd import woodwork as ww from", "Defaults to 0. n_jobs (int or None): Non-negative integer describing", "f\"Tolerance: {self.tolerance}\\n\" f\"Data Splitting: {self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start Iteration Callback:", "self.n_jobs = n_jobs self.plot = None try: self.plot = PipelineSearchPlots(self)", "%s\\n\" % \", \".join([model.value for model in self.allowed_model_families])) self.search_iteration_plot =", "batch is one, ensembling will not run. Defaults to False.", "first pipeline fit during search. \"\"\" if self.problem_type == ProblemTypes.BINARY:", "a copy of the results from `automl_search`. 
Returns: dict containing", "that error out during scoring will not be included in", "pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params ) def _pre_evaluation_callback(self, pipeline): if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__,", "} self.random_seed = random_seed self.n_jobs = n_jobs self.plot = None", "search \"\"\" return len(self._results['pipeline_results']) def _should_continue(self): \"\"\"Given the original stopping", "to 'auto', chooses: - LogLossBinary for binary classification problems, -", "to None. tolerance (float): Minimum percentage difference to qualify as", "train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated pipeline search Arguments: X_train (pd.DataFrame,", "returns an untrained instance of the specified pipeline initialized with", "{self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize Thresholds: {self.optimize_thresholds}\\n\" ) rankings_desc = \"\"", "log_title(logger, \"Beginning pipeline search\") logger.info(\"Optimizing for %s. \" % self.objective.name)", "+ str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\") logger.info(\"Training for {} problems.\".format(pipeline.problem_type)) if self.optimize_thresholds", "allowed_model_families to be ignored. allowed_model_families (list(str, ModelFamily)): The model families", "with. _ensembling_split_size (float): The amount of the training data we'll", "{threshold}) within cross validation scores. 
{pipeline_name} may not perform as", "max_iterations=None, max_time=None, patience=None, tolerance=None, data_splitter=None, allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None, add_result_callback=None, error_callback=None,", "score_to_minimize = -cv_score if self.objective.greater_is_better else cv_score try: self._automl_algorithm.add_result(score_to_minimize, pipeline,", "len(self.allowed_pipelines) * self._pipelines_per_batch + 1) if self.max_iterations < first_ensembling_iteration: run_ensembling", "- LogLossBinary for binary classification problems, - LogLossMulticlass for multiclass", "it existing_main_objective = next((obj for obj in additional_objectives if obj.name", "-1, all CPUs are used. For n_jobs below -1, (n_cpus", "at file path Arguments: file_path (str): location to find file", "type): if objective in non_core_objectives: raise ValueError(f\"{objective.name.lower()} is not allowed", "binary pipeline threshold. Defaults to True. start_iteration_callback (callable): Function called", "= True def _find_best_pipeline(self): \"\"\"Finds the best pipeline in the", "search early. Must be positive. If None, early stopping is", "after every allowed pipeline class has been iterated over. 
If", "in self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline not found\") pipeline = self.get_pipeline(pipeline_id) pipeline_results", "so AutoMLSearch is able to pass along other appropriate parameters", "def results(self): \"\"\"Class that allows access to a copy of", "a {} problem.\".format(obj.name, self.problem_type.value)) for pipeline in self.allowed_pipelines or []:", "y_train=None, problem_type=None, objective='auto', max_iterations=None, max_time=None, patience=None, tolerance=None, data_splitter=None, allowed_pipelines=None, allowed_model_families=None,", "check to make sure it is different from the current", "ProblemTypes.BINARY: baseline = ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type == ProblemTypes.MULTICLASS: baseline =", "logger.info(\"\\n\\n{} iterations without improvement. Stopping search early...\".format(self.patience)) return False return", "ModelFamily from evalml.objectives import ( get_core_objectives, get_non_core_objectives, get_objective ) from", "== parameter: return self._engine.evaluate_batch([pipeline]) self._find_best_pipeline() @property def results(self): \"\"\"Class that", "if self._automl_algorithm is not None and self._automl_algorithm.batch_number > 0: batch_number", "default objectives for problem type if not empty. random_seed (int):", "documentation, since Sphinx does not work well with instance attributes.", "patience is not None. Defaults to None. allowed_pipelines (list(class)): A", "%s. \" % self.objective.name) logger.info(\"{} score is better.\\n\".format('Greater' if self.objective.greater_is_better", "f\"Random Seed: {self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize Thresholds: {self.optimize_thresholds}\\n\" ) rankings_desc", "inclusive. 
Received {} instead\".format(tolerance)) self.patience = patience self.tolerance = tolerance", "== 0: return best_pipeline = self.rankings.iloc[0] if not (self._best_pipeline and", "early...\".format(self.patience)) return False return True def _validate_problem_type(self): for obj in", "access to a copy of the results from `automl_search`. Returns:", "for estimator in allowed_estimators]}\") self.allowed_pipelines = [make_pipeline(self.X_train, self.y_train, estimator, self.problem_type,", "parameters: The pipeline class, the pipeline parameters, and the AutoMLSearch", ") def _pre_evaluation_callback(self, pipeline): if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self) desc", "from evalml.preprocessing import split_data from evalml.problem_types import ProblemTypes, handle_problem_types from", "for the random number generator. Defaults to 0. n_jobs (int", "list of pipelines on the training data. This can be", "to configure the search. For example, in time series problems,", "data. This is the first pipeline fit during search. \"\"\"", "if not isinstance(max_time, (int, float, str, type(None))): raise TypeError(f\"Parameter max_time", "None and self._automl_algorithm.batch_number > 0: batch_number = self._automl_algorithm.batch_number update_pipeline(logger, desc,", "to train and score pipelines.\") if self.max_batches is not None:", "_check_for_high_variance(self, pipeline, cv_scores, threshold=0.2): \"\"\"Checks cross-validation scores and logs a", "pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property def full_rankings(self): \"\"\"Returns a pandas.DataFrame", "every batch after the first one. The first batch will", "to use. Defaults to SKOptTuner. optimize_thresholds (bool): Whether or not", "per batch is one, ensembling will not run. 
Defaults to", "additional_objectives = [get_objective(o) for o in additional_objectives] additional_objectives = [self._validate_objective(obj)", "len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] = { \"id\": pipeline_id, \"pipeline_name\": pipeline.name, \"pipeline_class\": type(pipeline),", "ValueError(\"Not a valid data splitter\") if not objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given", "the number of pipeline evaluations which have been made Returns:", "(int): pipeline to retrieve Returns: PipelineBase: untrained pipeline instance associated", "within cross validation scores. {pipeline_name} may not perform as estimated", "self.data_splitter = data_splitter self.optimize_thresholds = optimize_thresholds self.ensembling = ensembling if", "= patience self.tolerance = tolerance or 0.0 self._results = {", "too small, so ensembling will not run. Set max_iterations >=", "(int): Seed for the random number generator. Defaults to 0.", "the data. This is the first pipeline fit during search.", "self._engine.evaluate_batch([pipeline]) self._find_best_pipeline() @property def results(self): \"\"\"Class that allows access to", "protocol=pickle_protocol) @staticmethod def load(file_path): \"\"\"Loads AutoML object at file path", "if self.plot: self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start = time.time() try: self._add_baseline_pipelines()", "(pd.Series, ww.DataColumn): The target training data of length [n_samples]. 
Required", "search num_pipelines = self._num_pipelines() if num_pipelines == 0: return True", "pipeline.describe() if pipeline.model_family == ModelFamily.ENSEMBLE: logger.info(\"Input for ensembler are pipelines", "Estimator\": {\"gap\": gap, \"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class):", "f\"Patience: {self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\" f\"Data Splitting: {self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start", "to the automl rankings with the requirement that automl search", "is able to pass along other appropriate parameters by default.", "CPUs are used. For n_jobs below -1, (n_cpus + 1", "by pipeline name that maps to a dictionary of scores.", "hours. patience (int): Number of iterations without improvement to stop", "make_pipeline from evalml.preprocessing import split_data from evalml.problem_types import ProblemTypes, handle_problem_types", "IterativeAlgorithm from evalml.automl.callbacks import log_error_callback from evalml.automl.engine import SequentialEngine from", "default of None searches over all model families. Run evalml.pipelines.components.utils.allowed_model_families(\"binary\")", "search. problem_configuration (dict, None): Additional parameters needed to configure the", "and 1, exclusive. Defaults to 0.2 _pipelines_per_batch (int): The number", "= full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All pipelines in", "True. pipeline_parameters (dict): A dict of the parameters used to", "def _should_continue(self): \"\"\"Given the original stopping criterion and current state,", "gap and max_delay variables. 
train_best_pipeline (boolean): Whether or not to", "training data target values as a 1d vector using the", "< 0: raise ValueError(f\"Parameter max_iterations must be None or non-negative.", "== ProblemTypes.MULTICLASS: baseline = ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type == ProblemTypes.REGRESSION: baseline", "self.start_iteration_callback = start_iteration_callback self.add_result_callback = add_result_callback self.error_callback = error_callback or", "high_variance_cv def get_pipeline(self, pipeline_id): \"\"\"Given the ID of a pipeline", "for training ensemble metalearners. Only used when ensembling is True.", "import log_error_callback from evalml.automl.engine import SequentialEngine from evalml.automl.utils import (", "1: logger.warning(\"Ensembling is set to True, but the number of", "scores.items()} def _post_evaluation_callback(self, pipeline, evaluation_results): training_time = evaluation_results['training_time'] cv_data =", "self.max_time is not None: logger.info(\"Will stop searching for new pipelines", "the event add_to_rankings is called before search _baseline_cv_scores will be", "pipeline during fit-time. 
When set to 'auto', chooses: - LogLossBinary", "to run ensembling.\") else: logger.info(f\"Ensembling will run at the {first_ensembling_iteration}", "logger.info(\"Training for {} problems.\".format(pipeline.problem_type)) if self.optimize_thresholds and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold:", "patience (int): Number of iterations without improvement to stop search", "propose and rank pipelines, but not for optimizing each pipeline", "o in additional_objectives] additional_objectives = [self._validate_objective(obj) for obj in additional_objectives]", "objective() return objective def __str__(self): def _print_list(obj_list): lines = sorted(['\\t{}'.format(o.name)", "notebook if show_iteration_plot: try: get_ipython except NameError: show_iteration_plot = False", "and the AutoMLSearch object. error_callback (callable): Function called when `search()`", "MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type]", "str, type(None))): raise TypeError(f\"Parameter max_time must be a float, int,", "additional_objectives.remove(existing_main_objective) else: additional_objectives = [get_objective(o) for o in additional_objectives] additional_objectives", "as np import pandas as pd import woodwork as ww", "search_desc = ( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max", "in non-Jupyter enviroments. 
\"\"\" if self._searched: logger.info(\"AutoMLSearch.search() has already been", "= pipeline_class(parameters={\"pipeline\": {\"gap\": gap, \"max_delay\": max_delay}, \"Time Series Baseline Estimator\":", "import ( get_core_objectives, get_non_core_objectives, get_objective ) from evalml.pipelines import (", "all pipelines for this problem type are allowed. Setting this", "objective def __str__(self): def _print_list(obj_list): lines = sorted(['\\t{}'.format(o.name) for o", "For example, in time series problems, values should be passed", "f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result Callback: {_get_funct_name(self.add_result_callback)}\\n\"", "def _add_baseline_pipelines(self): \"\"\"Fits a baseline pipeline to the data. This", "or hours. patience (int): Number of iterations without improvement to", "The model families to search. The default of None searches", "new pipeline, an untrained_pipeline containing the parameters used during training,", "self._baseline_cv_scores = {} self.show_batch_output = False self._validate_problem_type() self.problem_configuration = self._validate_problem_configuration(problem_configuration)", "Series Baseline Estimator\": {\"gap\": gap, \"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod def", "== ModelFamily.ENSEMBLE: input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"]", "new pipelines after %d seconds.\\n\" % self.max_time) logger.info(\"Allowed model families:", "curr_score < best_score if score_improved and significant_change: best_score = curr_score", "iterated over. 
If the number of unique pipelines to search", "SKOptTuner self.start_iteration_callback = start_iteration_callback self.add_result_callback = add_result_callback self.error_callback = error_callback", "type if not empty. random_seed (int): Seed for the random", "self.patience is None or self.tolerance is None: return True first_id", "list of pipelines on the given holdout data. Arguments: pipelines", "False # check for early stopping if self.patience is None", "Defaults to False. Returns: Description of specified pipeline. Includes information", "def load(file_path): \"\"\"Loads AutoML object at file path Arguments: file_path", "else: self.max_iterations = 1 + len(self.allowed_pipelines) + (self._pipelines_per_batch * (self.max_batches", "exclusive, received {_ensembling_split_size}\") X_shape = ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices, _, _", "untrained pipeline instance associated with the provided ID \"\"\" pipeline_results", "in this loop does not count towards the time budget", "train_best_pipeline (boolean): Whether or not to train the best pipeline", "pipelines. \" % self.max_iterations) if self.max_time is not None: logger.info(\"Will", "If `train_best_pipeline` is set to False, returns an untrained pipeline", "from the highest-scoring set of parameters used with each pipeline.\"\"\"", "self._pipelines_per_batch} iterations after that.\") if self.max_batches and self.max_iterations is None:", "to {self.allowed_model_families}\") if len(self.problem_configuration): pipeline_params = {**{'pipeline': self.problem_configuration}, **self.pipeline_parameters} else:", "num_without_improvement = 0 else: num_without_improvement += 1 if num_without_improvement >=", "Whether or not to optimize the binary pipeline threshold. 
Defaults", "1)) if run_ensembling: if not (0 < _ensembling_split_size < 1):", "pipeline.model_family == ModelFamily.ENSEMBLE: logger.info(\"Input for ensembler are pipelines with IDs:", "import make_pipeline from evalml.preprocessing import split_data from evalml.problem_types import ProblemTypes,", "model families to search. The default of None searches over", "import ( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline ) from", "None: logger.info(\"Will stop searching for new pipelines after %d seconds.\\n\"", "return self._engine.evaluate_batch([pipeline]) self._find_best_pipeline() @property def results(self): \"\"\"Class that allows access", "!= self.problem_type: raise ValueError(\"Given pipeline {} is not compatible with", "of pipelines to train. X_holdout (ww.DataTable, pd.DataFrame): Holdout features. y_holdout", "automl search. If `train_best_pipeline` is set to False, returns an", "must be a float, int, string or None. Received {type(max_time)}", "or None. Received {type(max_time)} with value {str(max_time)}..\") if isinstance(max_time, (int,", "logger.info(f\"Searching up to {self.max_batches} batches for a total of {self.max_iterations}", "{}, 'search_order': [], 'errors': [] } self.random_seed = random_seed self.n_jobs", "search _baseline_cv_scores will be empty so we will return #", "to True, but max_batches is too small, so ensembling will", "will be in seconds. 
For strings, time can be specified", "raise ValueError(\"Given pipeline {} is not compatible with problem_type {}.\".format(pipeline.name,", "self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self) return pipeline_id def _check_for_high_variance(self, pipeline, cv_scores,", "True # Run at least one pipeline for every search", "the y_train argument') try: self.problem_type = handle_problem_types(problem_type) except ValueError: raise", "raise ValueError(f\"Parameter max_time must be None or non-negative. Received {max_time}.\")", "pipeline to train and evaluate. \"\"\" pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name'] ==", "ObjectiveBase): The objective to optimize for. Used to propose and", "self.max_time and elapsed >= self.max_time: return False elif self.max_iterations and", "( get_core_objectives, get_non_core_objectives, get_objective ) from evalml.pipelines import ( MeanBaselineRegressionPipeline,", "from evalml.problem_types import ProblemTypes, handle_problem_types from evalml.tuners import SKOptTuner from", "helpful for training pipelines once the search is complete. 
Arguments:", "max_delay}, \"Time Series Baseline Estimator\": {\"gap\": gap, \"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline])", "1 if self._automl_algorithm is not None and self._automl_algorithm.batch_number > 0:", "= handle_problem_types(problem_type) except ValueError: raise ValueError('choose one of (binary, multiclass,", "\"\"\" with open(file_path, 'wb') as f: cloudpickle.dump(self, f, protocol=pickle_protocol) @staticmethod", "logger.info(f\"Using {self._engine.__class__.__name__} to train and score pipelines.\") if self.max_batches is", "is set to True, but max_iterations is too small, so", "= True current_batch_pipelines = [] current_batch_pipeline_scores = [] new_pipeline_ids =", "Received {type(max_time)} with value {str(max_time)}..\") if isinstance(max_time, (int, float)) and", "will run every {ensemble_nth_batch} batches.\") self.max_iterations = (1 + len(self.allowed_pipelines)", "Dict[str, float]]: Dictionary keyed by pipeline name that maps to", "a trained instance of the best pipeline and parameters found", "error out during scoring will not be included in the", "evalml.exceptions import AutoMLSearchException, PipelineNotFoundError from evalml.model_family import ModelFamily from evalml.objectives", "{} mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if is_baseline: self._baseline_cv_scores = mean_cv_all_objectives", "tuner class to use. Defaults to SKOptTuner. optimize_thresholds (bool): Whether", "each pipeline family allowed in the search. \"\"\" if X_train", "pipelines searched\"\"\" ascending = True if self.objective.greater_is_better: ascending = False", "time series problems, values should be passed in for the", "@property def best_pipeline(self): \"\"\"Returns a trained instance of the best", "already exists, check to make sure it is different from", "search must be run before selecting `best_pipeline`.\") return self._best_pipeline def", "Must be positive. 
If None, early stopping is disabled. Defaults", "to run ensembling.\") else: logger.info(f\"Ensembling will run every {ensemble_nth_batch} batches.\")", "indicates all pipelines for this problem type are allowed. Setting", "plotting\\n\") self.allowed_pipelines = allowed_pipelines self.allowed_model_families = allowed_model_families self._automl_algorithm = None", "+ each pipeline iteration + 1 first_ensembling_iteration = (1 +", "logger.info(\"AutoMLSearch.search() has already been run and will not run again", "try: get_ipython except NameError: show_iteration_plot = False log_title(logger, \"Beginning pipeline", "logger.info(\"Optimizing for %s. \" % self.objective.name) logger.info(\"{} score is better.\\n\".format('Greater'", "object. error_callback (callable): Function called when `search()` errors and raises", "set to {[pipeline.name for pipeline in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set to", "{\"gap\": gap, \"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores", "all objective names allowed in automl.\") return objective() return objective", "\"\"\" if self._searched: logger.info(\"AutoMLSearch.search() has already been run and will", "mean = all_objective_scores[c].mean(axis=0) std = all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c] = mean", "array using the X_train argument') if y_train is None: raise", "is not None and max_iterations < 0: raise ValueError(f\"Parameter max_iterations", "in the dictionary but the exception and stacktrace will be", "\"pipeline_name\": pipeline.name, \"pipeline_class\": type(pipeline), \"pipeline_summary\": pipeline.summary, \"parameters\": pipeline.parameters, \"score\": cv_score,", "num_without_improvement += 1 if num_without_improvement >= self.patience: logger.info(\"\\n\\n{} iterations without", "list(set([p.model_family for p in 
(self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set to {[pipeline.name for", "_post_evaluation_callback(self, pipeline, evaluation_results): training_time = evaluation_results['training_time'] cv_data = evaluation_results['cv_data'] cv_scores", "Defaults to SKOptTuner. optimize_thresholds (bool): Whether or not to optimize", "this field will cause allowed_model_families to be ignored. allowed_model_families (list(str,", "search is complete. Arguments: pipelines (list(PipelineBase)): List of pipelines to", "want to stop the search. Returns: bool: If True, search", "max_iterations and max_time is not set, then max_iterations will default", "parallelism used for pipelines. None and 1 are equivalent. If", "in the current AutoML batch produced a score of np.nan", "set to False, returns an untrained pipeline instance. Returns: PipelineBase:", "and thresholding\"\"\" if len(self.rankings) == 0: return best_pipeline = self.rankings.iloc[0]", "False while self._should_continue(): try: if not loop_interrupted: current_batch_pipelines = self._automl_algorithm.next_batch()", "0 for id in self._results['search_order'][1:]: curr_score = self._results['pipeline_results'][id]['score'] significant_change =", "file path Arguments: file_path (str): location to find file to", "results(self): \"\"\"Class that allows access to a copy of the", "max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores = defaultdict(int) n_folds", "the results from `automl_search`. Returns: dict containing `pipeline_results`: a dict", "max_batches must be None or non-negative. 
Received {max_batches}.\") if max_iterations", "/ best_score) > self.tolerance score_improved = curr_score > best_score if", "\"\"\" if self._interrupted: return False # for add_to_rankings if self._searched:", "objective, score in scores.items()} def _post_evaluation_callback(self, pipeline, evaluation_results): training_time =", "not is_baseline: score_to_minimize = -cv_score if self.objective.greater_is_better else cv_score try:", "training iteration. Callback function takes three positional parameters: The pipeline", "of parallelism used for pipelines. None and 1 are equivalent.", "describing level of parallelism used for pipelines. None and 1", "+ one of each pipeline family allowed in the search.", "the rankings If self._best_pipeline already exists, check to make sure", "ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type == ProblemTypes.REGRESSION: baseline = MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class", "iteration + 1 first_ensembling_iteration = (1 + len(self.allowed_pipelines) + len(self.allowed_pipelines)", "to use. Defaults to StratifiedKFold. tuner_class: The tuner class to", "score on. Will override default objectives for problem type if", "List of pipelines to train. X_holdout (ww.DataTable, pd.DataFrame): Holdout features.", "Batches: {self.max_batches}\\n\" f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines or [])}\\n\" f\"Patience: {self.patience}\\n\" f\"Tolerance:", "if objective in non_core_objectives: raise ValueError(f\"{objective.name.lower()} is not allowed in", "a pandas.DataFrame with scoring results from all pipelines searched\"\"\" ascending", "raise ValueError(\"tolerance value must be a float between 0.0 and", "True def _validate_problem_type(self): for obj in self.additional_objectives: if not obj.is_defined_for_problem_type(self.problem_type):", "training data of shape [n_samples, n_features]. Required. y_train (pd.Series, ww.DataColumn):", "problem. 
See evalml.problem_types.ProblemType.all_problem_types for a full list. objective (str, ObjectiveBase):", "and self.objective.can_optimize_threshold: logger.info(\"Objective to optimize binary classification pipeline thresholds for:", "time will be in seconds. For strings, time can be", "cross validation, etc. \"\"\" if pipeline_id not in self._results['pipeline_results']: raise", "True elif choice == \"n\": # So that the time", "will automatically be encoded show_iteration_plot (boolean, True): Shows an iteration", "if self.max_time is not None: logger.info(\"Will stop searching for new", "+ num_ensemble_batches) else: self.max_iterations = 1 + len(self.allowed_pipelines) + (self._pipelines_per_batch", "AutoML object at file path Arguments: file_path (str): location to", "= ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices, _, _ = split_data(X_shape, self.y_train, problem_type=self.problem_type,", "return objective def __str__(self): def _print_list(obj_list): lines = sorted(['\\t{}'.format(o.name) for", "self.search_iteration_plot: self.search_iteration_plot.update() if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self) return pipeline_id def", "pass if self.search_iteration_plot: self.search_iteration_plot.update() if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self) return", "Returns: dict containing `pipeline_results`: a dict with results from each", "NameError: show_iteration_plot = False log_title(logger, \"Beginning pipeline search\") logger.info(\"Optimizing for", "logger.debug(f\"allowed_model_families set to {self.allowed_model_families}\") if len(self.problem_configuration): pipeline_params = {**{'pipeline': self.problem_configuration},", "to the fitted pipeline. Note that the any pipelines that", "describe return_dict (bool): If True, return dictionary of information about", "must be None or non-negative. 
Received {max_batches}.\") if max_iterations is", "y_train, self.optimize_thresholds, self.objective) self._best_pipeline = best_pipeline def _num_pipelines(self): \"\"\"Return the", "options. Change `binary` to `multiclass` or `regression` depending on the", "\"# Validation\"]: all_objective_scores[c] = all_objective_scores[c].astype(\"object\") continue mean = all_objective_scores[c].mean(axis=0) std", "time.time() - self._start elapsed_time = time_elapsed(self._start) desc = f\"\\nSearch finished", "show_iteration_plot = False log_title(logger, \"Beginning pipeline search\") logger.info(\"Optimizing for %s.", "if not obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional objective {} is not compatible", "Stopping search early...\".format(self.patience)) return False return True def _validate_problem_type(self): for", "_add_baseline_pipelines(self): \"\"\"Fits a baseline pipeline to the data. This is", "pipeline_results = self.results['pipeline_results'].get(pipeline_id) if pipeline_results is None: raise PipelineNotFoundError(\"Pipeline not", "so ensembling will not run.\") run_ensembling = False if run_ensembling", "None: additional_objectives = get_core_objectives(self.problem_type) # if our main objective is", "= None self._interrupted = False if self.allowed_pipelines is None: logger.info(\"Generating", "run_ensembling: if not (0 < _ensembling_split_size < 1): raise ValueError(f\"Ensembling", "is provided, this parameter will be ignored. data_splitter (sklearn.model_selection.BaseCrossValidator): Data", "to None, which will call `log_error_callback`. additional_objectives (list): Custom set", "# nan for the base score. 
percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name,", "existing_main_objective is not None: additional_objectives.remove(existing_main_objective) else: additional_objectives = [get_objective(o) for", "= [get_objective(o) for o in additional_objectives] additional_objectives = [self._validate_objective(obj) for", "Holdout features. y_holdout (ww.DataTable, pd.DataFrame): Holdout targets for scoring. objectives", "Thresholds: {self.optimize_thresholds}\\n\" ) rankings_desc = \"\" if not self.rankings.empty: rankings_str", "made in the search \"\"\" return len(self._results['pipeline_results']) def _should_continue(self): \"\"\"Given", "pipeline threshold. Defaults to True. start_iteration_callback (callable): Function called before", "self._results = { 'pipeline_results': {}, 'search_order': [], 'errors': [] }", "< 1): raise ValueError(f\"Ensembling split size must be between 0", "return False return True def _validate_problem_type(self): for obj in self.additional_objectives:", "percent_better_than_baseline[obj_name] = percent_better high_variance_cv = self._check_for_high_variance(pipeline, cv_scores) pipeline_id = len(self._results['pipeline_results'])", "else max_time self.max_iterations = max_iterations self.max_batches = max_batches self._pipelines_per_batch =", "found\") pipeline = self.get_pipeline(pipeline_id) pipeline_results = self._results['pipeline_results'][pipeline_id] pipeline.describe() if pipeline.model_family", "we will return # nan for the base score. percent_better", "difference to qualify as score improvement for early stopping. Only", "y_train = self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else: X_train = self.X_train y_train =", "+ 1} to run ensembling.\") else: logger.info(f\"Ensembling will run every", "or `regression` depending on the problem type. Note that if", "errors and raises an Exception. 
Callback function takes three positional", "= [self._validate_objective(obj) for obj in additional_objectives] self.additional_objectives = additional_objectives self.objective_name_to_class", "evalml.objectives.utils.get_core_objective_names() \" \"to get all objective names allowed in automl.\")", "one pipeline for every search num_pipelines = self._num_pipelines() if num_pipelines", "scoring results from the highest-scoring set of parameters used with", "* self._pipelines_per_batch} iterations after that.\") if self.max_batches and self.max_iterations is", "logger.info(f\"Ensembling will run at the {first_ensembling_iteration} iteration and every {len(self.allowed_pipelines)", "(int): the pickle data stream format. Returns: None \"\"\" with", "Seed: {self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize Thresholds: {self.optimize_thresholds}\\n\" ) rankings_desc =", "self.max_time and not self.max_batches: self.max_batches = 1 logger.info(\"Using default limit", "ValueError(f\"Ensembling split size must be between 0 and 1 exclusive,", "\"validation_score\": cv_scores[0] } if pipeline.model_family == ModelFamily.ENSEMBLE: input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"]", "to True, but the number of unique pipelines is one,", "max_iterations >= {first_ensembling_iteration} to run ensembling.\") else: logger.info(f\"Ensembling will run", "c] = std / mean if abs(mean) > 0 else", "ValueError(\"user_parameters must be a dict containing values for at least", "search plotting\\n\") self.allowed_pipelines = allowed_pipelines self.allowed_model_families = allowed_model_families self._automl_algorithm =", "The 'all_objective_scores' field contains scores for all objectives # but", "[get_objective(o) for o in additional_objectives] additional_objectives = [self._validate_objective(obj) for obj", "will be empty so we will return # nan for", "over per batch is one, ensembling will not run. 
Defaults", "try: self._add_baseline_pipelines() except KeyboardInterrupt: if self._handle_keyboard_interrupt(): self._interrupted = True current_batch_pipelines", "that automl search has been run. Arguments: pipeline (PipelineBase): pipeline", "stopping is disabled. Defaults to None. tolerance (float): Minimum percentage", "max_batches < 0: raise ValueError(f\"Parameter max_batches must be None or", "batch will train a baseline pipline + one of each", "to max_iterations of 5. max_time (int, str): Maximum time to", "= allowed_pipelines self.allowed_model_families = allowed_model_families self._automl_algorithm = None self._start =", "is_baseline = pipeline.model_family == ModelFamily.BASELINE cv_score = cv_scores.mean() percent_better_than_baseline =", "import BaseCrossValidator from .pipeline_search_plots import PipelineSearchPlots from evalml.automl.automl_algorithm import IterativeAlgorithm", "[ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters = {'gap', 'max_delay'} if not problem_configuration or not", "best_pipeline_name = best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline: {best_pipeline_name}\") logger.info(f\"Best pipeline {self.objective.name}: {best_pipeline['score']:3f}\")", "if pipeline_results is None: raise PipelineNotFoundError(\"Pipeline not found in automl", "None. Received {type(max_time)} with value {str(max_time)}..\") if isinstance(max_time, (int, float))", "before returning it. Defaults to True. pipeline_parameters (dict): A dict", "{'gap', 'max_delay'} if not problem_configuration or not all(p in problem_configuration", "data_splitter=None, allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None, add_result_callback=None, error_callback=None, additional_objectives=None, random_seed=0, n_jobs=-1, tuner_class=None,", "pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated pipeline search Arguments: X_train (pd.DataFrame, ww.DataTable):", "and 1.0 inclusive. 
Received {} instead\".format(tolerance)) self.patience = patience self.tolerance", "def _validate_problem_type(self): for obj in self.additional_objectives: if not obj.is_defined_for_problem_type(self.problem_type): raise", "float)) and max_time < 0: raise ValueError(f\"Parameter max_time must be", "Non-negative integer describing level of parallelism used for pipelines. None", "try: self.problem_type = handle_problem_types(problem_type) except ValueError: raise ValueError('choose one of", "== []: raise ValueError(\"No allowed pipelines to search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling", "(cv >= {threshold}) within cross validation scores. {pipeline_name} may not", "precedence over stopping the search. problem_configuration (dict, None): Additional parameters", "is better.\\n\".format('Greater' if self.objective.greater_is_better else 'Lower')) logger.info(f\"Using {self._engine.__class__.__name__} to train", "return_instance=False) self.objective = self._validate_objective(objective) if self.data_splitter is not None and", "- 3] + \"...\" desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number = 1", "ignored. data_splitter (sklearn.model_selection.BaseCrossValidator): Data splitting method to use. Defaults to", "self._engine.evaluate_batch([baseline]) @staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores = defaultdict(int) n_folds =", "f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\" return search_desc + rankings_desc def _validate_problem_configuration(self, problem_configuration=None):", "(binary, multiclass, regression) as problem_type') self.tuner_class = tuner_class or SKOptTuner", "Seed for the random number generator. Defaults to 0. 
n_jobs", "ensembling=run_ensembling, pipeline_params=pipeline_params ) def _pre_evaluation_callback(self, pipeline): if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters,", "= 1 if self._automl_algorithm is not None and self._automl_algorithm.batch_number >", "rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results from the highest-scoring", "during training will not be included in the dictionary but", "TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap = self.problem_configuration['gap'] max_delay =", "if self.allowed_pipelines == []: raise ValueError(\"No allowed pipelines to search\")", "evalml.pipelines import ( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline )", "self._automl_algorithm.batch_number update_pipeline(logger, desc, len(self._results['pipeline_results']) + 1, self.max_iterations, self._start, batch_number, self.show_batch_output)", "all_objective_scores = all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format', '{:.3f}'.format, 'expand_frame_repr', False): logger.info(all_objective_scores) if", "False. 
max_batches (int): The maximum number of batches of pipelines", "not start a new pipeline search after the duration has", "(int): The maximum number of batches of pipelines to search.", "# check for early stopping if self.patience is None or", "[fold[\"all_objective_scores\"] for fold in pipeline_results[\"cv_data\"]] all_objective_scores = pd.DataFrame(all_objective_scores) for c", "return_dict: return pipeline_results def add_to_rankings(self, pipeline): \"\"\"Fits and evaluates a", "self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self) desc = f\"{pipeline.name}\" if len(desc) >", "(boolean, True): Shows an iteration vs. score plot in Jupyter", "= self.X_train y_train = self.y_train if hasattr(self.data_splitter, \"transform_sample\"): train_indices =", "Time: {self.max_time}\\n\" f\"Max Iterations: {self.max_iterations}\\n\" f\"Max Batches: {self.max_batches}\\n\" f\"Allowed Pipelines:", "test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist() self._engine = SequentialEngine(self.X_train, self.y_train, self.ensembling_indices,", "is not None: logger.info(\"Will stop searching for new pipelines after", "field will cause allowed_model_families to be ignored. 
allowed_model_families (list(str, ModelFamily)):", "allowed_estimators] if self.allowed_pipelines == []: raise ValueError(\"No allowed pipelines to", "percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0] } if pipeline.model_family == ModelFamily.ENSEMBLE:", "= self.get_pipeline(pipeline_id) pipeline_results = self._results['pipeline_results'][pipeline_id] pipeline.describe() if pipeline.model_family == ModelFamily.ENSEMBLE:", "(bool): Whether or not to optimize the binary pipeline threshold.", "run_ensembling: ensemble_nth_batch = len(self.allowed_pipelines) + 1 num_ensemble_batches = (self.max_batches -", "{self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\" f\"Data Splitting: {self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start Iteration", "object at file path Arguments: file_path (str): location to find", "random_seed (int): Seed for the random number generator. Defaults to", "0: raise ValueError(f\"Parameter max_batches must be None or non-negative. 
Received", "will not start a new pipeline search after the duration", "num_ensemble_batches = (self.max_batches - 1) // ensemble_nth_batch if num_ensemble_batches ==", "{ensemble_nth_batch + 1} to run ensembling.\") else: logger.info(f\"Ensembling will run", "if not (0 < _ensembling_split_size < 1): raise ValueError(f\"Ensembling split", "pipeline): if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self) desc = f\"{pipeline.name}\" if", "if callable(function): return function.__name__ else: return None search_desc = (", "= tolerance or 0.0 self._results = { 'pipeline_results': {}, 'search_order':", "\\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max Time: {self.max_time}\\n\" f\"Max Iterations: {self.max_iterations}\\n\" f\"Max", "= time.time() while True: choice = input(leading_char + \"Do you", "and significant_change: best_score = curr_score num_without_improvement = 0 else: num_without_improvement", ">= {threshold}) within cross validation scores. {pipeline_name} may not perform", "pd.option_context('display.float_format', '{:.3f}'.format, 'expand_frame_repr', False): logger.info(all_objective_scores) if return_dict: return pipeline_results def", "stacktrace will be displayed in the log. \"\"\" return self._engine.train_batch(pipelines)", "import woodwork as ww from sklearn.model_selection import BaseCrossValidator from .pipeline_search_plots", "seconds.\\n\" % self.max_time) logger.info(\"Allowed model families: %s\\n\" % \", \".join([model.value", "return len(self._results['pipeline_results']) def _should_continue(self): \"\"\"Given the original stopping criterion and", "self.max_batches = 1 logger.info(\"Using default limit of max_batches=1.\\n\") if patience", "automl results\") return pipeline_class(parameters, random_seed=self.random_seed) def describe_pipeline(self, pipeline_id, return_dict=False): \"\"\"Describe", "fitted pipeline. 
Note that the any pipelines that error out", "rank pipelines, but not for optimizing each pipeline during fit-time.", "should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families = list(set([p.model_family for p in (self.allowed_pipelines)]))", "to train for every batch after the first one. The", "the highest-scoring set of parameters used with each pipeline.\"\"\" return", "tolerance < 0.0): raise ValueError(\"tolerance value must be a float", "integer, then the time will be in seconds. For strings,", "obj in additional_objectives if obj.name == self.objective.name), None) if existing_main_objective", "search.\"\"\" _MAX_NAME_LEN = 40 # Necessary for \"Plotting\" documentation, since", "the new pipeline, an untrained_pipeline containing the parameters used during", "between 0 and 1, exclusive. Defaults to 0.2 _pipelines_per_batch (int):", "pipelines for this problem type are allowed. Setting this field", "'auto': objective = get_default_primary_search_objective(self.problem_type.value) objective = get_objective(objective, return_instance=False) self.objective =", "ensembling if objective == 'auto': objective = get_default_primary_search_objective(self.problem_type.value) objective =", "raise PipelineNotFoundError(\"Pipeline not found\") pipeline = self.get_pipeline(pipeline_id) pipeline_results = self._results['pipeline_results'][pipeline_id]", "p in required_parameters): raise ValueError(\"user_parameters must be a dict containing", "percent_better_than_baseline = {} mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class) if is_baseline: self._baseline_cv_scores", "if self._best_pipeline is not None: best_pipeline = self.rankings.iloc[0] best_pipeline_name =", "during search. 
\"\"\" if self.problem_type == ProblemTypes.BINARY: baseline = ModeBaselineBinaryPipeline(parameters={})", "max_time (int, str): Maximum time to search for pipelines. This", "that.\") if self.max_batches and self.max_iterations is None: self.show_batch_output = True", "self.max_batches is not None: logger.info(f\"Searching up to {self.max_batches} batches for", "parameters = pipeline_results.get('parameters') if pipeline_class is None or parameters is", "(list(str, ModelFamily)): The model families to search. The default of", "using the y_train argument') try: self.problem_type = handle_problem_types(problem_type) except ValueError:", "{ 'pipeline_results': {}, 'search_order': [], 'errors': [] } self.random_seed =", "if not self.max_iterations and not self.max_time and not self.max_batches: self.max_batches", "will not run again on the same instance. Re-initialize AutoMLSearch", "optimize binary classification pipeline thresholds for: {}\".format(self.objective)) logger.info(\"Total training time", "\"\"\" pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name] for parameter in pipeline_rows['parameters']:", "the any pipelines that error out during scoring will not", "search(self, show_iteration_plot=True): \"\"\"Find the best pipeline for the data set.", "return True first_id = self._results['search_order'][0] best_score = self._results['pipeline_results'][first_id]['score'] num_without_improvement =", "values for at least the gap and max_delay \" f\"parameters.", "high_variance_cv = self._check_for_high_variance(pipeline, cv_scores) pipeline_id = len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] = {", "stop the search. 
Returns: bool: If True, search should terminate", "information such as type of pipeline components, problem, training time,", "different from the current best pipeline before training and thresholding\"\"\"", "limit of max_batches=1.\\n\") if patience and (not isinstance(patience, int) or", "if self.objective.greater_is_better else curr_score < best_score if score_improved and significant_change:", "(sklearn.model_selection.BaseCrossValidator): Data splitting method to use. Defaults to StratifiedKFold. tuner_class:", "rankings_df @property def best_pipeline(self): \"\"\"Returns a trained instance of the", "self._best_pipeline: raise PipelineNotFoundError(\"automl search must be run before selecting `best_pipeline`.\")", "None. tolerance (float): Minimum percentage difference to qualify as score", "percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name] = percent_better high_variance_cv =", "has been iterated over. If the number of unique pipelines", "choice = input(leading_char + \"Do you really want to exit", "log_subtitle(logger, \"Training\") logger.info(\"Training for {} problems.\".format(pipeline.problem_type)) if self.optimize_thresholds and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY)", "or non-negative. Received {max_batches}.\") if max_iterations is not None and", "and - R2 for regression problems. max_iterations (int): Maximum number", "for o in [self.objective] + self.additional_objectives} if not isinstance(max_time, (int,", "small, so ensembling will not run. Set max_batches >= {ensemble_nth_batch", "add_to_rankings if self._searched: return True # Run at least one", "= False self._validate_problem_type() self.problem_configuration = self._validate_problem_configuration(problem_configuration) self._train_best_pipeline = train_best_pipeline self._best_pipeline", "or non-negative. 
Received {max_time}.\") if max_batches is not None and", "want to exclude them since # they are not scores", "variance is higher than specified threshhold.\"\"\" pipeline_name = pipeline.name high_variance_cv", "set to False, returns an untrained pipeline instance. \"\"\" if", "> best_score if self.objective.greater_is_better else curr_score < best_score if score_improved", "in the rankings If self._best_pipeline already exists, check to make", "along other appropriate parameters by default. Defaults to None, which", "= get_non_core_objectives() if isinstance(objective, type): if objective in non_core_objectives: raise", "self._automl_algorithm = None self._start = 0.0 self._baseline_cv_scores = {} self.show_batch_output", "is_baseline: self._baseline_cv_scores = mean_cv_all_objectives for obj_name in mean_cv_all_objectives: objective_class =", "default in non-Jupyter enviroments. \"\"\" if self._searched: logger.info(\"AutoMLSearch.search() has already", "< first_ensembling_iteration: run_ensembling = False logger.warning(f\"Ensembling is set to True,", "of None indicates all pipelines for this problem type are", "(self.max_batches - 1) // ensemble_nth_batch if num_ensemble_batches == 0: run_ensembling", "\\n{_print_list(self.allowed_pipelines or [])}\\n\" f\"Patience: {self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\" f\"Data Splitting: {self.data_splitter}\\n\"", "f\"Max Iterations: {self.max_iterations}\\n\" f\"Max Batches: {self.max_batches}\\n\" f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines or", "check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling = self.ensembling if run_ensembling and len(self.allowed_pipelines) == 1:", "fold_data['all_objective_scores'].items(): # The 'all_objective_scores' field contains scores for all objectives", "False except KeyboardInterrupt: loop_interrupted = True if self._handle_keyboard_interrupt(): break full_rankings", "Categorical features will automatically be encoded 
show_iteration_plot (boolean, True): Shows", "time_in_loop = time.time() - start_of_loop self._start += time_in_loop return False", "X_holdout (ww.DataTable, pd.DataFrame): Holdout features. y_holdout (ww.DataTable, pd.DataFrame): Holdout targets", "Function called before each pipeline training iteration. Callback function takes", "time budget (if set) time_in_loop = time.time() - start_of_loop self._start", "as f: cloudpickle.dump(self, f, protocol=pickle_protocol) @staticmethod def load(file_path): \"\"\"Loads AutoML", "show_iteration_plot=True): \"\"\"Find the best pipeline for the data set. Arguments:", "to False. max_batches (int): The maximum number of batches of", "number of unique pipelines is one, so ensembling will not", "up to %s pipelines. \" % self.max_iterations) if self.max_time is", "are pipelines with IDs: \" + str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\") logger.info(\"Training", "List of pipelines to train. Returns: Dict[str, PipelineBase]: Dictionary keyed", "since Sphinx does not work well with instance attributes. plot", "in (self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set to {[pipeline.name for pipeline in self.allowed_pipelines]}\")", "PipelineNotFoundError(\"Pipeline not found\") pipeline = self.get_pipeline(pipeline_id) pipeline_results = self._results['pipeline_results'][pipeline_id] pipeline.describe()", "obj_name in mean_cv_all_objectives: objective_class = self.objective_name_to_class[obj_name] # In the event", "stop searching for new pipelines after %d seconds.\\n\" % self.max_time)", "Disabled by default in non-Jupyter enviroments. \"\"\" if self._searched: logger.info(\"AutoMLSearch.search()", "self.get_pipeline(pipeline_id) pipeline_results = self._results['pipeline_results'][pipeline_id] pipeline.describe() if pipeline.model_family == ModelFamily.ENSEMBLE: logger.info(\"Input", "each pipeline during fit-time. 
When set to 'auto', chooses: -", "def _print_list(obj_list): lines = sorted(['\\t{}'.format(o.name) for o in obj_list]) return", "if not self.rankings.empty: rankings_str = self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc = f\"\\nSearch", "\"\"\"Describe a pipeline Arguments: pipeline_id (int): pipeline to describe return_dict", "field contains scores for all objectives # but also fields", "raise PipelineNotFoundError(\"Pipeline not found in automl results\") pipeline_class = pipeline_results.get('pipeline_class')", "(int or None): Non-negative integer describing level of parallelism used", "self._check_for_high_variance(pipeline, cv_scores) pipeline_id = len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] = { \"id\": pipeline_id,", "each pipeline, and `search_order`: a list describing the order the", "shuffle=True, random_seed=self.random_seed) self.data_splitter = self.data_splitter or default_data_splitter self.pipeline_parameters = pipeline_parameters", "False if run_ensembling and self.max_iterations is not None: # Baseline", "objective names allowed in automl.\") return objective() return objective def", "qualify as score improvement for early stopping. Only applicable if", "does not work well with instance attributes. plot = PipelineSearchPlots", "See evalml.problem_types.ProblemType.all_problem_types for a full list. objective (str, ObjectiveBase): The", "over. If the number of unique pipelines to search over", "data_splitter self.optimize_thresholds = optimize_thresholds self.ensembling = ensembling if objective ==", "of iterations without improvement to stop search early. 
Must be", "start_iteration_callback=None, add_result_callback=None, error_callback=None, additional_objectives=None, random_seed=0, n_jobs=-1, tuner_class=None, optimize_thresholds=True, ensembling=False, max_batches=None,", "function takes three positional parameters: A dictionary containing the training", "f\"Optimize Thresholds: {self.optimize_thresholds}\\n\" ) rankings_desc = \"\" if not self.rankings.empty:", "best_score) > self.tolerance score_improved = curr_score > best_score if self.objective.greater_is_better", "# but also fields like \"# Training\" and \"# Testing\",", "\"\"\"Fits and evaluates a given pipeline then adds the results", "None, which will call `log_error_callback`. additional_objectives (list): Custom set of", "if not loop_interrupted: current_batch_pipelines = self._automl_algorithm.next_batch() except StopIteration: logger.info('AutoML Algorithm", "the Exception raised, the traceback, and the AutoMLSearch object. Must", "iterations to search. If max_iterations and max_time is not set,", "trained instance of the best pipeline and parameters found during", "loop does not count towards the time budget (if set)", "\"\"\" return self._engine.train_batch(pipelines) def score_pipelines(self, pipelines, X_holdout, y_holdout, objectives): \"\"\"Score", "to search over...\") allowed_estimators = get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators set to", "skipping pipeline search plotting\\n\") self.allowed_pipelines = allowed_pipelines self.allowed_model_families = allowed_model_families", "validation, etc. 
\"\"\" if pipeline_id not in self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline", "for objective, score in scores.items()} def _post_evaluation_callback(self, pipeline, evaluation_results): training_time", "if variance is higher than specified threshhold.\"\"\" pipeline_name = pipeline.name", "\"\"\"Given the original stopping criterion and current state, should the", "% \", \".join([model.value for model in self.allowed_model_families])) self.search_iteration_plot = None", "pipeline_class(parameters, random_seed=self.random_seed) def describe_pipeline(self, pipeline_id, return_dict=False): \"\"\"Describe a pipeline Arguments:", "std / mean if abs(mean) > 0 else np.inf all_objective_scores", "0: return True # check max_time and max_iterations elapsed =", "max_iterations self.max_batches = max_batches self._pipelines_per_batch = _pipelines_per_batch if not self.max_iterations", "evalml.automl.utils import ( check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter ) from evalml.exceptions import", "cv_score = cv_scores.mean() percent_better_than_baseline = {} mean_cv_all_objectives = self._get_mean_cv_scores_for_all_objectives(cv_data, self.objective_name_to_class)", "self.search_iteration_plot = None self._interrupted = False if self.allowed_pipelines is None:", "pipelines. This will not start a new pipeline search after", "check max_time and max_iterations elapsed = time.time() - self._start if", "{} def _handle_keyboard_interrupt(self): \"\"\"Presents a prompt to the user asking", "self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set to {self.allowed_model_families}\") if len(self.problem_configuration): pipeline_params = {**{'pipeline':", "If None, early stopping is disabled. Defaults to None. 
tolerance", "PipelineSearchPlots from evalml.automl.automl_algorithm import IterativeAlgorithm from evalml.automl.callbacks import log_error_callback from", "and (tolerance > 1.0 or tolerance < 0.0): raise ValueError(\"tolerance", "is None: raise PipelineNotFoundError(\"Pipeline not found in automl results\") pipeline_class", "self._start = 0.0 self._baseline_cv_scores = {} self.show_batch_output = False self._validate_problem_type()", "describing the order the pipelines were searched. \"\"\" return copy.deepcopy(self._results)", "search for pipelines. This will not start a new pipeline", "object. add_result_callback (callable): Function called after each pipeline training iteration.", "encoded show_iteration_plot (boolean, True): Shows an iteration vs. score plot", "- start_of_loop self._start += time_in_loop return False else: leading_char =", "train. Returns: Dict[str, PipelineBase]: Dictionary keyed by pipeline name that", "as a 2d array using the X_train argument') if y_train", "< 0: raise ValueError(f\"Parameter max_time must be None or non-negative.", "(int): pipeline to describe return_dict (bool): If True, return dictionary", "= curr_score num_without_improvement = 0 else: num_without_improvement += 1 if", "if return_dict: return pipeline_results def add_to_rankings(self, pipeline): \"\"\"Fits and evaluates", "be a dict containing values for at least the gap", "level of parallelism used for pipelines. 
None and 1 are", "\"\"\"Finds the best pipeline in the rankings If self._best_pipeline already", "PipelineNotFoundError(\"Pipeline class or parameters not found in automl results\") return", "if self.problem_type == ProblemTypes.BINARY: baseline = ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type ==", "untrained instance of the specified pipeline initialized with the parameters", "objective = get_objective(objective, return_instance=False) self.objective = self._validate_objective(objective) if self.data_splitter is", "not self.rankings.empty: rankings_str = self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc = f\"\\nSearch Results:", "for problem_type, remove it existing_main_objective = next((obj for obj in", "return True elif choice == \"n\": # So that the", "on the primary objective {self.objective}.\") self.search_duration = time.time() - self._start", "parameter in pipeline_rows['parameters']: if pipeline.parameters == parameter: return self._engine.evaluate_batch([pipeline]) self._find_best_pipeline()", "1): raise ValueError(f\"Ensembling split size must be between 0 and", "describe_pipeline(self, pipeline_id, return_dict=False): \"\"\"Describe a pipeline Arguments: pipeline_id (int): pipeline", "= [\"id\", \"pipeline_name\", \"score\", \"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"] if not", "= list(set([p.model_family for p in (self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set to {[pipeline.name", "pipeline {self.objective.name}: {best_pipeline['score']:3f}\") self._searched = True def _find_best_pipeline(self): \"\"\"Finds the", "pipeline initialized with the parameters used to train that pipeline", "specified as seconds, minutes, or hours. 
patience (int): Number of", "= True if self._handle_keyboard_interrupt(): break full_rankings = self.full_rankings current_batch_idx =", "f\"Data Splitting: {self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\" f\"Start Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add", "np.nan)) percent_better_than_baseline[obj_name] = percent_better high_variance_cv = self._check_for_high_variance(pipeline, cv_scores) pipeline_id =", "keyed by pipeline name that maps to the fitted pipeline.", "Dict[str, Dict[str, float]]: Dictionary keyed by pipeline name that maps", "n_splits=3, shuffle=True, random_seed=self.random_seed) self.data_splitter = self.data_splitter or default_data_splitter self.pipeline_parameters =", "Required. y_train (pd.Series, ww.DataColumn): The target training data of length", "(if set) time_in_loop = time.time() - start_of_loop self._start += time_in_loop", "self.y_train = infer_feature_types(y_train) self.ensembling_indices = None default_data_splitter = make_data_splitter(self.X_train, self.y_train,", "current_batch_pipeline_scores = [] new_pipeline_ids = [] loop_interrupted = False while", "non_core_objectives: raise ValueError(f\"{objective.name.lower()} is not allowed in AutoML! \" \"Use", "file pickle_protocol (int): the pickle data stream format. Returns: None", "PipelineBase subclasses indicating the pipelines allowed in the search. The", "curr_score > best_score if self.objective.greater_is_better else curr_score < best_score if", "\"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores = defaultdict(int)", "= self.full_rankings current_batch_idx = full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores = full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores)", "the AutoMLSearch object. 
add_result_callback (callable): Function called after each pipeline", "pipelines): \"\"\"Train a list of pipelines on the training data.", "open(file_path, 'wb') as f: cloudpickle.dump(self, f, protocol=pickle_protocol) @staticmethod def load(file_path):", "evalml.model_family import ModelFamily from evalml.objectives import ( get_core_objectives, get_non_core_objectives, get_objective", "the search. problem_configuration (dict, None): Additional parameters needed to configure", "pipeline_name = pipeline.name high_variance_cv = bool(abs(cv_scores.std() / cv_scores.mean()) > threshold)", "(tolerance > 1.0 or tolerance < 0.0): raise ValueError(\"tolerance value", "[make_pipeline(self.X_train, self.y_train, estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters) for estimator in allowed_estimators] if", "self.tolerance score_improved = curr_score > best_score if self.objective.greater_is_better else curr_score", "raise ValueError(\"No allowed pipelines to search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling = self.ensembling", "Arguments: feature_types (list, optional): list of feature types, either numerical", "is set to True, but max_batches is too small, so", "= self.pipeline_parameters self._automl_algorithm = IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs,", "instead\".format(patience)) if tolerance and (tolerance > 1.0 or tolerance <", "y_train argument') try: self.problem_type = handle_problem_types(problem_type) except ValueError: raise ValueError('choose", "the number of pipeline evaluations made in the search \"\"\"", "pipeline before returning it. Defaults to True. 
pipeline_parameters (dict): A", "early stopping if self.patience is None or self.tolerance is None:", "parameters used with each pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property def", "at least the gap and max_delay \" f\"parameters. Received {problem_configuration}.\")", "same instance. Re-initialize AutoMLSearch to search again.\") return # don't", "additional_objectives] self.additional_objectives = additional_objectives self.objective_name_to_class = {o.name: o for o", "integer. Received {} instead\".format(patience)) if tolerance and (tolerance > 1.0", "None or non-negative. Received {max_time}.\") if max_batches is not None", "of batches of pipelines to search. Parameters max_time, and max_iterations", "set to {[estimator.name for estimator in allowed_estimators]}\") self.allowed_pipelines = [make_pipeline(self.X_train,", "to {[pipeline.name for pipeline in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set to {self.allowed_model_families}\")", "return search_desc + rankings_desc def _validate_problem_configuration(self, problem_configuration=None): if self.problem_type in", "of max_batches=1.\\n\") if patience and (not isinstance(patience, int) or patience", "self.max_iterations is not None: logger.info(\"Searching up to %s pipelines. \"", "nan for the base score. percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan))", "get_logger(__file__) class AutoMLSearch: \"\"\"Automated Pipeline search.\"\"\" _MAX_NAME_LEN = 40 #", "fit during search. \"\"\" if self.problem_type == ProblemTypes.BINARY: baseline =", "self._results['pipeline_results']: return pd.DataFrame(columns=full_rankings_cols) rankings_df = pd.DataFrame(self._results['pipeline_results'].values()) rankings_df = rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\",", "pipeline and parameters found during automl search. 
If `train_best_pipeline` is", "n_jobs=-1, tuner_class=None, optimize_thresholds=True, ensembling=False, max_batches=None, problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5):", "set of objectives for problem_type, remove it existing_main_objective = next((obj", "float]]: Dictionary keyed by pipeline name that maps to a", "= ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type == ProblemTypes.MULTICLASS: baseline = ModeBaselineMulticlassPipeline(parameters={}) elif", "\"pipeline_class\": type(pipeline), \"pipeline_summary\": pipeline.summary, \"parameters\": pipeline.parameters, \"score\": cv_score, \"high_variance_cv\": high_variance_cv,", "pipeline iteration + 1 first_ensembling_iteration = (1 + len(self.allowed_pipelines) +", "max_iterations < 0: raise ValueError(f\"Parameter max_iterations must be None or", "load(file_path): \"\"\"Loads AutoML object at file path Arguments: file_path (str):", "and max_iterations have precedence over stopping the search. problem_configuration (dict,", "ww.DataColumn): The target training data of length [n_samples]. 
Required for", "self._automl_algorithm.next_batch() except StopIteration: logger.info('AutoML Algorithm out of recommendations, ending') break", "if best_pipeline.model_family == ModelFamily.ENSEMBLE: X_train, y_train = self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else:", "one of (binary, multiclass, regression) as problem_type') self.tuner_class = tuner_class", "== \"n\": # So that the time in this loop", "if len(desc) > AutoMLSearch._MAX_NAME_LEN: desc = desc[:AutoMLSearch._MAX_NAME_LEN - 3] +", "[\"# Training\", \"# Validation\"]: all_objective_scores[c] = all_objective_scores[c].astype(\"object\") continue mean =", "pipeline.model_family == ModelFamily.ENSEMBLE: input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family in self._automl_algorithm._best_pipeline_info]", "str): Maximum time to search for pipelines. This will not", "supervised learning problem. See evalml.problem_types.ProblemType.all_problem_types for a full list. objective", "multiclass classification problems, and - R2 for regression problems. max_iterations", "= get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators set to {[estimator.name for estimator in", "method to use. Defaults to StratifiedKFold. tuner_class: The tuner class", "len(self.allowed_pipelines) + 1 num_ensemble_batches = (self.max_batches - 1) // ensemble_nth_batch", "== ProblemTypes.REGRESSION: baseline = MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline,", "\"\" if not self.rankings.empty: rankings_str = self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc =", "PipelineBase: untrained pipeline instance associated with the provided ID \"\"\"", "y_train (pd.Series, ww.DataColumn): The target training data of length [n_samples].", "new pipeline search after the duration has elapsed. 
If it", "be between 0 and 1 exclusive, received {_ensembling_split_size}\") X_shape =", "search Arguments: X_train (pd.DataFrame, ww.DataTable): The input training data of", "% pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross Validation\", underline=\"-\") all_objective_scores = [fold[\"all_objective_scores\"] for", "all_objective_scores: if c in [\"# Training\", \"# Validation\"]: all_objective_scores[c] =", "again on the same instance. Re-initialize AutoMLSearch to search again.\")", "pipelines with IDs: \" + str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\") logger.info(\"Training for", "the best pipeline and parameters found during automl search. If", "attributes. plot = PipelineSearchPlots def __init__(self, X_train=None, y_train=None, problem_type=None, objective='auto',", "\"\"\"Loads AutoML object at file path Arguments: file_path (str): location", "self.problem_type == ProblemTypes.BINARY: baseline = ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type == ProblemTypes.MULTICLASS:", "vector using the y_train argument') try: self.problem_type = handle_problem_types(problem_type) except", "std = all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c] = mean all_objective_scores.loc[\"std\", c] =", "None searches over all model families. Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see", "time from collections import defaultdict import cloudpickle import numpy as", "search. Returns: bool: If True, search should terminate early \"\"\"", "pandas.DataFrame with scoring results from the highest-scoring set of parameters", "rankings If self._best_pipeline already exists, check to make sure it", "A dictionary containing the training results for the new pipeline,", "Only applicable if patience is not None. 
Defaults to None.", "n_jobs self.plot = None try: self.plot = PipelineSearchPlots(self) except ImportError:", "= IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling,", "the user asking if they want to stop the search.", "start_of_loop self._start += time_in_loop return False else: leading_char = \"\"", "least one pipeline for every search num_pipelines = self._num_pipelines() if", "num_without_improvement >= self.patience: logger.info(\"\\n\\n{} iterations without improvement. Stopping search early...\".format(self.patience))", "except ValueError: raise ValueError('choose one of (binary, multiclass, regression) as", "(int): The number of pipelines to train for every batch", "you really want to exit search (y/n)? \").strip().lower() if choice", "Re-initialize AutoMLSearch to search again.\") return # don't show iteration", "X_shape = ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices, _, _ = split_data(X_shape, self.y_train,", "try: new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted = False except KeyboardInterrupt: loop_interrupted", "we want to exclude them since # they are not", "\"pipeline_summary\": pipeline.summary, \"parameters\": pipeline.parameters, \"score\": cv_score, \"high_variance_cv\": high_variance_cv, \"training_time\": training_time,", "evalml.automl.automl_algorithm import IterativeAlgorithm from evalml.automl.callbacks import log_error_callback from evalml.automl.engine import", "None: raise ValueError('Must specify training data as a 2d array", "self.max_iterations is None: self.show_batch_output = True if run_ensembling: ensemble_nth_batch =", "selecting `best_pipeline`.\") return self._best_pipeline def save(self, file_path, 
pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML", "None: logger.info(f\"Searching up to {self.max_batches} batches for a total of", "copy.deepcopy(self._results) @property def rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results", "search. \"\"\" if X_train is None: raise ValueError('Must specify training", "IDs: \" + str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\") logger.info(\"Training for {} problems.\".format(pipeline.problem_type))", "non-negative. Received {max_time}.\") if max_batches is not None and max_batches", "\"\"\" return copy.deepcopy(self._results) @property def rankings(self): \"\"\"Returns a pandas.DataFrame with", "pd.DataFrame): Holdout features. y_holdout (ww.DataTable, pd.DataFrame): Holdout targets for scoring.", "1 + len(self.allowed_pipelines) + (self._pipelines_per_batch * (self.max_batches - 1)) if", "raise ValueError(\"patience value must be a positive integer. Received {}", "and score pipelines.\") if self.max_batches is not None: logger.info(f\"Searching up", "pipeline_results.get('parameters') if pipeline_class is None or parameters is None: raise", "pipelines on the training data. This can be helpful for", "Received {max_time}.\") if max_batches is not None and max_batches <", "allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None, add_result_callback=None, error_callback=None, additional_objectives=None, random_seed=0, n_jobs=-1, tuner_class=None, optimize_thresholds=True,", "full list. 
objective (str, ObjectiveBase): The objective to optimize for.", "from evalml.automl.engine import SequentialEngine from evalml.automl.utils import ( check_all_pipeline_names_unique, get_default_primary_search_objective,", "may not perform as estimated on unseen data.\") return high_variance_cv", "number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params ) def _pre_evaluation_callback(self, pipeline): if self.start_iteration_callback:", "self._automl_algorithm is not None and self._automl_algorithm.batch_number > 0: batch_number =", "elif choice == \"n\": # So that the time in", "with each pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property def full_rankings(self): \"\"\"Returns", "train_pipelines(self, pipelines): \"\"\"Train a list of pipelines on the training", "logger.warning(f\"Ensembling is set to True, but max_batches is too small,", "don't show iteration plot outside of a jupyter notebook if", "as f: return cloudpickle.load(f) def train_pipelines(self, pipelines): \"\"\"Train a list", "enviroments. 
\"\"\" if self._searched: logger.info(\"AutoMLSearch.search() has already been run and", "SKOptTuner from evalml.utils import convert_to_seconds, infer_feature_types from evalml.utils.logger import (", "received {_ensembling_split_size}\") X_shape = ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices, _, _ =", "logger.warning(f\"High coefficient of variation (cv >= {threshold}) within cross validation", "or [])}\\n\" f\"Random Seed: {self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize Thresholds: {self.optimize_thresholds}\\n\"", "dict with results from each pipeline, and `search_order`: a list", "baseline = pipeline_class(parameters={\"pipeline\": {\"gap\": gap, \"max_delay\": max_delay}, \"Time Series Baseline", "axis='columns').to_string() rankings_desc = f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\" return search_desc + rankings_desc", "max_time is not set, then max_iterations will default to max_iterations", "max_batches=1.\\n\") if patience and (not isinstance(patience, int) or patience <", "not compatible with problem_type {}.\".format(pipeline.name, self.problem_type.value)) def _add_baseline_pipelines(self): \"\"\"Fits a", "while True: choice = input(leading_char + \"Do you really want", "if c in [\"# Training\", \"# Validation\"]: all_objective_scores[c] = all_objective_scores[c].astype(\"object\")", "and rank pipelines, but not for optimizing each pipeline during", "self.problem_type, custom_hyperparameters=self.pipeline_parameters) for estimator in allowed_estimators] if self.allowed_pipelines == []:", "infer_feature_types(y_train) self.ensembling_indices = None default_data_splitter = make_data_splitter(self.X_train, self.y_train, self.problem_type, self.problem_configuration,", "= desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number = 1 if self._automl_algorithm is not None", "obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional objective {} is not compatible with a", "evaluations which 
have been made Returns: int: the number of", "the gap and max_delay \" f\"parameters. Received {problem_configuration}.\") return problem_configuration", "since # they are not scores if field in objective_name_to_class:", "_baseline_cv_scores will be empty so we will return # nan", "( check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter ) from evalml.exceptions import AutoMLSearchException, PipelineNotFoundError", "return pd.DataFrame(columns=full_rankings_cols) rankings_df = pd.DataFrame(self._results['pipeline_results'].values()) rankings_df = rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending,", "(str): location to find file to load Returns: AutoSearchBase object", "issubclass(self.data_splitter.__class__, BaseCrossValidator): raise ValueError(\"Not a valid data splitter\") if not", "user asking if they want to stop the search. Returns:", "a dict containing values for at least the gap and", "are not scores if field in objective_name_to_class: scores[field] += value", "compatible with a {} problem.\".format(self.objective.name, self.problem_type.value)) if additional_objectives is None:", "the results to the automl rankings with the requirement that", "target training data of length [n_samples]. Required for supervised learning", "self.rankings.empty: rankings_str = self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc = f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\"", "The input training data of shape [n_samples, n_features]. Required. y_train", "that if allowed_pipelines is provided, this parameter will be ignored.", "None or non-negative. Received {max_batches}.\") if max_iterations is not None", "logger.info(\"Using default limit of max_batches=1.\\n\") if patience and (not isinstance(patience,", "= (1 + len(self.allowed_pipelines) + self._pipelines_per_batch * (self.max_batches - 1", "search continue? 
Returns: bool: True if yes, False if no.", "return high_variance_cv def get_pipeline(self, pipeline_id): \"\"\"Given the ID of a", "to retrieve Returns: PipelineBase: untrained pipeline instance associated with the", "required_parameters): raise ValueError(\"user_parameters must be a dict containing values for", "self.search_duration = time.time() - self._start elapsed_time = time_elapsed(self._start) desc =", "= \"\\n\" start_of_loop = time.time() while True: choice = input(leading_char", "while self._should_continue(): try: if not loop_interrupted: current_batch_pipelines = self._automl_algorithm.next_batch() except", "else {} self.search_iteration_plot = None self._interrupted = False if self.allowed_pipelines", "to search. Parameters max_time, and max_iterations have precedence over stopping", "pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families = list(set([p.model_family for p in (self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines", "pipeline_class(parameters={\"pipeline\": {\"gap\": gap, \"max_delay\": max_delay}, \"Time Series Baseline Estimator\": {\"gap\":", "None \"\"\" with open(file_path, 'wb') as f: cloudpickle.dump(self, f, protocol=pickle_protocol)", "= self.problem_configuration['gap'] max_delay = self.problem_configuration['max_delay'] baseline = pipeline_class(parameters={\"pipeline\": {\"gap\": gap,", "self.allowed_model_families = allowed_model_families self._automl_algorithm = None self._start = 0.0 self._baseline_cv_scores", "(list(class)): A list of PipelineBase subclasses indicating the pipelines allowed", "import ( check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter ) from evalml.exceptions import AutoMLSearchException,", "(self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set to {[pipeline.name for pipeline in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families", "does not count 
towards the time budget (if set) time_in_loop", "cv_data: for field, value in fold_data['all_objective_scores'].items(): # The 'all_objective_scores' field", "stopping. Only applicable if patience is not None. Defaults to", "number of unique pipelines to search over per batch is", "batch + each pipeline iteration + 1 first_ensembling_iteration = (1", "self._start += time_in_loop return False else: leading_char = \"\" def", "SequentialEngine(self.X_train, self.y_train, self.ensembling_indices, self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families = list(set([p.model_family", "search. If `train_best_pipeline` is set to False, returns an untrained", "first_ensembling_iteration: run_ensembling = False logger.warning(f\"Ensembling is set to True, but", "logger = get_logger(__file__) class AutoMLSearch: \"\"\"Automated Pipeline search.\"\"\" _MAX_NAME_LEN =", "maximum number of batches of pipelines to search. 
Parameters max_time,", "ProblemTypes, handle_problem_types from evalml.tuners import SKOptTuner from evalml.utils import convert_to_seconds,", "self.problem_type.value)) if additional_objectives is None: additional_objectives = get_core_objectives(self.problem_type) # if", "get_core_objectives(self.problem_type) # if our main objective is part of default", "and 1 exclusive, received {_ensembling_split_size}\") X_shape = ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices,", "desc, len(self._results['pipeline_results']) + 1, self.max_iterations, self._start, batch_number, self.show_batch_output) def _validate_objective(self,", "'Lower')) logger.info(f\"Using {self._engine.__class__.__name__} to train and score pipelines.\") if self.max_batches", "None: return True first_id = self._results['search_order'][0] best_score = self._results['pipeline_results'][first_id]['score'] num_without_improvement", "a 2d array using the X_train argument') if y_train is", "if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self) return pipeline_id def _check_for_high_variance(self, pipeline,", "pipeline components, problem, training time, cross validation, etc. \"\"\" if", "{} problems.\".format(pipeline.problem_type)) if self.optimize_thresholds and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold: logger.info(\"Objective to", "_validate_objective(self, objective): non_core_objectives = get_non_core_objectives() if isinstance(objective, type): if objective", "set) time_in_loop = time.time() - start_of_loop self._start += time_in_loop return", "self.rankings.drop(['parameters'], axis='columns').to_string() rankings_desc = f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\" return search_desc +", "searched. 
\"\"\" return copy.deepcopy(self._results) @property def rankings(self): \"\"\"Returns a pandas.DataFrame", "if num_without_improvement >= self.patience: logger.info(\"\\n\\n{} iterations without improvement. Stopping search", "cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0] } if pipeline.model_family", "None, early stopping is disabled. Defaults to None. tolerance (float):", "for fold_data in cv_data: for field, value in fold_data['all_objective_scores'].items(): #", "evalml.problem_types.ProblemType.all_problem_types for a full list. objective (str, ObjectiveBase): The objective", "None. allowed_pipelines (list(class)): A list of PipelineBase subclasses indicating the", "copy import time from collections import defaultdict import cloudpickle import", "max_iterations will default to max_iterations of 5. max_time (int, str):", "run_ensembling and len(self.allowed_pipelines) == 1: logger.warning(\"Ensembling is set to True,", "= MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY:", "return True # Run at least one pipeline for every", "def _check_for_high_variance(self, pipeline, cv_scores, threshold=0.2): \"\"\"Checks cross-validation scores and logs", "from collections import defaultdict import cloudpickle import numpy as np", "False elif self.max_iterations and num_pipelines >= self.max_iterations: return False #", "True, runs ensembling in a separate batch after every allowed", ") from evalml.pipelines.components.utils import get_estimators from evalml.pipelines.utils import make_pipeline from", "not None: logger.info(\"Searching up to %s pipelines. 
\" % self.max_iterations)", "problem_configuration for p in required_parameters): raise ValueError(\"user_parameters must be a", "return '\\n'.join(lines) def _get_funct_name(function): if callable(function): return function.__name__ else: return", "`log_error_callback`. additional_objectives (list): Custom set of objectives to score on.", "(1 + len(self.allowed_pipelines) + len(self.allowed_pipelines) * self._pipelines_per_batch + 1) if", "ValueError(f\"Parameter max_time must be None or non-negative. Received {max_time}.\") if", "additional_objectives self.objective_name_to_class = {o.name: o for o in [self.objective] +", "import SequentialEngine from evalml.automl.utils import ( check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter )", "cv_scores.mean()) > threshold) if high_variance_cv: logger.warning(f\"High coefficient of variation (cv", "pipeline_results.get('pipeline_class') parameters = pipeline_results.get('parameters') if pipeline_class is None or parameters", "pipeline_id = len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] = { \"id\": pipeline_id, \"pipeline_name\": pipeline.name,", "of pipelines on the given holdout data. Arguments: pipelines (list(PipelineBase)):", "f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max Time: {self.max_time}\\n\" f\"Max", "aside for training ensemble metalearners. 
Only used when ensembling is", "add_to_rankings is called before search _baseline_cv_scores will be empty so", "patience < 0): raise ValueError(\"patience value must be a positive", ">= {ensemble_nth_batch + 1} to run ensembling.\") else: logger.info(f\"Ensembling will", "if hasattr(self.data_splitter, \"transform_sample\"): train_indices = self.data_splitter.transform_sample(X_train, y_train) X_train = X_train.iloc[train_indices]", "parameters not found in automl results\") return pipeline_class(parameters, random_seed=self.random_seed) def", "False, returns an untrained pipeline instance. Returns: PipelineBase: A trained", "is set to True, but the number of unique pipelines", "of pipelines to search. Parameters max_time, and max_iterations have precedence", "(int): Maximum number of iterations to search. If max_iterations and", "self._baseline_cv_scores = mean_cv_all_objectives for obj_name in mean_cv_all_objectives: objective_class = self.objective_name_to_class[obj_name]", "pipeline name that maps to a dictionary of scores. Note", "default to max_iterations of 5. max_time (int, str): Maximum time", "optimize for. Used to propose and rank pipelines, but not", "for. Used to propose and rank pipelines, but not for", "(list): Custom set of objectives to score on. Will override", "or non-negative. Received {max_iterations}.\") self.max_time = convert_to_seconds(max_time) if isinstance(max_time, str)", "optimizing each pipeline during fit-time. When set to 'auto', chooses:", "self._pipelines_per_batch * (self.max_batches - 1 - num_ensemble_batches) + num_ensemble_batches) else:", "y_train.iloc[train_indices] best_pipeline = self._engine.train_pipeline(best_pipeline, X_train, y_train, self.optimize_thresholds, self.objective) self._best_pipeline =", "returns an untrained pipeline instance. 
\"\"\" if not self._best_pipeline: raise", "and len(self.allowed_pipelines) == 1: logger.warning(\"Ensembling is set to True, but", "Arguments: pipeline_id (int): pipeline to describe return_dict (bool): If True,", "@property def rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results from", "and current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All pipelines in the current AutoML batch", "field in objective_name_to_class: scores[field] += value return {objective: float(score) /", "or []: if pipeline.problem_type != self.problem_type: raise ValueError(\"Given pipeline {}", "of specified pipeline. Includes information such as type of pipeline", "automl results\") pipeline_class = pipeline_results.get('pipeline_class') parameters = pipeline_results.get('parameters') if pipeline_class", "MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline ) from evalml.pipelines.components.utils import", "for a total of {self.max_iterations} pipelines. \") elif self.max_iterations is", "True. Must be between 0 and 1, exclusive. Defaults to", "kwargs, so AutoMLSearch is able to pass along other appropriate", "+ len(self.allowed_pipelines) + self._pipelines_per_batch * (self.max_batches - 1 - num_ensemble_batches)", "{type(max_time)} with value {str(max_time)}..\") if isinstance(max_time, (int, float)) and max_time", "not isinstance(max_time, (int, float, str, type(None))): raise TypeError(f\"Parameter max_time must", "(0 < _ensembling_split_size < 1): raise ValueError(f\"Ensembling split size must", "should the search continue? 
Returns: bool: True if yes, False", "be included in the dictionary but the exception and stacktrace", "ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type == ProblemTypes.MULTICLASS: baseline = ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type", "+ rankings_desc def _validate_problem_configuration(self, problem_configuration=None): if self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters", "f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines or [])}\\n\" f\"Patience: {self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\" f\"Data", "pipeline_id): \"\"\"Given the ID of a pipeline training result, returns", "Shows an iteration vs. score plot in Jupyter notebook. Disabled", "to {[estimator.name for estimator in allowed_estimators]}\") self.allowed_pipelines = [make_pipeline(self.X_train, self.y_train,", "not set, then max_iterations will default to max_iterations of 5.", "else cv_score try: self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError: pass if", "is not None: additional_objectives.remove(existing_main_objective) else: additional_objectives = [get_objective(o) for o", "For strings, time can be specified as seconds, minutes, or", "+ 1 + n_jobs) are used. ensembling (boolean): If True,", "StopIteration: logger.info('AutoML Algorithm out of recommendations, ending') break try: new_pipeline_ids", "Sphinx does not work well with instance attributes. 
plot =", "self.X_train y_train = self.y_train if hasattr(self.data_splitter, \"transform_sample\"): train_indices = self.data_splitter.transform_sample(X_train,", "but max_batches is too small, so ensembling will not run.", "to search again.\") return # don't show iteration plot outside", "a baseline pipline + one of each pipeline family allowed", "pipeline): \"\"\"Fits and evaluates a given pipeline then adds the", "\"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"] if not self._results['pipeline_results']: return pd.DataFrame(columns=full_rankings_cols) rankings_df", "search again.\") return # don't show iteration plot outside of", "{problem_configuration}.\") return problem_configuration or {} def _handle_keyboard_interrupt(self): \"\"\"Presents a prompt", "= data_splitter self.optimize_thresholds = optimize_thresholds self.ensembling = ensembling if objective", "in all_objective_scores: if c in [\"# Training\", \"# Validation\"]: all_objective_scores[c]", "= self._num_pipelines() if num_pipelines == 0: return True # check", "time to search for pipelines. 
This will not start a", "0 else: num_without_improvement += 1 if num_without_improvement >= self.patience: logger.info(\"\\n\\n{}", "= all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c] = mean all_objective_scores.loc[\"std\", c] = std", "Returns: AutoSearchBase object \"\"\" with open(file_path, 'rb') as f: return", "return problem_configuration or {} def _handle_keyboard_interrupt(self): \"\"\"Presents a prompt to", "None: additional_objectives.remove(existing_main_objective) else: additional_objectives = [get_objective(o) for o in additional_objectives]", "\"\"\" pipeline_results = self.results['pipeline_results'].get(pipeline_id) if pipeline_results is None: raise PipelineNotFoundError(\"Pipeline", "**self.pipeline_parameters} else: pipeline_params = self.pipeline_parameters self._automl_algorithm = IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines,", "types, either numerical or categorical. Categorical features will automatically be", "or default_data_splitter self.pipeline_parameters = pipeline_parameters if pipeline_parameters is not None", "p in (self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set to {[pipeline.name for pipeline in", "is not None. Defaults to None. allowed_pipelines (list(class)): A list", "# Run at least one pipeline for every search num_pipelines", "unique pipelines to search over per batch is one, ensembling", "should be passed in for the gap and max_delay variables.", "{max_batches}.\") if max_iterations is not None and max_iterations < 0:", "KeyboardInterrupt: loop_interrupted = True if self._handle_keyboard_interrupt(): break full_rankings = self.full_rankings", "but the exception and stacktrace will be displayed in the", "scores = defaultdict(int) n_folds = len(cv_data) for fold_data in cv_data:", "\"\"\"Train a list of pipelines on the training data. 
This", "{elapsed_time}\" desc = desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline() if self._best_pipeline is not", "raise PipelineNotFoundError(\"automl search must be run before selecting `best_pipeline`.\") return", "list. objective (str, ObjectiveBase): The objective to optimize for. Used", "in time series problems, values should be passed in for", "= False logger.warning(f\"Ensembling is set to True, but max_batches is", "if score_improved and significant_change: best_score = curr_score num_without_improvement = 0", "cause allowed_model_families to be ignored. allowed_model_families (list(str, ModelFamily)): The model", "self._find_best_pipeline() if self._best_pipeline is not None: best_pipeline = self.rankings.iloc[0] best_pipeline_name", "== ProblemTypes.BINARY: baseline = ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type == ProblemTypes.MULTICLASS: baseline", "if tolerance and (tolerance > 1.0 or tolerance < 0.0):", "search early...\".format(self.patience)) return False return True def _validate_problem_type(self): for obj", "False self.X_train = infer_feature_types(X_train) self.y_train = infer_feature_types(y_train) self.ensembling_indices = None", "This is the first pipeline fit during search. \"\"\" if", "if self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters = {'gap', 'max_delay'} if not", "ModelFamily.ENSEMBLE: input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] =", "(int, str): Maximum time to search for pipelines. 
This will", "> AutoMLSearch._MAX_NAME_LEN: desc = desc[:AutoMLSearch._MAX_NAME_LEN - 3] + \"...\" desc", "self.allowed_pipelines is None: logger.info(\"Generating pipelines to search over...\") allowed_estimators =", "Returns: None \"\"\" with open(file_path, 'wb') as f: cloudpickle.dump(self, f,", "and not self.max_batches: self.max_batches = 1 logger.info(\"Using default limit of", "desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number = 1 if self._automl_algorithm is not", "\"parameters\": pipeline.parameters, \"score\": cv_score, \"high_variance_cv\": high_variance_cv, \"training_time\": training_time, \"cv_data\": cv_data,", "the traceback, and the AutoMLSearch object. Must also accepts kwargs,", "not (0 < _ensembling_split_size < 1): raise ValueError(f\"Ensembling split size", "ensembling is True. Must be between 0 and 1, exclusive.", "that the any pipelines that error out during training will", "in a separate batch after every allowed pipeline class has", "number of iterations to search. 
If max_iterations and max_time is", "{_ensembling_split_size}\") X_shape = ww.DataTable(np.arange(self.X_train.shape[0])) _, ensembling_indices, _, _ = split_data(X_shape,", "= additional_objectives self.objective_name_to_class = {o.name: o for o in [self.objective]", "num_ensemble_batches == 0: run_ensembling = False logger.warning(f\"Ensembling is set to", "def _validate_objective(self, objective): non_core_objectives = get_non_core_objectives() if isinstance(objective, type): if", "in allowed_estimators]}\") self.allowed_pipelines = [make_pipeline(self.X_train, self.y_train, estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters) for", "pipeline_id, return_dict=False): \"\"\"Describe a pipeline Arguments: pipeline_id (int): pipeline to", "value must be a float between 0.0 and 1.0 inclusive.", "get_default_primary_search_objective, make_data_splitter ) from evalml.exceptions import AutoMLSearchException, PipelineNotFoundError from evalml.model_family", "BaseCrossValidator from .pipeline_search_plots import PipelineSearchPlots from evalml.automl.automl_algorithm import IterativeAlgorithm from", "asking if they want to stop the search. 
Returns: bool:", "TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline ) from evalml.pipelines.components.utils import get_estimators from evalml.pipelines.utils", "outside of a jupyter notebook if show_iteration_plot: try: get_ipython except", "in cv_data: for field, value in fold_data['all_objective_scores'].items(): # The 'all_objective_scores'", "numpy as np import pandas as pd import woodwork as", "percent_better high_variance_cv = self._check_for_high_variance(pipeline, cv_scores) pipeline_id = len(self._results['pipeline_results']) self._results['pipeline_results'][pipeline_id] =", "self._start = time.time() try: self._add_baseline_pipelines() except KeyboardInterrupt: if self._handle_keyboard_interrupt(): self._interrupted", "import SKOptTuner from evalml.utils import convert_to_seconds, infer_feature_types from evalml.utils.logger import", "Results: \\n{'='*20}\\n{rankings_str}\" return search_desc + rankings_desc def _validate_problem_configuration(self, problem_configuration=None): if", "int: the number of pipeline evaluations made in the search", "len(self._results['pipeline_results']) + 1, self.max_iterations, self._start, batch_number, self.show_batch_output) def _validate_objective(self, objective):", "first_ensembling_iteration = (1 + len(self.allowed_pipelines) + len(self.allowed_pipelines) * self._pipelines_per_batch +", "_ensembling_split_size (float): The amount of the training data we'll set", "for o in additional_objectives] additional_objectives = [self._validate_objective(obj) for obj in", "an untrained instance of the specified pipeline initialized with the", "'expand_frame_repr', False): logger.info(all_objective_scores) if return_dict: return pipeline_results def add_to_rankings(self, pipeline):", "ModelFamily.ENSEMBLE: logger.info(\"Input for ensembler are pipelines with IDs: \" +", "containing values for at least the gap and max_delay \"", "to train. 
X_holdout (ww.DataTable, pd.DataFrame): Holdout features. y_holdout (ww.DataTable, pd.DataFrame):", ") logger = get_logger(__file__) class AutoMLSearch: \"\"\"Automated Pipeline search.\"\"\" _MAX_NAME_LEN", "additional_objectives = get_core_objectives(self.problem_type) # if our main objective is part", "self.max_batches and self.max_iterations is None: self.show_batch_output = True if run_ensembling:", "included in the dictionary but the exception and stacktrace will", "exists, check to make sure it is different from the", "set to {self.allowed_model_families}\") if len(self.problem_configuration): pipeline_params = {**{'pipeline': self.problem_configuration}, **self.pipeline_parameters}", "for pipelines. This will not start a new pipeline search", "and will not run again on the same instance. Re-initialize", "a separate batch after every allowed pipeline class has been", "not None and max_iterations < 0: raise ValueError(f\"Parameter max_iterations must", "= f\"{pipeline.name}\" if len(desc) > AutoMLSearch._MAX_NAME_LEN: desc = desc[:AutoMLSearch._MAX_NAME_LEN -", "maps to the fitted pipeline. Note that the any pipelines", "(not isinstance(patience, int) or patience < 0): raise ValueError(\"patience value", "state, should the search continue? Returns: bool: True if yes,", "Holdout targets for scoring. objectives (list(str), list(ObjectiveBase)): Objectives used for", "by default. Defaults to None, which will call `log_error_callback`. additional_objectives", "the time will be in seconds. 
For strings, time can", "str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\") logger.info(\"Training for {} problems.\".format(pipeline.problem_type)) if self.optimize_thresholds and", "ValueError(\"Additional objective {} is not compatible with a {} problem.\".format(obj.name,", "all_objective_scores = pd.DataFrame(all_objective_scores) for c in all_objective_scores: if c in", "PipelineSearchPlots(self) except ImportError: logger.warning(\"Unable to import plotly; skipping pipeline search", "then max_iterations will default to max_iterations of 5. max_time (int,", "training data as a 2d array using the X_train argument')", "+ len(self.allowed_pipelines) + (self._pipelines_per_batch * (self.max_batches - 1)) if run_ensembling:", "model families. Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see options. Change `binary` to", "n_jobs (int or None): Non-negative integer describing level of parallelism", "every allowed pipeline class has been iterated over. If the", "= (1 + len(self.allowed_pipelines) + len(self.allowed_pipelines) * self._pipelines_per_batch + 1)", "model_family in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids self._results['search_order'].append(pipeline_id) if not is_baseline:", "if yes, False if no. \"\"\" if self._interrupted: return False", "and elapsed >= self.max_time: return False elif self.max_iterations and num_pipelines", "pipeline search plotting\\n\") self.allowed_pipelines = allowed_pipelines self.allowed_model_families = allowed_model_families self._automl_algorithm", "families: %s\\n\" % \", \".join([model.value for model in self.allowed_model_families])) self.search_iteration_plot", "by default in non-Jupyter enviroments. \"\"\" if self._searched: logger.info(\"AutoMLSearch.search() has", "pipelines once the search is complete. 
Arguments: pipelines (list(PipelineBase)): List", "part of default set of objectives for problem_type, remove it", "str) else max_time self.max_iterations = max_iterations self.max_batches = max_batches self._pipelines_per_batch", "if not is_baseline: score_to_minimize = -cv_score if self.objective.greater_is_better else cv_score", "of the specified pipeline initialized with the parameters used to", "_get_funct_name(function): if callable(function): return function.__name__ else: return None search_desc =", "// ensemble_nth_batch if num_ensemble_batches == 0: run_ensembling = False logger.warning(f\"Ensembling", "for model_family in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids self._results['search_order'].append(pipeline_id) if not", "(bool): If True, return dictionary of information about pipeline. Defaults", "# So that the time in this loop does not", "self._best_pipeline already exists, check to make sure it is different", "etc. \"\"\" if pipeline_id not in self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline not", "ensembling in a separate batch after every allowed pipeline class", "no. \"\"\" if self._interrupted: return False # for add_to_rankings if", "adds the results to the automl rankings with the requirement", "one of each pipeline family allowed in the search. \"\"\"", "\"\" def search(self, show_iteration_plot=True): \"\"\"Find the best pipeline for the", "(including CV): %.1f seconds\" % pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross Validation\", underline=\"-\")", "pipelines allowed in the search. 
The default of None indicates", "type(None))): raise TypeError(f\"Parameter max_time must be a float, int, string", "- num_ensemble_batches) + num_ensemble_batches) else: self.max_iterations = 1 + len(self.allowed_pipelines)", "start_iteration_callback self.add_result_callback = add_result_callback self.error_callback = error_callback or log_error_callback self.data_splitter", "= self._engine.train_pipeline(best_pipeline, X_train, y_train, self.optimize_thresholds, self.objective) self._best_pipeline = best_pipeline def", "\" % self.objective.name) logger.info(\"{} score is better.\\n\".format('Greater' if self.objective.greater_is_better else", "not None and self._automl_algorithm.batch_number > 0: batch_number = self._automl_algorithm.batch_number update_pipeline(logger,", "the fitted pipeline. Note that the any pipelines that error", "of default set of objectives for problem_type, remove it existing_main_objective", "of np.nan on the primary objective {self.objective}.\") self.search_duration = time.time()", "training, and the AutoMLSearch object. error_callback (callable): Function called when", "pipeline class, the pipeline parameters, and the AutoMLSearch object. add_result_callback", "elif self.max_iterations is not None: logger.info(\"Searching up to %s pipelines.", "a full list. objective (str, ObjectiveBase): The objective to optimize", "= [] loop_interrupted = False while self._should_continue(): try: if not", "generator. Defaults to 0. n_jobs (int or None): Non-negative integer", "_pipelines_per_batch=5): \"\"\"Automated pipeline search Arguments: X_train (pd.DataFrame, ww.DataTable): The input", "score plot in Jupyter notebook. 
Disabled by default in non-Jupyter", "new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted = False except KeyboardInterrupt: loop_interrupted =", "_should_continue(self): \"\"\"Given the original stopping criterion and current state, should", "if self.max_batches and self.max_iterations is None: self.show_batch_output = True if", "isinstance(objective, type): if objective in non_core_objectives: raise ValueError(f\"{objective.name.lower()} is not", "Arguments: file_path (str): location to save file pickle_protocol (int): the", "from sklearn.model_selection import BaseCrossValidator from .pipeline_search_plots import PipelineSearchPlots from evalml.automl.automl_algorithm", "self.data_splitter.transform_sample(X_train, y_train) X_train = X_train.iloc[train_indices] y_train = y_train.iloc[train_indices] best_pipeline =", "pipeline in self.allowed_pipelines or []: if pipeline.problem_type != self.problem_type: raise", "gap, \"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores =", "objectives): \"\"\"Score a list of pipelines on the given holdout", "not empty. random_seed (int): Seed for the random number generator.", "the base score. percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name] =", ") rankings_desc = \"\" if not self.rankings.empty: rankings_str = self.rankings.drop(['parameters'],", "{_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives: {_print_list(self.additional_objectives or [])}\\n\" f\"Random Seed: {self.random_seed}\\n\" f\"n_jobs:", "not None: logger.info(f\"Searching up to {self.max_batches} batches for a total", "\"\"\"Class that allows access to a copy of the results", "problem, training time, cross validation, etc. 
\"\"\" if pipeline_id not", "Arguments: pipelines (list(PipelineBase)): List of pipelines to train. Returns: Dict[str,", "continue mean = all_objective_scores[c].mean(axis=0) std = all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c] =", "for estimator in allowed_estimators] if self.allowed_pipelines == []: raise ValueError(\"No", "f\"\\nSearch finished after {elapsed_time}\" desc = desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline() if", "{self.max_time}\\n\" f\"Max Iterations: {self.max_iterations}\\n\" f\"Max Batches: {self.max_batches}\\n\" f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines", "def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores = defaultdict(int) n_folds = len(cv_data) for", "% self.max_iterations) if self.max_time is not None: logger.info(\"Will stop searching", "displayed in the log. \"\"\" return self._engine.score_batch(pipelines, X_holdout, y_holdout, objectives)", "= self._results['pipeline_results'][pipeline_id] pipeline.describe() if pipeline.model_family == ModelFamily.ENSEMBLE: logger.info(\"Input for ensembler", "If self._best_pipeline already exists, check to make sure it is", "X_train argument') if y_train is None: raise ValueError('Must specify training", "= (self.max_batches - 1) // ensemble_nth_batch if num_ensemble_batches == 0:", "count towards the time budget (if set) time_in_loop = time.time()", "threshhold.\"\"\" pipeline_name = pipeline.name high_variance_cv = bool(abs(cv_scores.std() / cv_scores.mean()) >", "or patience < 0): raise ValueError(\"patience value must be a", "logger.info(\"Generating pipelines to search over...\") allowed_estimators = get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators", "elapsed >= self.max_time: return False elif self.max_iterations and num_pipelines >=", "False if no. 
\"\"\" if self._interrupted: return False # for", "LogLossMulticlass for multiclass classification problems, and - R2 for regression", "the specified pipeline initialized with the parameters used to train", "non-Jupyter enviroments. \"\"\" if self._searched: logger.info(\"AutoMLSearch.search() has already been run", "False logger.warning(f\"Ensembling is set to True, but max_batches is too", "self._results['pipeline_results'][first_id]['score'] num_without_improvement = 0 for id in self._results['search_order'][1:]: curr_score =", "not run.\") run_ensembling = False if run_ensembling and self.max_iterations is", "early. Must be positive. If None, early stopping is disabled.", "= PipelineSearchPlots def __init__(self, X_train=None, y_train=None, problem_type=None, objective='auto', max_iterations=None, max_time=None,", "< 0): raise ValueError(\"patience value must be a positive integer.", "[]: raise ValueError(\"No allowed pipelines to search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling =", "= random_seed self.n_jobs = n_jobs self.plot = None try: self.plot", "= 1 logger.info(\"Using default limit of max_batches=1.\\n\") if patience and", "to StratifiedKFold. tuner_class: The tuner class to use. Defaults to", "= len(self.allowed_pipelines) + 1 num_ensemble_batches = (self.max_batches - 1) //", "time_elapsed, update_pipeline ) logger = get_logger(__file__) class AutoMLSearch: \"\"\"Automated Pipeline", "gap and max_delay \" f\"parameters. 
Received {problem_configuration}.\") return problem_configuration or", "self.max_time: return False elif self.max_iterations and num_pipelines >= self.max_iterations: return", "if len(self.problem_configuration): pipeline_params = {**{'pipeline': self.problem_configuration}, **self.pipeline_parameters} else: pipeline_params =", "ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap = self.problem_configuration['gap'] max_delay = self.problem_configuration['max_delay']", "= {} self.show_batch_output = False self._validate_problem_type() self.problem_configuration = self._validate_problem_configuration(problem_configuration) self._train_best_pipeline", "else: additional_objectives = [get_objective(o) for o in additional_objectives] additional_objectives =", "= False while self._should_continue(): try: if not loop_interrupted: current_batch_pipelines =", "def __str__(self): def _print_list(obj_list): lines = sorted(['\\t{}'.format(o.name) for o in", "self._pipelines_per_batch + 1) if self.max_iterations < first_ensembling_iteration: run_ensembling = False", "problems.\".format(pipeline.problem_type)) if self.optimize_thresholds and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold: logger.info(\"Objective to optimize", "an iteration vs. score plot in Jupyter notebook. Disabled by", "+ self.additional_objectives} if not isinstance(max_time, (int, float, str, type(None))): raise", "scoring will not be included in the dictionary but the", "import split_data from evalml.problem_types import ProblemTypes, handle_problem_types from evalml.tuners import", "ValueError(\"Given objective {} is not compatible with a {} problem.\".format(self.objective.name,", "0.0 self._results = { 'pipeline_results': {}, 'search_order': [], 'errors': []", "run. Defaults to False. 
max_batches (int): The maximum number of", "scores and logs a warning if variance is higher than", "If True, search should terminate early \"\"\" leading_char = \"\\n\"", "self.add_result_callback = add_result_callback self.error_callback = error_callback or log_error_callback self.data_splitter =", "_get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores = defaultdict(int) n_folds = len(cv_data) for fold_data", "and evaluates a given pipeline then adds the results to", "list(ObjectiveBase)): Objectives used for scoring. Returns: Dict[str, Dict[str, float]]: Dictionary", "f: cloudpickle.dump(self, f, protocol=pickle_protocol) @staticmethod def load(file_path): \"\"\"Loads AutoML object", "handle_problem_types from evalml.tuners import SKOptTuner from evalml.utils import convert_to_seconds, infer_feature_types", "if not objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given objective {} is not compatible", "self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name] for parameter in pipeline_rows['parameters']: if pipeline.parameters ==", "evalml.tuners import SKOptTuner from evalml.utils import convert_to_seconds, infer_feature_types from evalml.utils.logger", "\"Beginning pipeline search\") logger.info(\"Optimizing for %s. 
\" % self.objective.name) logger.info(\"{}", "\"\"\" if X_train is None: raise ValueError('Must specify training data", "ModelFamily.ENSEMBLE: X_train, y_train = self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else: X_train = self.X_train", "if self.objective.greater_is_better else 'Lower')) logger.info(f\"Using {self._engine.__class__.__name__} to train and score", "TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap = self.problem_configuration['gap'] max_delay = self.problem_configuration['max_delay'] baseline", "When set to 'auto', chooses: - LogLossBinary for binary classification", "least the gap and max_delay \" f\"parameters. Received {problem_configuration}.\") return", "time.time() while True: choice = input(leading_char + \"Do you really", "regression) as problem_type') self.tuner_class = tuner_class or SKOptTuner self.start_iteration_callback =", "set to True, but max_batches is too small, so ensembling", "features. y_holdout (ww.DataTable, pd.DataFrame): Holdout targets for scoring. objectives (list(str),", "1} to run ensembling.\") else: logger.info(f\"Ensembling will run every {ensemble_nth_batch}", "= self.data_splitter.transform_sample(X_train, y_train) X_train = X_train.iloc[train_indices] y_train = y_train.iloc[train_indices] best_pipeline", "not run. Set max_batches >= {ensemble_nth_batch + 1} to run", "raise ValueError(f\"Ensembling split size must be between 0 and 1", "name that maps to the fitted pipeline. 
Note that the", "size must be between 0 and 1 exclusive, received {_ensembling_split_size}\")", "amount of the training data we'll set aside for training", "== self.get_pipeline(best_pipeline['id'])): best_pipeline = self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline: if best_pipeline.model_family ==", "the parameters used to train that pipeline during automl search.", "containing `pipeline_results`: a dict with results from each pipeline, and", "random_seed=self.random_seed) def describe_pipeline(self, pipeline_id, return_dict=False): \"\"\"Describe a pipeline Arguments: pipeline_id", "\"\"\"Checks cross-validation scores and logs a warning if variance is", "the training results for the new pipeline, an untrained_pipeline containing", "found in automl results\") pipeline_class = pipeline_results.get('pipeline_class') parameters = pipeline_results.get('parameters')", "used during training, and the AutoMLSearch object. error_callback (callable): Function", "return pipeline_id def _check_for_high_variance(self, pipeline, cv_scores, threshold=0.2): \"\"\"Checks cross-validation scores", "number of pipelines to train for every batch after the", "{**{'pipeline': self.problem_configuration}, **self.pipeline_parameters} else: pipeline_params = self.pipeline_parameters self._automl_algorithm = IterativeAlgorithm(", "pipeline_parameters is not None else {} self.search_iteration_plot = None self._interrupted", "of (binary, multiclass, regression) as problem_type') self.tuner_class = tuner_class or", "(list, optional): list of feature types, either numerical or categorical.", "as estimated on unseen data.\") return high_variance_cv def get_pipeline(self, pipeline_id):", "pipelines that error out during training will not be included", "next((obj for obj in additional_objectives if obj.name == self.objective.name), None)", "True # check max_time and max_iterations elapsed = time.time() -", "gap, \"max_delay\": max_delay}, \"Time Series Baseline Estimator\": 
{\"gap\": gap, \"max_delay\":", "from evalml.tuners import SKOptTuner from evalml.utils import convert_to_seconds, infer_feature_types from", "percentage difference to qualify as score improvement for early stopping.", "not None. Defaults to None. allowed_pipelines (list(class)): A list of", "each pipeline iteration + 1 first_ensembling_iteration = (1 + len(self.allowed_pipelines)", "\"...\" desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number = 1 if self._automl_algorithm is", "model families: %s\\n\" % \", \".join([model.value for model in self.allowed_model_families]))", "self.plot = None try: self.plot = PipelineSearchPlots(self) except ImportError: logger.warning(\"Unable", "\"# Testing\", so we want to exclude them since #", "'search_order': [], 'errors': [] } self.random_seed = random_seed self.n_jobs =", "def best_pipeline(self): \"\"\"Returns a trained instance of the best pipeline", "elif self.max_iterations and num_pipelines >= self.max_iterations: return False # check", "before selecting `best_pipeline`.\") return self._best_pipeline def save(self, file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves", "( get_logger, log_subtitle, log_title, time_elapsed, update_pipeline ) logger = get_logger(__file__)", "(self.max_batches - 1 - num_ensemble_batches) + num_ensemble_batches) else: self.max_iterations =", "used with each pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property def full_rankings(self):", "\"y\": logger.info(\"Exiting AutoMLSearch.\") return True elif choice == \"n\": #", "\"\"\"Saves AutoML object at file path Arguments: file_path (str): location", "is not compatible with problem_type {}.\".format(pipeline.name, self.problem_type.value)) def _add_baseline_pipelines(self): \"\"\"Fits", "class AutoMLSearch: \"\"\"Automated Pipeline search.\"\"\" _MAX_NAME_LEN = 40 # Necessary", "= get_default_primary_search_objective(self.problem_type.value) objective = 
get_objective(objective, return_instance=False) self.objective = self._validate_objective(objective) if", "for p in (self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set to {[pipeline.name for pipeline", "pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML object at file path Arguments: file_path (str):", "or None): Non-negative integer describing level of parallelism used for", "self._interrupted = False if self.allowed_pipelines is None: logger.info(\"Generating pipelines to", "check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter ) from evalml.exceptions import AutoMLSearchException, PipelineNotFoundError from", "pipeline for the data set. Arguments: feature_types (list, optional): list", "error_callback=None, additional_objectives=None, random_seed=0, n_jobs=-1, tuner_class=None, optimize_thresholds=True, ensembling=False, max_batches=None, problem_configuration=None, train_best_pipeline=True,", "not found in automl results\") pipeline_class = pipeline_results.get('pipeline_class') parameters =", "pass along other appropriate parameters by default. Defaults to None,", "+ first batch + each pipeline iteration + 1 first_ensembling_iteration", "with the provided ID \"\"\" pipeline_results = self.results['pipeline_results'].get(pipeline_id) if pipeline_results", "self._engine = SequentialEngine(self.X_train, self.y_train, self.ensembling_indices, self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families", "override default objectives for problem type if not empty. random_seed", "from .pipeline_search_plots import PipelineSearchPlots from evalml.automl.automl_algorithm import IterativeAlgorithm from evalml.automl.callbacks", "ensembling will not run. 
Set max_batches >= {ensemble_nth_batch + 1}", "to optimize binary classification pipeline thresholds for: {}\".format(self.objective)) logger.info(\"Total training", "LogLossBinary for binary classification problems, - LogLossMulticlass for multiclass classification", "specify training data as a 2d array using the X_train", ">= {first_ensembling_iteration} to run ensembling.\") else: logger.info(f\"Ensembling will run at", "an untrained pipeline instance. Returns: PipelineBase: A trained instance of", "for scoring. Returns: Dict[str, Dict[str, float]]: Dictionary keyed by pipeline", "handle_problem_types(problem_type) except ValueError: raise ValueError('choose one of (binary, multiclass, regression)", "pipeline, cv_scores, threshold=0.2): \"\"\"Checks cross-validation scores and logs a warning", "and the AutoMLSearch object. add_result_callback (callable): Function called after each", "of variation (cv >= {threshold}) within cross validation scores. {pipeline_name}", "current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All pipelines in the current AutoML batch produced", "self.problem_configuration['max_delay'] baseline = pipeline_class(parameters={\"pipeline\": {\"gap\": gap, \"max_delay\": max_delay}, \"Time Series", "underline=\"-\") all_objective_scores = [fold[\"all_objective_scores\"] for fold in pipeline_results[\"cv_data\"]] all_objective_scores =", "max_iterations is not None and max_iterations < 0: raise ValueError(f\"Parameter", "self._results['search_order'].append(pipeline_id) if not is_baseline: score_to_minimize = -cv_score if self.objective.greater_is_better else", "not scores if field in objective_name_to_class: scores[field] += value return", "self._start elapsed_time = time_elapsed(self._start) desc = f\"\\nSearch finished after {elapsed_time}\"", "problems. max_iterations (int): Maximum number of iterations to search. 
If", "training results for the new pipeline, an untrained_pipeline containing the", "= input(leading_char + \"Do you really want to exit search", "Only used when ensembling is True. Must be between 0", "features will automatically be encoded show_iteration_plot (boolean, True): Shows an", "logger.info('AutoML Algorithm out of recommendations, ending') break try: new_pipeline_ids =", "if pipeline.model_family == ModelFamily.ENSEMBLE: input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family in", "def train_pipelines(self, pipelines): \"\"\"Train a list of pipelines on the", "= len(cv_data) for fold_data in cv_data: for field, value in", "threshold. Defaults to True. start_iteration_callback (callable): Function called before each", "= abs((curr_score - best_score) / best_score) > self.tolerance score_improved =", "problem type if not empty. random_seed (int): Seed for the", "will train a baseline pipline + one of each pipeline", "complete. Arguments: pipelines (list(PipelineBase)): List of pipelines to train. Returns:", "value return {objective: float(score) / n_folds for objective, score in", "Defaults to None. allowed_pipelines (list(class)): A list of PipelineBase subclasses", "Pipelines: \\n{_print_list(self.allowed_pipelines or [])}\\n\" f\"Patience: {self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\" f\"Data Splitting:", "True: choice = input(leading_char + \"Do you really want to", "allowed_model_families (list(str, ModelFamily)): The model families to search. The default", "untrained pipeline instance. \"\"\" if not self._best_pipeline: raise PipelineNotFoundError(\"automl search", "set to -1, all CPUs are used. 
For n_jobs below", "estimated on unseen data.\") return high_variance_cv def get_pipeline(self, pipeline_id): \"\"\"Given", "self.search_iteration_plot.update() if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self) return pipeline_id def _check_for_high_variance(self,", "all_objective_scores[c].mean(axis=0) std = all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c] = mean all_objective_scores.loc[\"std\", c]", "ignored. allowed_model_families (list(str, ModelFamily)): The model families to search. The", "None if self.plot: self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start = time.time() try:", "after the first one. The first batch will train a", "of pipelines on the training data. This can be helpful", "= self._automl_algorithm.next_batch() except StopIteration: logger.info('AutoML Algorithm out of recommendations, ending')", "is not None and not issubclass(self.data_splitter.__class__, BaseCrossValidator): raise ValueError(\"Not a", "objectives for problem_type, remove it existing_main_objective = next((obj for obj", "PipelineNotFoundError: pass if self.search_iteration_plot: self.search_iteration_plot.update() if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id], pipeline, self)", "return_dict=False): \"\"\"Describe a pipeline Arguments: pipeline_id (int): pipeline to describe", "in automl.\") return objective() return objective def __str__(self): def _print_list(obj_list):", "to 0.2 _pipelines_per_batch (int): The number of pipelines to train", "\"# Training\" and \"# Testing\", so we want to exclude", "TypeError(f\"Parameter max_time must be a float, int, string or None.", "thresholds for: {}\".format(self.objective)) logger.info(\"Total training time (including CV): %.1f seconds\"", "will not run. 
Set max_iterations >= {first_ensembling_iteration} to run ensembling.\")", "best_pipeline.model_family == ModelFamily.ENSEMBLE: X_train, y_train = self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else: X_train", "evalml.objectives import ( get_core_objectives, get_non_core_objectives, get_objective ) from evalml.pipelines import", "choice == \"y\": logger.info(\"Exiting AutoMLSearch.\") return True elif choice ==", "as score improvement for early stopping. Only applicable if patience", "None self._searched = False self.X_train = infer_feature_types(X_train) self.y_train = infer_feature_types(y_train)", "default_data_splitter = make_data_splitter(self.X_train, self.y_train, self.problem_type, self.problem_configuration, n_splits=3, shuffle=True, random_seed=self.random_seed) self.data_splitter", "X_train (pd.DataFrame, ww.DataTable): The input training data of shape [n_samples,", "batches of pipelines to search. Parameters max_time, and max_iterations have", "exclusive. Defaults to 0.2 _pipelines_per_batch (int): The number of pipelines", "c in all_objective_scores: if c in [\"# Training\", \"# Validation\"]:", "c] = mean all_objective_scores.loc[\"std\", c] = std all_objective_scores.loc[\"coef of var\",", "def _post_evaluation_callback(self, pipeline, evaluation_results): training_time = evaluation_results['training_time'] cv_data = evaluation_results['cv_data']", "first one. The first batch will train a baseline pipline", "instance attributes. 
plot = PipelineSearchPlots def __init__(self, X_train=None, y_train=None, problem_type=None,", "and self._automl_algorithm.batch_number > 0: batch_number = self._automl_algorithm.batch_number update_pipeline(logger, desc, len(self._results['pipeline_results'])", "self.max_iterations and not self.max_time and not self.max_batches: self.max_batches = 1", "which have been made Returns: int: the number of pipeline", "\"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"] if not self._results['pipeline_results']: return pd.DataFrame(columns=full_rankings_cols) rankings_df =", "Custom set of objectives to score on. Will override default", "classification problems, - LogLossMulticlass for multiclass classification problems, and -", "_find_best_pipeline(self): \"\"\"Finds the best pipeline in the rankings If self._best_pipeline", "\"\"\"Fits a baseline pipeline to the data. This is the", "desc = desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline() if self._best_pipeline is not None:", "[] loop_interrupted = False while self._should_continue(): try: if not loop_interrupted:", "not be included in the dictionary but the exception and", "= ( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max Time:", "objective = get_default_primary_search_objective(self.problem_type.value) objective = get_objective(objective, return_instance=False) self.objective = self._validate_objective(objective)", "batches for a total of {self.max_iterations} pipelines. 
\") elif self.max_iterations", "= self._results['pipeline_results'][first_id]['score'] num_without_improvement = 0 for id in self._results['search_order'][1:]: curr_score", "\"transform_sample\"): train_indices = self.data_splitter.transform_sample(X_train, y_train) X_train = X_train.iloc[train_indices] y_train =", "not compatible with a {} problem.\".format(obj.name, self.problem_type.value)) for pipeline in", "Maximum number of iterations to search. If max_iterations and max_time", "(y/n)? \").strip().lower() if choice == \"y\": logger.info(\"Exiting AutoMLSearch.\") return True", "%.1f seconds\" % pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross Validation\", underline=\"-\") all_objective_scores =", "random_seed=self.random_seed) self.data_splitter = self.data_splitter or default_data_splitter self.pipeline_parameters = pipeline_parameters if", "y_holdout, objectives): \"\"\"Score a list of pipelines on the given", "provided, this parameter will be ignored. data_splitter (sklearn.model_selection.BaseCrossValidator): Data splitting", "in for the gap and max_delay variables. train_best_pipeline (boolean): Whether", "update_pipeline(logger, desc, len(self._results['pipeline_results']) + 1, self.max_iterations, self._start, batch_number, self.show_batch_output) def", "the training data we'll set aside for training ensemble metalearners.", "current state, should the search continue? 
Returns: bool: True if", "and max_time < 0: raise ValueError(f\"Parameter max_time must be None", "for add_to_rankings if self._searched: return True # Run at least", "if run_ensembling and self.max_iterations is not None: # Baseline +", "highest-scoring set of parameters used with each pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\",", "tuner_class=None, optimize_thresholds=True, ensembling=False, max_batches=None, problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated", "f\"Start Iteration Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives:", "high_variance_cv = bool(abs(cv_scores.std() / cv_scores.mean()) > threshold) if high_variance_cv: logger.warning(f\"High", "over all model families. Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see options. Change", "stopping if self.patience is None or self.tolerance is None: return", "exclude them since # they are not scores if field", "find file to load Returns: AutoSearchBase object \"\"\" with open(file_path,", "three positional parameters: A dictionary containing the training results for", "for problem type if not empty. random_seed (int): Seed for", "if not self._results['pipeline_results']: return pd.DataFrame(columns=full_rankings_cols) rankings_df = pd.DataFrame(self._results['pipeline_results'].values()) rankings_df =", "the data set. 
Arguments: feature_types (list, optional): list of feature", "pipeline_id not in self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline not found\") pipeline =", "coefficient of variation (cv >= {threshold}) within cross validation scores.", "batch_number, self.show_batch_output) def _validate_objective(self, objective): non_core_objectives = get_non_core_objectives() if isinstance(objective,", "self.optimize_thresholds, self.objective) self._best_pipeline = best_pipeline def _num_pipelines(self): \"\"\"Return the number", "if num_pipelines == 0: return True # check max_time and", "_num_pipelines(self): \"\"\"Return the number of pipeline evaluations which have been", "(float): The amount of the training data we'll set aside", "appropriate parameters by default. Defaults to None, which will call", "training pipelines once the search is complete. Arguments: pipelines (list(PipelineBase)):", "Will override default objectives for problem type if not empty.", "if our main objective is part of default set of", "log_error_callback self.data_splitter = data_splitter self.optimize_thresholds = optimize_thresholds self.ensembling = ensembling", "pd.DataFrame(all_objective_scores) for c in all_objective_scores: if c in [\"# Training\",", "passed in for the gap and max_delay variables. train_best_pipeline (boolean):", "we'll set aside for training ensemble metalearners. 
Only used when", "ValueError('Must specify training data target values as a 1d vector", "current best pipeline before training and thresholding\"\"\" if len(self.rankings) ==", "evaluates a given pipeline then adds the results to the", "return pipeline_class(parameters, random_seed=self.random_seed) def describe_pipeline(self, pipeline_id, return_dict=False): \"\"\"Describe a pipeline", "= best_pipeline def _num_pipelines(self): \"\"\"Return the number of pipeline evaluations", "+ (self._pipelines_per_batch * (self.max_batches - 1)) if run_ensembling: if not", "takes three positional parameters: the Exception raised, the traceback, and", "= objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name] = percent_better high_variance_cv = self._check_for_high_variance(pipeline,", "log. \"\"\" return self._engine.train_batch(pipelines) def score_pipelines(self, pipelines, X_holdout, y_holdout, objectives):", "= ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type == ProblemTypes.REGRESSION: baseline = MeanBaselineRegressionPipeline(parameters={}) else:", "for c in all_objective_scores: if c in [\"# Training\", \"#", "y_train) X_train = X_train.iloc[train_indices] y_train = y_train.iloc[train_indices] best_pipeline = self._engine.train_pipeline(best_pipeline,", "allowed_model_families=None, start_iteration_callback=None, add_result_callback=None, error_callback=None, additional_objectives=None, random_seed=0, n_jobs=-1, tuner_class=None, optimize_thresholds=True, ensembling=False,", "{[estimator.name for estimator in allowed_estimators]}\") self.allowed_pipelines = [make_pipeline(self.X_train, self.y_train, estimator,", "else: pipeline_params = self.pipeline_parameters self._automl_algorithm = IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class,", "from evalml.pipelines import 
( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline", "and not self.max_time and not self.max_batches: self.max_batches = 1 logger.info(\"Using", "by pipeline name that maps to the fitted pipeline. Note", "+ n_jobs) are used. ensembling (boolean): If True, runs ensembling", "mean_cv_all_objectives for obj_name in mean_cv_all_objectives: objective_class = self.objective_name_to_class[obj_name] # In", "has been run. Arguments: pipeline (PipelineBase): pipeline to train and", "pipeline.name, \"pipeline_class\": type(pipeline), \"pipeline_summary\": pipeline.summary, \"parameters\": pipeline.parameters, \"score\": cv_score, \"high_variance_cv\":", "max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params )", "+ 1) if self.max_iterations < first_ensembling_iteration: run_ensembling = False logger.warning(f\"Ensembling", "f\"Add Result Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives: {_print_list(self.additional_objectives or [])}\\n\" f\"Random", "from evalml.utils.logger import ( get_logger, log_subtitle, log_title, time_elapsed, update_pipeline )", "max_iterations of 5. 
max_time (int, str): Maximum time to search", "{} instead\".format(patience)) if tolerance and (tolerance > 1.0 or tolerance", "if not problem_configuration or not all(p in problem_configuration for p", "= self.ensembling if run_ensembling and len(self.allowed_pipelines) == 1: logger.warning(\"Ensembling is", "import cloudpickle import numpy as np import pandas as pd", "time.time() - self._start if self.max_time and elapsed >= self.max_time: return", "log_subtitle, log_title, time_elapsed, update_pipeline ) logger = get_logger(__file__) class AutoMLSearch:", "ensembling (boolean): If True, runs ensembling in a separate batch", "during automl search. If `train_best_pipeline` is set to False, returns", "returns an untrained pipeline instance. Returns: PipelineBase: A trained instance", "n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params ) def _pre_evaluation_callback(self, pipeline): if", "not run again on the same instance. Re-initialize AutoMLSearch to", "not work well with instance attributes. plot = PipelineSearchPlots def", "\"\"\"Automated pipeline search Arguments: X_train (pd.DataFrame, ww.DataTable): The input training", "of 5. max_time (int, str): Maximum time to search for", "Maximum time to search for pipelines. This will not start", "to `multiclass` or `regression` depending on the problem type. Note", "notebook. Disabled by default in non-Jupyter enviroments. \"\"\" if self._searched:", "pipeline_class = pipeline_results.get('pipeline_class') parameters = pipeline_results.get('parameters') if pipeline_class is None", "location to save file pickle_protocol (int): the pickle data stream", "pipeline_rows['parameters']: if pipeline.parameters == parameter: return self._engine.evaluate_batch([pipeline]) self._find_best_pipeline() @property def", "Number of iterations without improvement to stop search early. 
Must", "(self.max_batches - 1)) if run_ensembling: if not (0 < _ensembling_split_size", "if pipeline.parameters == parameter: return self._engine.evaluate_batch([pipeline]) self._find_best_pipeline() @property def results(self):", "{} self.show_batch_output = False self._validate_problem_type() self.problem_configuration = self._validate_problem_configuration(problem_configuration) self._train_best_pipeline =", "able to pass along other appropriate parameters by default. Defaults", "problem.\".format(obj.name, self.problem_type.value)) for pipeline in self.allowed_pipelines or []: if pipeline.problem_type", "PipelineBase]: Dictionary keyed by pipeline name that maps to the", "problems, values should be passed in for the gap and", "before training and thresholding\"\"\" if len(self.rankings) == 0: return best_pipeline", "elif self.problem_type == ProblemTypes.REGRESSION: baseline = MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class =", "evaluation_results): training_time = evaluation_results['training_time'] cv_data = evaluation_results['cv_data'] cv_scores = evaluation_results['cv_scores']", "{self.objective}.\") self.search_duration = time.time() - self._start elapsed_time = time_elapsed(self._start) desc", "self._results['pipeline_results']: raise PipelineNotFoundError(\"Pipeline not found\") pipeline = self.get_pipeline(pipeline_id) pipeline_results =", "for field, value in fold_data['all_objective_scores'].items(): # The 'all_objective_scores' field contains", "import defaultdict import cloudpickle import numpy as np import pandas", "self.objective.greater_is_better else cv_score try: self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError: pass", "= infer_feature_types(X_train) self.y_train = infer_feature_types(y_train) self.ensembling_indices = None default_data_splitter =", "pipelines to train. X_holdout (ww.DataTable, pd.DataFrame): Holdout features. 
y_holdout (ww.DataTable,", "or not all(p in problem_configuration for p in required_parameters): raise", "loop_interrupted = False while self._should_continue(): try: if not loop_interrupted: current_batch_pipelines", "it is an integer, then the time will be in", "needed to configure the search. For example, in time series", "any pipelines that error out during scoring will not be", "@staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data, objective_name_to_class): scores = defaultdict(int) n_folds = len(cv_data)", "AutoMLSearch object. error_callback (callable): Function called when `search()` errors and", "A dict of the parameters used to initalize a pipeline", "random number generator. Defaults to 0. n_jobs (int or None):", "self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name] = percent_better high_variance_cv = self._check_for_high_variance(pipeline, cv_scores) pipeline_id", "compatible with a {} problem.\".format(obj.name, self.problem_type.value)) for pipeline in self.allowed_pipelines", "or {} def _handle_keyboard_interrupt(self): \"\"\"Presents a prompt to the user", "and max_batches < 0: raise ValueError(f\"Parameter max_batches must be None", "else: logger.info(f\"Ensembling will run at the {first_ensembling_iteration} iteration and every", "AutoMLSearch to search again.\") return # don't show iteration plot", "problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated pipeline search Arguments: X_train", "pipeline.name high_variance_cv = bool(abs(cv_scores.std() / cv_scores.mean()) > threshold) if high_variance_cv:", "def _pre_evaluation_callback(self, pipeline): if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self) desc =", "but the number of unique pipelines is one, so ensembling", "run ensembling.\") else: logger.info(f\"Ensembling will run at the 
{first_ensembling_iteration} iteration", "to search\") check_all_pipeline_names_unique(self.allowed_pipelines) run_ensembling = self.ensembling if run_ensembling and len(self.allowed_pipelines)", "max_time and max_iterations elapsed = time.time() - self._start if self.max_time", "return # nan for the base score. percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name],", "= X_train.iloc[train_indices] y_train = y_train.iloc[train_indices] best_pipeline = self._engine.train_pipeline(best_pipeline, X_train, y_train,", "= mean all_objective_scores.loc[\"std\", c] = std all_objective_scores.loc[\"coef of var\", c]", "Result Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives: {_print_list(self.additional_objectives or [])}\\n\" f\"Random Seed:", "{} self.search_iteration_plot = None self._interrupted = False if self.allowed_pipelines is", "set aside for training ensemble metalearners. Only used when ensembling", "all model families. Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see options. 
Change `binary`", "np.inf all_objective_scores = all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format', '{:.3f}'.format, 'expand_frame_repr', False): logger.info(all_objective_scores)", "else curr_score < best_score if score_improved and significant_change: best_score =", "0: return best_pipeline = self.rankings.iloc[0] if not (self._best_pipeline and self._best_pipeline", "instance of the specified pipeline initialized with the parameters used", "IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params", "Parameters max_time, and max_iterations have precedence over stopping the search.", "instance. Returns: PipelineBase: A trained instance of the best pipeline", "pipeline evaluations which have been made Returns: int: the number", "names allowed in automl.\") return objective() return objective def __str__(self):", "containing the parameters used during training, and the AutoMLSearch object.", "self.X_train = infer_feature_types(X_train) self.y_train = infer_feature_types(y_train) self.ensembling_indices = None default_data_splitter", "objective {} is not compatible with a {} problem.\".format(obj.name, self.problem_type.value))", "= -cv_score if self.objective.greater_is_better else cv_score try: self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id])", "AutoMLSearchException, PipelineNotFoundError from evalml.model_family import ModelFamily from evalml.objectives import (", "in automl results\") return pipeline_class(parameters, random_seed=self.random_seed) def describe_pipeline(self, pipeline_id, return_dict=False):", "pipelines. None and 1 are equivalent. 
If set to -1,", "additional_objectives is None: additional_objectives = get_core_objectives(self.problem_type) # if our main", "the problem type. Note that if allowed_pipelines is provided, this", "be a float, int, string or None. Received {type(max_time)} with", "is None or self.tolerance is None: return True first_id =", "f: return cloudpickle.load(f) def train_pipelines(self, pipelines): \"\"\"Train a list of", "ascending = True if self.objective.greater_is_better: ascending = False full_rankings_cols =", "allowed_model_families self._automl_algorithm = None self._start = 0.0 self._baseline_cv_scores = {}", "in additional_objectives] additional_objectives = [self._validate_objective(obj) for obj in additional_objectives] self.additional_objectives", "rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending, inplace=True) rankings_df.reset_index(drop=True, inplace=True) return rankings_df @property def", "defaultdict import cloudpickle import numpy as np import pandas as", "(dict, None): Additional parameters needed to configure the search. For", "Change `binary` to `multiclass` or `regression` depending on the problem", "Callback: {_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives: {_print_list(self.additional_objectives or", "pipeline search Arguments: X_train (pd.DataFrame, ww.DataTable): The input training data", "non-negative. Received {max_batches}.\") if max_iterations is not None and max_iterations", "not self.max_batches: self.max_batches = 1 logger.info(\"Using default limit of max_batches=1.\\n\")", "num_ensemble_batches) else: self.max_iterations = 1 + len(self.allowed_pipelines) + (self._pipelines_per_batch *", "want to exit search (y/n)? \").strip().lower() if choice == \"y\":", "the random number generator. Defaults to 0. 
n_jobs (int or", "# if our main objective is part of default set", "with value {str(max_time)}..\") if isinstance(max_time, (int, float)) and max_time <", "is higher than specified threshhold.\"\"\" pipeline_name = pipeline.name high_variance_cv =", "self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline: if best_pipeline.model_family == ModelFamily.ENSEMBLE: X_train, y_train =", "the search. The default of None indicates all pipelines for", "[], 'errors': [] } self.random_seed = random_seed self.n_jobs = n_jobs", "specified threshhold.\"\"\" pipeline_name = pipeline.name high_variance_cv = bool(abs(cv_scores.std() / cv_scores.mean())", "for the new pipeline, an untrained_pipeline containing the parameters used", "will return # nan for the base score. percent_better =", "for {} problems.\".format(pipeline.problem_type)) if self.optimize_thresholds and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold: logger.info(\"Objective", "the pipeline parameters, and the AutoMLSearch object. add_result_callback (callable): Function", "= _pipelines_per_batch if not self.max_iterations and not self.max_time and not", "Note that if allowed_pipelines is provided, this parameter will be", "pandas as pd import woodwork as ww from sklearn.model_selection import", "all objectives # but also fields like \"# Training\" and", "cv_score try: self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError: pass if self.search_iteration_plot:", "but also fields like \"# Training\" and \"# Testing\", so", "for early stopping. 
Only applicable if patience is not None.", "rankings_desc def _validate_problem_configuration(self, problem_configuration=None): if self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters =", "current_batch_pipelines = self._automl_algorithm.next_batch() except StopIteration: logger.info('AutoML Algorithm out of recommendations,", "Defaults to None, which will call `log_error_callback`. additional_objectives (list): Custom", "problem_type {}.\".format(pipeline.name, self.problem_type.value)) def _add_baseline_pipelines(self): \"\"\"Fits a baseline pipeline to", "a pandas.DataFrame with scoring results from the highest-scoring set of", "= 0 else: num_without_improvement += 1 if num_without_improvement >= self.patience:", "for parameter in pipeline_rows['parameters']: if pipeline.parameters == parameter: return self._engine.evaluate_batch([pipeline])", "result, returns an untrained instance of the specified pipeline initialized", "after {elapsed_time}\" desc = desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline() if self._best_pipeline is", "to see options. Change `binary` to `multiclass` or `regression` depending", "to stop the search. Returns: bool: If True, search should", "self._best_pipeline is not None: best_pipeline = self.rankings.iloc[0] best_pipeline_name = best_pipeline[\"pipeline_name\"]", "the first one. 
The first batch will train a baseline", "ensembling=False, max_batches=None, problem_configuration=None, train_best_pipeline=True, pipeline_parameters=None, _ensembling_split_size=0.2, _pipelines_per_batch=5): \"\"\"Automated pipeline search", "training_time = evaluation_results['training_time'] cv_data = evaluation_results['cv_data'] cv_scores = evaluation_results['cv_scores'] is_baseline", "field, value in fold_data['all_objective_scores'].items(): # The 'all_objective_scores' field contains scores", "= { \"id\": pipeline_id, \"pipeline_name\": pipeline.name, \"pipeline_class\": type(pipeline), \"pipeline_summary\": pipeline.summary,", "(pd.DataFrame, ww.DataTable): The input training data of shape [n_samples, n_features].", "dict containing `pipeline_results`: a dict with results from each pipeline,", "from evalml.automl.utils import ( check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter ) from evalml.exceptions", "def rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results from the", "= None if self.plot: self.search_iteration_plot = self.plot.search_iteration_plot(interactive_plot=show_iteration_plot) self._start = time.time()", "or [])}\\n\" f\"Patience: {self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\" f\"Data Splitting: {self.data_splitter}\\n\" f\"Tuner:", "elif self.problem_type == ProblemTypes.MULTICLASS: baseline = ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type ==", "num_ensemble_batches) + num_ensemble_batches) else: self.max_iterations = 1 + len(self.allowed_pipelines) +", "evaluation_results['cv_data'] cv_scores = evaluation_results['cv_scores'] is_baseline = pipeline.model_family == ModelFamily.BASELINE cv_score", "[n_samples, n_features]. Required. 
y_train (pd.Series, ww.DataColumn): The target training data", "= None self._start = 0.0 self._baseline_cv_scores = {} self.show_batch_output =", "iteration plot outside of a jupyter notebook if show_iteration_plot: try:", "a given pipeline then adds the results to the automl", "dictionary of scores. Note that the any pipelines that error", "the best pipeline for the data set. Arguments: feature_types (list,", "class to use. Defaults to SKOptTuner. optimize_thresholds (bool): Whether or", "max_delay variables. train_best_pipeline (boolean): Whether or not to train the", "_, _ = split_data(X_shape, self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices =", "self.objective = self._validate_objective(objective) if self.data_splitter is not None and not", "# for add_to_rankings if self._searched: return True # Run at", "pipeline_params=pipeline_params ) def _pre_evaluation_callback(self, pipeline): if self.start_iteration_callback: self.start_iteration_callback(pipeline.__class__, pipeline.parameters, self)", "to search. If max_iterations and max_time is not set, then", "self.max_iterations = 1 + len(self.allowed_pipelines) + (self._pipelines_per_batch * (self.max_batches -", "0: batch_number = self._automl_algorithm.batch_number update_pipeline(logger, desc, len(self._results['pipeline_results']) + 1, self.max_iterations,", "False log_title(logger, \"Beginning pipeline search\") logger.info(\"Optimizing for %s. \" %", "scores[field] += value return {objective: float(score) / n_folds for objective,", "+ \"...\" desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number = 1 if self._automl_algorithm", "= make_data_splitter(self.X_train, self.y_train, self.problem_type, self.problem_configuration, n_splits=3, shuffle=True, random_seed=self.random_seed) self.data_splitter =", "must be None or non-negative. 
Received {max_time}.\") if max_batches is", "ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap = self.problem_configuration['gap'] max_delay = self.problem_configuration['max_delay'] baseline =", "improvement. Stopping search early...\".format(self.patience)) return False return True def _validate_problem_type(self):", "during training, and the AutoMLSearch object. error_callback (callable): Function called", "AutoMLSearch object. Must also accepts kwargs, so AutoMLSearch is able", "y_train is None: raise ValueError('Must specify training data target values", "== ModelFamily.BASELINE cv_score = cv_scores.mean() percent_better_than_baseline = {} mean_cv_all_objectives =", "Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max Time: {self.max_time}\\n\" f\"Max Iterations:", "pipelines that error out during scoring will not be included", "self.additional_objectives: if not obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional objective {} is not", "fields like \"# Training\" and \"# Testing\", so we want", "+ \"Do you really want to exit search (y/n)? \").strip().lower()", "than specified threshhold.\"\"\" pipeline_name = pipeline.name high_variance_cv = bool(abs(cv_scores.std() /", "all_objective_scores.loc[\"mean\", c] = mean all_objective_scores.loc[\"std\", c] = std all_objective_scores.loc[\"coef of", "to train. Returns: Dict[str, PipelineBase]: Dictionary keyed by pipeline name", "A trained instance of the best pipeline and parameters found", "must be between 0 and 1 exclusive, received {_ensembling_split_size}\") X_shape", "and max_delay variables. train_best_pipeline (boolean): Whether or not to train", "cv_scores[0] } if pipeline.model_family == ModelFamily.ENSEMBLE: input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for", "train that pipeline during automl search. 
Arguments: pipeline_id (int): pipeline", "configure the search. For example, in time series problems, values", "elapsed. If it is an integer, then the time will", "- self._start elapsed_time = time_elapsed(self._start) desc = f\"\\nSearch finished after", "as type of pipeline components, problem, training time, cross validation,", "order the pipelines were searched. \"\"\" return copy.deepcopy(self._results) @property def", "and parameters found during automl search. If `train_best_pipeline` is set", "all(p in problem_configuration for p in required_parameters): raise ValueError(\"user_parameters must", "== 0: run_ensembling = False logger.warning(f\"Ensembling is set to True,", "self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families = list(set([p.model_family for p in", "run before selecting `best_pipeline`.\") return self._best_pipeline def save(self, file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL):", "pipeline training result, returns an untrained instance of the specified", "from each pipeline, and `search_order`: a list describing the order", "Additional parameters needed to configure the search. For example, in", "if run_ensembling: if not (0 < _ensembling_split_size < 1): raise", "will not run. Set max_batches >= {ensemble_nth_batch + 1} to", "is different from the current best pipeline before training and", "time, cross validation, etc. 
\"\"\" if pipeline_id not in self._results['pipeline_results']:", "None: raise ValueError('Must specify training data target values as a", "number of pipeline evaluations made in the search \"\"\" return", "parameters: A dictionary containing the training results for the new", "= self.rankings.iloc[0] if not (self._best_pipeline and self._best_pipeline == self.get_pipeline(best_pipeline['id'])): best_pipeline", "Received {max_batches}.\") if max_iterations is not None and max_iterations <", "self.y_train.iloc[self.ensembling_indices] else: X_train = self.X_train y_train = self.y_train if hasattr(self.data_splitter,", "return objective() return objective def __str__(self): def _print_list(obj_list): lines =", "update_pipeline ) logger = get_logger(__file__) class AutoMLSearch: \"\"\"Automated Pipeline search.\"\"\"", "estimator in allowed_estimators]}\") self.allowed_pipelines = [make_pipeline(self.X_train, self.y_train, estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters)", "Algorithm out of recommendations, ending') break try: new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines)", "\"\"\"Automated Pipeline search.\"\"\" _MAX_NAME_LEN = 40 # Necessary for \"Plotting\"", "Testing\", so we want to exclude them since # they", "must be a dict containing values for at least the", "exception and stacktrace will be displayed in the log. \"\"\"", "self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist() self._engine = SequentialEngine(self.X_train,", "finished after {elapsed_time}\" desc = desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline() if self._best_pipeline", "results from `automl_search`. 
Returns: dict containing `pipeline_results`: a dict with", "= True if self.objective.greater_is_better: ascending = False full_rankings_cols = [\"id\",", "add_result_callback self.error_callback = error_callback or log_error_callback self.data_splitter = data_splitter self.optimize_thresholds", "None or self.tolerance is None: return True first_id = self._results['search_order'][0]", "self.max_iterations = (1 + len(self.allowed_pipelines) + self._pipelines_per_batch * (self.max_batches -", "best pipeline in the rankings If self._best_pipeline already exists, check", "pipeline_results is None: raise PipelineNotFoundError(\"Pipeline not found in automl results\")", "self.objective.greater_is_better else 'Lower')) logger.info(f\"Using {self._engine.__class__.__name__} to train and score pipelines.\")", "def _num_pipelines(self): \"\"\"Return the number of pipeline evaluations which have", "Returns: Description of specified pipeline. Includes information such as type", "with instance attributes. 
plot = PipelineSearchPlots def __init__(self, X_train=None, y_train=None,", "So that the time in this loop does not count", "self._results['pipeline_results'][pipeline_id] pipeline.describe() if pipeline.model_family == ModelFamily.ENSEMBLE: logger.info(\"Input for ensembler are", "batches.\") self.max_iterations = (1 + len(self.allowed_pipelines) + self._pipelines_per_batch * (self.max_batches", "_, ensembling_indices, _, _ = split_data(X_shape, self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed)", "desc = desc[:AutoMLSearch._MAX_NAME_LEN - 3] + \"...\" desc = desc.ljust(AutoMLSearch._MAX_NAME_LEN)", "self.problem_configuration}, **self.pipeline_parameters} else: pipeline_params = self.pipeline_parameters self._automl_algorithm = IterativeAlgorithm( max_iterations=self.max_iterations,", "problem_type=None, objective='auto', max_iterations=None, max_time=None, patience=None, tolerance=None, data_splitter=None, allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None,", "budget (if set) time_in_loop = time.time() - start_of_loop self._start +=", "max_time < 0: raise ValueError(f\"Parameter max_time must be None or", "= PipelineSearchPlots(self) except ImportError: logger.warning(\"Unable to import plotly; skipping pipeline", "unseen data.\") return high_variance_cv def get_pipeline(self, pipeline_id): \"\"\"Given the ID", "rankings_df = rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending, inplace=True) rankings_df.reset_index(drop=True, inplace=True) return rankings_df", "is not allowed in AutoML! 
\" \"Use evalml.objectives.utils.get_core_objective_names() \" \"to", "( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline ) from evalml.pipelines.components.utils", "if isinstance(max_time, (int, float)) and max_time < 0: raise ValueError(f\"Parameter", "= time.time() try: self._add_baseline_pipelines() except KeyboardInterrupt: if self._handle_keyboard_interrupt(): self._interrupted =", "Description of specified pipeline. Includes information such as type of", "get_logger, log_subtitle, log_title, time_elapsed, update_pipeline ) logger = get_logger(__file__) class", "cv_score, \"high_variance_cv\": high_variance_cv, \"training_time\": training_time, \"cv_data\": cv_data, \"percent_better_than_baseline_all_objectives\": percent_better_than_baseline, \"percent_better_than_baseline\":", "1 first_ensembling_iteration = (1 + len(self.allowed_pipelines) + len(self.allowed_pipelines) * self._pipelines_per_batch", "three positional parameters: The pipeline class, the pipeline parameters, and", "optional): list of feature types, either numerical or categorical. Categorical", "post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families = list(set([p.model_family for p in (self.allowed_pipelines)])) logger.debug(f\"allowed_pipelines set", "Objectives used for scoring. Returns: Dict[str, Dict[str, float]]: Dictionary keyed", "The objective to optimize for. 
Used to propose and rank", "empty so we will return # nan for the base", "is not set, then max_iterations will default to max_iterations of", "A list of PipelineBase subclasses indicating the pipelines allowed in", "pipeline then adds the results to the automl rankings with", "+ len(self.allowed_pipelines) + len(self.allowed_pipelines) * self._pipelines_per_batch + 1) if self.max_iterations", "time.time() try: self._add_baseline_pipelines() except KeyboardInterrupt: if self._handle_keyboard_interrupt(): self._interrupted = True", "self.patience = patience self.tolerance = tolerance or 0.0 self._results =", "0.0 self._baseline_cv_scores = {} self.show_batch_output = False self._validate_problem_type() self.problem_configuration =", "problem_configuration or not all(p in problem_configuration for p in required_parameters):", "max_time must be a float, int, string or None. Received", "value {str(max_time)}..\") if isinstance(max_time, (int, float)) and max_time < 0:", "information about pipeline. Defaults to False. Returns: Description of specified", "except StopIteration: logger.info('AutoML Algorithm out of recommendations, ending') break try:", "self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and self.objective.can_optimize_threshold: logger.info(\"Objective to optimize binary classification pipeline thresholds", "categorical. Categorical features will automatically be encoded show_iteration_plot (boolean, True):", "None: logger.info(\"Generating pipelines to search over...\") allowed_estimators = get_estimators(self.problem_type, self.allowed_model_families)", "out during training will not be included in the dictionary", "Returns: Dict[str, Dict[str, float]]: Dictionary keyed by pipeline name that", "pipeline.problem_type != self.problem_type: raise ValueError(\"Given pipeline {} is not compatible", "name that maps to a dictionary of scores. Note that", "parameters used to train that pipeline during automl search. 
Arguments:", "get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators set to {[estimator.name for estimator in allowed_estimators]}\")", "= self.rankings.iloc[0] best_pipeline_name = best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline: {best_pipeline_name}\") logger.info(f\"Best pipeline", "a pipeline Arguments: pipeline_id (int): pipeline to describe return_dict (bool):", "False else: leading_char = \"\" def search(self, show_iteration_plot=True): \"\"\"Find the", "self._find_best_pipeline() @property def results(self): \"\"\"Class that allows access to a", "return True # check max_time and max_iterations elapsed = time.time()", "log_subtitle(logger, \"Cross Validation\", underline=\"-\") all_objective_scores = [fold[\"all_objective_scores\"] for fold in", ".pipeline_search_plots import PipelineSearchPlots from evalml.automl.automl_algorithm import IterativeAlgorithm from evalml.automl.callbacks import", "all_objective_scores[c].std(axis=0) all_objective_scores.loc[\"mean\", c] = mean all_objective_scores.loc[\"std\", c] = std all_objective_scores.loc[\"coef", "in the log. \"\"\" return self._engine.train_batch(pipelines) def score_pipelines(self, pipelines, X_holdout,", "in Jupyter notebook. Disabled by default in non-Jupyter enviroments. \"\"\"", "families. Run evalml.pipelines.components.utils.allowed_model_families(\"binary\") to see options. Change `binary` to `multiclass`", "(dict): A dict of the parameters used to initalize a", "= time.time() - start_of_loop self._start += time_in_loop return False else:", "is None: self.show_batch_output = True if run_ensembling: ensemble_nth_batch = len(self.allowed_pipelines)", "training and thresholding\"\"\" if len(self.rankings) == 0: return best_pipeline =", "to train and evaluate. 
\"\"\" pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name]", "self.max_time = convert_to_seconds(max_time) if isinstance(max_time, str) else max_time self.max_iterations =", "If it is an integer, then the time will be", "X_train, y_train = self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else: X_train = self.X_train y_train", "raise ValueError(\"Not a valid data splitter\") if not objective.is_defined_for_problem_type(self.problem_type): raise", "batch produced a score of np.nan on the primary objective", "problems, and - R2 for regression problems. max_iterations (int): Maximum", "cv_scores = evaluation_results['cv_scores'] is_baseline = pipeline.model_family == ModelFamily.BASELINE cv_score =", "\"percent_better_than_baseline\": percent_better_than_baseline[self.objective.name], \"validation_score\": cv_scores[0] } if pipeline.model_family == ModelFamily.ENSEMBLE: input_pipeline_ids", "objective {} is not compatible with a {} problem.\".format(self.objective.name, self.problem_type.value))", "run ensembling.\") else: logger.info(f\"Ensembling will run every {ensemble_nth_batch} batches.\") self.max_iterations", "numerical or categorical. Categorical features will automatically be encoded show_iteration_plot", "each pipeline training iteration. Callback function takes three positional parameters:", "time in this loop does not count towards the time", "40 # Necessary for \"Plotting\" documentation, since Sphinx does not", "AutoML batch produced a score of np.nan on the primary", "after the duration has elapsed. If it is an integer,", "Run at least one pipeline for every search num_pipelines =", "on unseen data.\") return high_variance_cv def get_pipeline(self, pipeline_id): \"\"\"Given the", "pipelines on the given holdout data. 
Arguments: pipelines (list(PipelineBase)): List", "PipelineSearchPlots def __init__(self, X_train=None, y_train=None, problem_type=None, objective='auto', max_iterations=None, max_time=None, patience=None,", "format. Returns: None \"\"\" with open(file_path, 'wb') as f: cloudpickle.dump(self,", "evaluation_results['cv_scores'] is_baseline = pipeline.model_family == ModelFamily.BASELINE cv_score = cv_scores.mean() percent_better_than_baseline", "pipeline, self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError: pass if self.search_iteration_plot: self.search_iteration_plot.update() if self.add_result_callback:", "instead\".format(tolerance)) self.patience = patience self.tolerance = tolerance or 0.0 self._results", "before each pipeline training iteration. Callback function takes three positional", "allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed, n_jobs=self.n_jobs, number_features=self.X_train.shape[1], pipelines_per_batch=self._pipelines_per_batch, ensembling=run_ensembling, pipeline_params=pipeline_params ) def", "self.max_iterations: return False # check for early stopping if self.patience", "None or non-negative. Received {max_iterations}.\") self.max_time = convert_to_seconds(max_time) if isinstance(max_time,", "n_folds for objective, score in scores.items()} def _post_evaluation_callback(self, pipeline, evaluation_results):", "'errors': [] } self.random_seed = random_seed self.n_jobs = n_jobs self.plot", "scores. {pipeline_name} may not perform as estimated on unseen data.\")", "max_time self.max_iterations = max_iterations self.max_batches = max_batches self._pipelines_per_batch = _pipelines_per_batch", "self) desc = f\"{pipeline.name}\" if len(desc) > AutoMLSearch._MAX_NAME_LEN: desc =", "or not to optimize the binary pipeline threshold. 
Defaults to", "% self.max_time) logger.info(\"Allowed model families: %s\\n\" % \", \".join([model.value for", "scores if field in objective_name_to_class: scores[field] += value return {objective:", "sklearn.model_selection import BaseCrossValidator from .pipeline_search_plots import PipelineSearchPlots from evalml.automl.automl_algorithm import", "True, return dictionary of information about pipeline. Defaults to False.", "file_path, pickle_protocol=cloudpickle.DEFAULT_PROTOCOL): \"\"\"Saves AutoML object at file path Arguments: file_path", "automl search has been run. Arguments: pipeline (PipelineBase): pipeline to", "-cv_score if self.objective.greater_is_better else cv_score try: self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id]) except", "self._best_pipeline == self.get_pipeline(best_pipeline['id'])): best_pipeline = self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline: if best_pipeline.model_family", "[])}\\n\" f\"Patience: {self.patience}\\n\" f\"Tolerance: {self.tolerance}\\n\" f\"Data Splitting: {self.data_splitter}\\n\" f\"Tuner: {self.tuner_class.__name__}\\n\"", "_handle_keyboard_interrupt(self): \"\"\"Presents a prompt to the user asking if they", "{} problem.\".format(obj.name, self.problem_type.value)) for pipeline in self.allowed_pipelines or []: if", "is called before search _baseline_cv_scores will be empty so we", "AutoMLSearch: \"\"\"Automated Pipeline search.\"\"\" _MAX_NAME_LEN = 40 # Necessary for", "= defaultdict(int) n_folds = len(cv_data) for fold_data in cv_data: for", "{}\".format(self.objective)) logger.info(\"Total training time (including CV): %.1f seconds\" % pipeline_results[\"training_time\"])", "(boolean): Whether or not to train the best pipeline before", "ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline ) from evalml.pipelines.components.utils import 
get_estimators from", "self.y_train, self.problem_type, self.problem_configuration, n_splits=3, shuffle=True, random_seed=self.random_seed) self.data_splitter = self.data_splitter or", "if len(self.rankings) == 0: return best_pipeline = self.rankings.iloc[0] if not", "of pipeline components, problem, training time, cross validation, etc. \"\"\"", "pipeline_id, \"pipeline_name\": pipeline.name, \"pipeline_class\": type(pipeline), \"pipeline_summary\": pipeline.summary, \"parameters\": pipeline.parameters, \"score\":", "`train_best_pipeline` is set to False, returns an untrained pipeline instance.", "{self.allowed_model_families}\") if len(self.problem_configuration): pipeline_params = {**{'pipeline': self.problem_configuration}, **self.pipeline_parameters} else: pipeline_params", "[self._validate_objective(obj) for obj in additional_objectives] self.additional_objectives = additional_objectives self.objective_name_to_class =", "scores for all objectives # but also fields like \"#", "(ww.DataTable, pd.DataFrame): Holdout targets for scoring. objectives (list(str), list(ObjectiveBase)): Objectives", "Dictionary keyed by pipeline name that maps to a dictionary", "logger.info(\"Searching up to %s pipelines. \" % self.max_iterations) if self.max_time", "for pipelines. None and 1 are equivalent. If set to", "best_pipeline = self._engine.train_pipeline(best_pipeline, X_train, y_train, self.optimize_thresholds, self.objective) self._best_pipeline = best_pipeline", "add_result_callback (callable): Function called after each pipeline training iteration. Callback", "if they want to stop the search. Returns: bool: If", "best_score) / best_score) > self.tolerance score_improved = curr_score > best_score", "= bool(abs(cv_scores.std() / cv_scores.mean()) > threshold) if high_variance_cv: logger.warning(f\"High coefficient", "AutoSearchBase object \"\"\" with open(file_path, 'rb') as f: return cloudpickle.load(f)", "the parameters used to initalize a pipeline with. 
_ensembling_split_size (float):", "objective is part of default set of objectives for problem_type,", "the X_train argument') if y_train is None: raise ValueError('Must specify", "isinstance(max_time, (int, float)) and max_time < 0: raise ValueError(f\"Parameter max_time", "self.allowed_pipelines = allowed_pipelines self.allowed_model_families = allowed_model_families self._automl_algorithm = None self._start", "self.X_train.iloc[self.ensembling_indices], self.y_train.iloc[self.ensembling_indices] else: X_train = self.X_train y_train = self.y_train if", "== self.objective.name), None) if existing_main_objective is not None: additional_objectives.remove(existing_main_objective) else:", "logs a warning if variance is higher than specified threshhold.\"\"\"", "search. The default of None indicates all pipelines for this", "self.objective_name_to_class[obj_name] # In the event add_to_rankings is called before search", "{} is not compatible with a {} problem.\".format(obj.name, self.problem_type.value)) for", "been iterated over. If the number of unique pipelines to", "logger.info(\"Input for ensembler are pipelines with IDs: \" + str(pipeline_results['input_pipeline_ids']))", "stacktrace will be displayed in the log. 
\"\"\" return self._engine.score_batch(pipelines,", "is not compatible with a {} problem.\".format(obj.name, self.problem_type.value)) for pipeline", "positional parameters: A dictionary containing the training results for the", "is one, so ensembling will not run.\") run_ensembling = False", "* self._pipelines_per_batch + 1) if self.max_iterations < first_ensembling_iteration: run_ensembling =", "if show_iteration_plot: try: get_ipython except NameError: show_iteration_plot = False log_title(logger,", "from evalml.objectives import ( get_core_objectives, get_non_core_objectives, get_objective ) from evalml.pipelines", "loop_interrupted: current_batch_pipelines = self._automl_algorithm.next_batch() except StopIteration: logger.info('AutoML Algorithm out of", "self.max_batches: self.max_batches = 1 logger.info(\"Using default limit of max_batches=1.\\n\") if", "Defaults to None. tolerance (float): Minimum percentage difference to qualify", "Defaults to False. max_batches (int): The maximum number of batches", "values as a 1d vector using the y_train argument') try:", "pipelines, X_holdout, y_holdout, objectives): \"\"\"Score a list of pipelines on", ">= self.patience: logger.info(\"\\n\\n{} iterations without improvement. Stopping search early...\".format(self.patience)) return", "< _ensembling_split_size < 1): raise ValueError(f\"Ensembling split size must be", "pipelines in the current AutoML batch produced a score of", "1.0 or tolerance < 0.0): raise ValueError(\"tolerance value must be", "+ self._pipelines_per_batch * (self.max_batches - 1 - num_ensemble_batches) + num_ensemble_batches)", "length [n_samples]. Required for supervised learning tasks. 
problem_type (str or", "for p in required_parameters): raise ValueError(\"user_parameters must be a dict", "\").strip().lower() if choice == \"y\": logger.info(\"Exiting AutoMLSearch.\") return True elif", "in problem_configuration for p in required_parameters): raise ValueError(\"user_parameters must be", "for pipeline in self.allowed_pipelines]}\") logger.debug(f\"allowed_model_families set to {self.allowed_model_families}\") if len(self.problem_configuration):", "== 0: return True # check max_time and max_iterations elapsed", "f\"Parameters: \\n{'='*20}\\n\" f\"Objective: {get_objective(self.objective).name}\\n\" f\"Max Time: {self.max_time}\\n\" f\"Max Iterations: {self.max_iterations}\\n\"", "pipelines to search over per batch is one, ensembling will", "= {**{'pipeline': self.problem_configuration}, **self.pipeline_parameters} else: pipeline_params = self.pipeline_parameters self._automl_algorithm =", "depending on the problem type. Note that if allowed_pipelines is", "or log_error_callback self.data_splitter = data_splitter self.optimize_thresholds = optimize_thresholds self.ensembling =", "_MAX_NAME_LEN = 40 # Necessary for \"Plotting\" documentation, since Sphinx", "ensembling.\") else: logger.info(f\"Ensembling will run every {ensemble_nth_batch} batches.\") self.max_iterations =", "\"Use evalml.objectives.utils.get_core_objective_names() \" \"to get all objective names allowed in", "keep=\"first\") @property def full_rankings(self): \"\"\"Returns a pandas.DataFrame with scoring results", "else np.inf all_objective_scores = all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format', '{:.3f}'.format, 'expand_frame_repr', False):", "if not empty. 
random_seed (int): Seed for the random number", "add_result_callback=None, error_callback=None, additional_objectives=None, random_seed=0, n_jobs=-1, tuner_class=None, optimize_thresholds=True, ensembling=False, max_batches=None, problem_configuration=None,", "search over...\") allowed_estimators = get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators set to {[estimator.name", "desc.ljust(AutoMLSearch._MAX_NAME_LEN) batch_number = 1 if self._automl_algorithm is not None and", "series problems, values should be passed in for the gap", "search. For example, in time series problems, values should be", "for id in self._results['search_order'][1:]: curr_score = self._results['pipeline_results'][id]['score'] significant_change = abs((curr_score", "it. Defaults to True. pipeline_parameters (dict): A dict of the", "ensembling will not run. Defaults to False. max_batches (int): The", "bool(abs(cv_scores.std() / cv_scores.mean()) > threshold) if high_variance_cv: logger.warning(f\"High coefficient of", "type are allowed. Setting this field will cause allowed_model_families to", "the provided ID \"\"\" pipeline_results = self.results['pipeline_results'].get(pipeline_id) if pipeline_results is", "if is_baseline: self._baseline_cv_scores = mean_cv_all_objectives for obj_name in mean_cv_all_objectives: objective_class", "have been made Returns: int: the number of pipeline evaluations", "= False self.X_train = infer_feature_types(X_train) self.y_train = infer_feature_types(y_train) self.ensembling_indices =", "iteration. 
Callback function takes three positional parameters: The pipeline class,", "thresholding\"\"\" if len(self.rankings) == 0: return best_pipeline = self.rankings.iloc[0] if", "with scoring results from all pipelines searched\"\"\" ascending = True", "raise ValueError(\"user_parameters must be a dict containing values for at", "as problem_type') self.tuner_class = tuner_class or SKOptTuner self.start_iteration_callback = start_iteration_callback", "data target values as a 1d vector using the y_train", ">= self.max_time: return False elif self.max_iterations and num_pipelines >= self.max_iterations:", "return False # for add_to_rankings if self._searched: return True #", "AutoML! \" \"Use evalml.objectives.utils.get_core_objective_names() \" \"to get all objective names", "pd.DataFrame): Holdout targets for scoring. objectives (list(str), list(ObjectiveBase)): Objectives used", "classification problems, and - R2 for regression problems. max_iterations (int):", "(PipelineBase): pipeline to train and evaluate. \"\"\" pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name']", "1d vector using the y_train argument') try: self.problem_type = handle_problem_types(problem_type)", "self.problem_configuration = self._validate_problem_configuration(problem_configuration) self._train_best_pipeline = train_best_pipeline self._best_pipeline = None self._searched", "in self.allowed_pipelines or []: if pipeline.problem_type != self.problem_type: raise ValueError(\"Given", "must be run before selecting `best_pipeline`.\") return self._best_pipeline def save(self,", "def score_pipelines(self, pipelines, X_holdout, y_holdout, objectives): \"\"\"Score a list of", "not count towards the time budget (if set) time_in_loop =", "return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property def full_rankings(self): \"\"\"Returns a pandas.DataFrame with", "are allowed. 
Setting this field will cause allowed_model_families to be", "to import plotly; skipping pipeline search plotting\\n\") self.allowed_pipelines = allowed_pipelines", "also fields like \"# Training\" and \"# Testing\", so we", "use. Defaults to SKOptTuner. optimize_thresholds (bool): Whether or not to", "False full_rankings_cols = [\"id\", \"pipeline_name\", \"score\", \"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\", \"parameters\"]", "tolerance or 0.0 self._results = { 'pipeline_results': {}, 'search_order': [],", "dictionary but the exception and stacktrace will be displayed in", "binary classification pipeline thresholds for: {}\".format(self.objective)) logger.info(\"Total training time (including", "get_objective ) from evalml.pipelines import ( MeanBaselineRegressionPipeline, ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline,", "BaseCrossValidator): raise ValueError(\"Not a valid data splitter\") if not objective.is_defined_for_problem_type(self.problem_type):", "automl rankings with the requirement that automl search has been", "= [fold[\"all_objective_scores\"] for fold in pipeline_results[\"cv_data\"]] all_objective_scores = pd.DataFrame(all_objective_scores) for", "default_data_splitter self.pipeline_parameters = pipeline_parameters if pipeline_parameters is not None else", "break full_rankings = self.full_rankings current_batch_idx = full_rankings['id'].isin(new_pipeline_ids) current_batch_pipeline_scores = full_rankings[current_batch_idx]['score']", "[self.objective] + self.additional_objectives} if not isinstance(max_time, (int, float, str, type(None))):", "objective {self.objective}.\") self.search_duration = time.time() - self._start elapsed_time = time_elapsed(self._start)", "search. 
If max_iterations and max_time is not set, then max_iterations", "the {first_ensembling_iteration} iteration and every {len(self.allowed_pipelines) * self._pipelines_per_batch} iterations after", "{ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline, ProblemTypes.TIME_SERIES_BINARY: TimeSeriesBaselineBinaryPipeline}[self.problem_type] gap = self.problem_configuration['gap'] max_delay", "True, search should terminate early \"\"\" leading_char = \"\\n\" start_of_loop", "= f\"\\nSearch Results: \\n{'='*20}\\n{rankings_str}\" return search_desc + rankings_desc def _validate_problem_configuration(self,", "which will call `log_error_callback`. additional_objectives (list): Custom set of objectives", "not self.max_iterations and not self.max_time and not self.max_batches: self.max_batches =", "self._engine.train_pipeline(best_pipeline, X_train, y_train, self.optimize_thresholds, self.objective) self._best_pipeline = best_pipeline def _num_pipelines(self):", "None): Additional parameters needed to configure the search. For example,", "logger.debug(f\"allowed_estimators set to {[estimator.name for estimator in allowed_estimators]}\") self.allowed_pipelines =", "full_rankings[current_batch_idx]['score'] if len(current_batch_pipeline_scores) and current_batch_pipeline_scores.isna().all(): raise AutoMLSearchException(f\"All pipelines in the", "pd.DataFrame(self._results['pipeline_results'].values()) rankings_df = rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending, inplace=True) rankings_df.reset_index(drop=True, inplace=True) return", "= evaluation_results['cv_data'] cv_scores = evaluation_results['cv_scores'] is_baseline = pipeline.model_family == ModelFamily.BASELINE", "f\"parameters. 
Received {problem_configuration}.\") return problem_configuration or {} def _handle_keyboard_interrupt(self): \"\"\"Presents", "and max_iterations < 0: raise ValueError(f\"Parameter max_iterations must be None", "def search(self, show_iteration_plot=True): \"\"\"Find the best pipeline for the data", "early stopping. Only applicable if patience is not None. Defaults", "with open(file_path, 'wb') as f: cloudpickle.dump(self, f, protocol=pickle_protocol) @staticmethod def", "Note that the any pipelines that error out during scoring", "self.ensembling_indices, self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback) self.allowed_model_families = list(set([p.model_family for p", "to search for pipelines. This will not start a new", "= self._validate_problem_configuration(problem_configuration) self._train_best_pipeline = train_best_pipeline self._best_pipeline = None self._searched =", "any pipelines that error out during training will not be", "baseline pipeline to the data. This is the first pipeline", "for optimizing each pipeline during fit-time. When set to 'auto',", "self._best_pipeline = None self._searched = False self.X_train = infer_feature_types(X_train) self.y_train", "the pipelines were searched. \"\"\" return copy.deepcopy(self._results) @property def rankings(self):", "SequentialEngine from evalml.automl.utils import ( check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter ) from", "Arguments: pipeline_id (int): pipeline to retrieve Returns: PipelineBase: untrained pipeline", "pipelines.\") if self.max_batches is not None: logger.info(f\"Searching up to {self.max_batches}", "criterion and current state, should the search continue? 
Returns: bool:", "std all_objective_scores.loc[\"coef of var\", c] = std / mean if", "= None try: self.plot = PipelineSearchPlots(self) except ImportError: logger.warning(\"Unable to", "between 0 and 1 exclusive, received {_ensembling_split_size}\") X_shape = ww.DataTable(np.arange(self.X_train.shape[0]))", "to describe return_dict (bool): If True, return dictionary of information", "for the base score. percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name]", "Returns: PipelineBase: A trained instance of the best pipeline and", "None indicates all pipelines for this problem type are allowed.", "additional_objectives (list): Custom set of objectives to score on. Will", "components, problem, training time, cross validation, etc. \"\"\" if pipeline_id", "stream format. Returns: None \"\"\" with open(file_path, 'wb') as f:", "argument') if y_train is None: raise ValueError('Must specify training data", "{pipeline_name} may not perform as estimated on unseen data.\") return", "from evalml.pipelines.components.utils import get_estimators from evalml.pipelines.utils import make_pipeline from evalml.preprocessing", "pipeline before training and thresholding\"\"\" if len(self.rankings) == 0: return", "raise ValueError(f\"Parameter max_batches must be None or non-negative. 
Received {max_batches}.\")", "all_objective_scores = [fold[\"all_objective_scores\"] for fold in pipeline_results[\"cv_data\"]] all_objective_scores = pd.DataFrame(all_objective_scores)", "= self._results['search_order'][0] best_score = self._results['pipeline_results'][first_id]['score'] num_without_improvement = 0 for id", "searching for new pipelines after %d seconds.\\n\" % self.max_time) logger.info(\"Allowed", "None and max_iterations < 0: raise ValueError(f\"Parameter max_iterations must be", "Returns: PipelineBase: untrained pipeline instance associated with the provided ID", "allowed in AutoML! \" \"Use evalml.objectives.utils.get_core_objective_names() \" \"to get all", "and stacktrace will be displayed in the log. \"\"\" return", "Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives: {_print_list(self.additional_objectives or [])}\\n\" f\"Random Seed: {self.random_seed}\\n\"", "and current state, should the search continue? Returns: bool: True", "that maps to the fitted pipeline. Note that the any", "load Returns: AutoSearchBase object \"\"\" with open(file_path, 'rb') as f:", "The first batch will train a baseline pipline + one", "and 1 are equivalent. If set to -1, all CPUs", "each pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property def full_rankings(self): \"\"\"Returns a", "and num_pipelines >= self.max_iterations: return False # check for early", "about pipeline. Defaults to False. Returns: Description of specified pipeline.", "\"Training\") logger.info(\"Training for {} problems.\".format(pipeline.problem_type)) if self.optimize_thresholds and self.objective.is_defined_for_problem_type(ProblemTypes.BINARY) and", "like \"# Training\" and \"# Testing\", so we want to", "tasks. 
problem_type (str or ProblemTypes): type of supervised learning problem.", "if isinstance(objective, type): if objective in non_core_objectives: raise ValueError(f\"{objective.name.lower()} is", "Received {} instead\".format(tolerance)) self.patience = patience self.tolerance = tolerance or", "= get_core_objectives(self.problem_type) # if our main objective is part of", "float, str, type(None))): raise TypeError(f\"Parameter max_time must be a float,", "data. Arguments: pipelines (list(PipelineBase)): List of pipelines to train. X_holdout", "% self.objective.name) logger.info(\"{} score is better.\\n\".format('Greater' if self.objective.greater_is_better else 'Lower'))", "work well with instance attributes. plot = PipelineSearchPlots def __init__(self,", "out during scoring will not be included in the dictionary", "pipelines to train for every batch after the first one.", "raise ValueError('Must specify training data as a 2d array using", "to stop search early. Must be positive. If None, early", "parameters used to initalize a pipeline with. _ensembling_split_size (float): The", "ascending=ascending, inplace=True) rankings_df.reset_index(drop=True, inplace=True) return rankings_df @property def best_pipeline(self): \"\"\"Returns", "is complete. Arguments: pipelines (list(PipelineBase)): List of pipelines to train.", "the first pipeline fit during search. \"\"\" if self.problem_type ==", "= rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending, inplace=True) rankings_df.reset_index(drop=True, inplace=True) return rankings_df @property", "be encoded show_iteration_plot (boolean, True): Shows an iteration vs. score", "subclasses indicating the pipelines allowed in the search. 
The default", "pipelines to search over...\") allowed_estimators = get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators set", "self) return pipeline_id def _check_for_high_variance(self, pipeline, cv_scores, threshold=0.2): \"\"\"Checks cross-validation", "of length [n_samples]. Required for supervised learning tasks. problem_type (str", "(list(PipelineBase)): List of pipelines to train. Returns: Dict[str, PipelineBase]: Dictionary", "max_iterations must be None or non-negative. Received {max_iterations}.\") self.max_time =", "raise ValueError(f\"Parameter max_iterations must be None or non-negative. Received {max_iterations}.\")", "time_elapsed(self._start) desc = f\"\\nSearch finished after {elapsed_time}\" desc = desc.ljust(self._MAX_NAME_LEN)", "for regression problems. max_iterations (int): Maximum number of iterations to", "for early stopping if self.patience is None or self.tolerance is", "= start_iteration_callback self.add_result_callback = add_result_callback self.error_callback = error_callback or log_error_callback", "self.max_iterations, self._start, batch_number, self.show_batch_output) def _validate_objective(self, objective): non_core_objectives = get_non_core_objectives()", "on the given holdout data. Arguments: pipelines (list(PipelineBase)): List of", "pandas.DataFrame with scoring results from all pipelines searched\"\"\" ascending =", "search after the duration has elapsed. 
If it is an", "Baseline Estimator\": {\"gap\": gap, \"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod def _get_mean_cv_scores_for_all_objectives(cv_data,", "and `search_order`: a list describing the order the pipelines were", "Objectives: {_print_list(self.additional_objectives or [])}\\n\" f\"Random Seed: {self.random_seed}\\n\" f\"n_jobs: {self.n_jobs}\\n\" f\"Optimize", "tolerance=None, data_splitter=None, allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None, add_result_callback=None, error_callback=None, additional_objectives=None, random_seed=0, n_jobs=-1,", "self.allowed_model_families) logger.debug(f\"allowed_estimators set to {[estimator.name for estimator in allowed_estimators]}\") self.allowed_pipelines", "positive. If None, early stopping is disabled. Defaults to None.", "except KeyboardInterrupt: if self._handle_keyboard_interrupt(): self._interrupted = True current_batch_pipelines = []", "return True def _validate_problem_type(self): for obj in self.additional_objectives: if not", "pipelines after %d seconds.\\n\" % self.max_time) logger.info(\"Allowed model families: %s\\n\"", "self._validate_problem_configuration(problem_configuration) self._train_best_pipeline = train_best_pipeline self._best_pipeline = None self._searched = False", "will cause allowed_model_families to be ignored. allowed_model_families (list(str, ModelFamily)): The", "= 0 for id in self._results['search_order'][1:]: curr_score = self._results['pipeline_results'][id]['score'] significant_change", "# check max_time and max_iterations elapsed = time.time() - self._start", "{}.\".format(pipeline.name, self.problem_type.value)) def _add_baseline_pipelines(self): \"\"\"Fits a baseline pipeline to the", "of the parameters used to initalize a pipeline with. 
_ensembling_split_size", "= std all_objective_scores.loc[\"coef of var\", c] = std / mean", "evalml.pipelines.utils import make_pipeline from evalml.preprocessing import split_data from evalml.problem_types import", "pipelines were searched. \"\"\" return copy.deepcopy(self._results) @property def rankings(self): \"\"\"Returns", "to the data. This is the first pipeline fit during", "automatically be encoded show_iteration_plot (boolean, True): Shows an iteration vs.", "0: raise ValueError(f\"Parameter max_iterations must be None or non-negative. Received", "to pass along other appropriate parameters by default. Defaults to", "if existing_main_objective is not None: additional_objectives.remove(existing_main_objective) else: additional_objectives = [get_objective(o)", "without improvement. Stopping search early...\".format(self.patience)) return False return True def", "is too small, so ensembling will not run. Set max_batches", "for binary classification problems, - LogLossMulticlass for multiclass classification problems,", "(str or ProblemTypes): type of supervised learning problem. See evalml.problem_types.ProblemType.all_problem_types", "our main objective is part of default set of objectives", "function.__name__ else: return None search_desc = ( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters:", "all_objective_scores.fillna(\"-\") with pd.option_context('display.float_format', '{:.3f}'.format, 'expand_frame_repr', False): logger.info(all_objective_scores) if return_dict: return", "0: raise ValueError(f\"Parameter max_time must be None or non-negative. 
Received", "if self.objective.greater_is_better else cv_score try: self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError:", "\"max_delay\": max_delay}, \"Time Series Baseline Estimator\": {\"gap\": gap, \"max_delay\": max_delay}})", "provided ID \"\"\" pipeline_results = self.results['pipeline_results'].get(pipeline_id) if pipeline_results is None:", "AutoMLSearchException(f\"All pipelines in the current AutoML batch produced a score", "search. Parameters max_time, and max_iterations have precedence over stopping the", "pipeline.parameters, self) desc = f\"{pipeline.name}\" if len(desc) > AutoMLSearch._MAX_NAME_LEN: desc", "in scores.items()} def _post_evaluation_callback(self, pipeline, evaluation_results): training_time = evaluation_results['training_time'] cv_data", "pipeline. Includes information such as type of pipeline components, problem,", "{} problem.\".format(self.objective.name, self.problem_type.value)) if additional_objectives is None: additional_objectives = get_core_objectives(self.problem_type)", "cross-validation scores and logs a warning if variance is higher", "of the best pipeline and parameters found during automl search.", "y_train = self.y_train if hasattr(self.data_splitter, \"transform_sample\"): train_indices = self.data_splitter.transform_sample(X_train, y_train)", "are used. For n_jobs below -1, (n_cpus + 1 +", "ending') break try: new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted = False except", "or parameters is None: raise PipelineNotFoundError(\"Pipeline class or parameters not", "a list describing the order the pipelines were searched. \"\"\"", "{max_iterations}.\") self.max_time = convert_to_seconds(max_time) if isinstance(max_time, str) else max_time self.max_iterations", "Setting this field will cause allowed_model_families to be ignored. 
allowed_model_families", "\"\"\" with open(file_path, 'rb') as f: return cloudpickle.load(f) def train_pipelines(self,", "{_get_funct_name(self.start_iteration_callback)}\\n\" f\"Add Result Callback: {_get_funct_name(self.add_result_callback)}\\n\" f\"Additional Objectives: {_print_list(self.additional_objectives or [])}\\n\"", "pipeline, self) return pipeline_id def _check_for_high_variance(self, pipeline, cv_scores, threshold=0.2): \"\"\"Checks", "takes three positional parameters: A dictionary containing the training results", "is None or parameters is None: raise PipelineNotFoundError(\"Pipeline class or", "import pandas as pd import woodwork as ww from sklearn.model_selection", "} if pipeline.model_family == ModelFamily.ENSEMBLE: input_pipeline_ids = [self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family", "plot outside of a jupyter notebook if show_iteration_plot: try: get_ipython", "of pipeline evaluations which have been made Returns: int: the", "run and will not run again on the same instance.", "copy of the results from `automl_search`. Returns: dict containing `pipeline_results`:", "estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters) for estimator in allowed_estimators] if self.allowed_pipelines ==", "else: return None search_desc = ( f\"{handle_problem_types(self.problem_type).name} Search\\n\\n\" f\"Parameters: \\n{'='*20}\\n\"", "import copy import time from collections import defaultdict import cloudpickle", "of parameters used with each pipeline.\"\"\" return self.full_rankings.drop_duplicates(subset=\"pipeline_name\", keep=\"first\") @property", "that the any pipelines that error out during scoring will", "train and evaluate. \"\"\" pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name] for", "for the gap and max_delay variables. 
train_best_pipeline (boolean): Whether or", "contains scores for all objectives # but also fields like", "allowed pipeline class has been iterated over. If the number", "True def _find_best_pipeline(self): \"\"\"Finds the best pipeline in the rankings", "allowed in the search. The default of None indicates all", "{} is not compatible with a {} problem.\".format(self.objective.name, self.problem_type.value)) if", "1 num_ensemble_batches = (self.max_batches - 1) // ensemble_nth_batch if num_ensemble_batches", "is an integer, then the time will be in seconds.", "= y_train.iloc[train_indices] best_pipeline = self._engine.train_pipeline(best_pipeline, X_train, y_train, self.optimize_thresholds, self.objective) self._best_pipeline", "the time budget (if set) time_in_loop = time.time() - start_of_loop", "scoring results from all pipelines searched\"\"\" ascending = True if", "instance. \"\"\" if not self._best_pipeline: raise PipelineNotFoundError(\"automl search must be", "self._automl_algorithm.add_result(score_to_minimize, pipeline, self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError: pass if self.search_iteration_plot: self.search_iteration_plot.update() if", "objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given objective {} is not compatible with a", "if pipeline_parameters is not None else {} self.search_iteration_plot = None", "baseline = MeanBaselineRegressionPipeline(parameters={}) else: pipeline_class = {ProblemTypes.TIME_SERIES_REGRESSION: TimeSeriesBaselineRegressionPipeline, ProblemTypes.TIME_SERIES_MULTICLASS: TimeSeriesBaselineMulticlassPipeline,", "required_parameters = {'gap', 'max_delay'} if not problem_configuration or not all(p", "score. 
percent_better = objective_class.calculate_percent_difference(mean_cv_all_objectives[obj_name], self._baseline_cv_scores.get(obj_name, np.nan)) percent_better_than_baseline[obj_name] = percent_better high_variance_cv", "= max_iterations self.max_batches = max_batches self._pipelines_per_batch = _pipelines_per_batch if not", "= False full_rankings_cols = [\"id\", \"pipeline_name\", \"score\", \"validation_score\", \"percent_better_than_baseline\", \"high_variance_cv\",", "self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted = False except KeyboardInterrupt: loop_interrupted = True if", "this parameter will be ignored. data_splitter (sklearn.model_selection.BaseCrossValidator): Data splitting method", "The maximum number of batches of pipelines to search. Parameters", "pipeline in the rankings If self._best_pipeline already exists, check to", "infer_feature_types from evalml.utils.logger import ( get_logger, log_subtitle, log_title, time_elapsed, update_pipeline", "pipeline Arguments: pipeline_id (int): pipeline to describe return_dict (bool): If", "pipline + one of each pipeline family allowed in the", "'rb') as f: return cloudpickle.load(f) def train_pipelines(self, pipelines): \"\"\"Train a", "X_train is None: raise ValueError('Must specify training data as a", "threshold) if high_variance_cv: logger.warning(f\"High coefficient of variation (cv >= {threshold})", "False if self.allowed_pipelines is None: logger.info(\"Generating pipelines to search over...\")", "this loop does not count towards the time budget (if", "and max_iterations elapsed = time.time() - self._start if self.max_time and", "= split_data(X_shape, self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist() self._engine", "in pipeline_rows['parameters']: if pipeline.parameters == parameter: return self._engine.evaluate_batch([pipeline]) 
self._find_best_pipeline() @property", "return False else: leading_char = \"\" def search(self, show_iteration_plot=True): \"\"\"Find", "custom_hyperparameters=self.pipeline_parameters) for estimator in allowed_estimators] if self.allowed_pipelines == []: raise", "log_title, time_elapsed, update_pipeline ) logger = get_logger(__file__) class AutoMLSearch: \"\"\"Automated", "a float between 0.0 and 1.0 inclusive. Received {} instead\".format(tolerance))", "__init__(self, X_train=None, y_train=None, problem_type=None, objective='auto', max_iterations=None, max_time=None, patience=None, tolerance=None, data_splitter=None,", "in AutoML! \" \"Use evalml.objectives.utils.get_core_objective_names() \" \"to get all objective", "False. Returns: Description of specified pipeline. Includes information such as", "1, self.max_iterations, self._start, batch_number, self.show_batch_output) def _validate_objective(self, objective): non_core_objectives =", "{self.max_batches} batches for a total of {self.max_iterations} pipelines. \") elif", "pipelines to train. Returns: Dict[str, PipelineBase]: Dictionary keyed by pipeline", "of a jupyter notebook if show_iteration_plot: try: get_ipython except NameError:", "baseline = ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type == ProblemTypes.MULTICLASS: baseline = ModeBaselineMulticlassPipeline(parameters={})", "import time from collections import defaultdict import cloudpickle import numpy", "ValueError(\"tolerance value must be a float between 0.0 and 1.0", "problem_configuration or {} def _handle_keyboard_interrupt(self): \"\"\"Presents a prompt to the", "integer describing level of parallelism used for pipelines. None and", "None: logger.info(\"Searching up to %s pipelines. \" % self.max_iterations) if", "results from all pipelines searched\"\"\" ascending = True if self.objective.greater_is_better:", "for every batch after the first one. 
The first batch", "# The 'all_objective_scores' field contains scores for all objectives #", "self.rankings.iloc[0] best_pipeline_name = best_pipeline[\"pipeline_name\"] logger.info(f\"Best pipeline: {best_pipeline_name}\") logger.info(f\"Best pipeline {self.objective.name}:", "evalml.automl.engine import SequentialEngine from evalml.automl.utils import ( check_all_pipeline_names_unique, get_default_primary_search_objective, make_data_splitter", "= ensembling_indices.to_dataframe()[0].tolist() self._engine = SequentialEngine(self.X_train, self.y_train, self.ensembling_indices, self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback,", "f\"Max Batches: {self.max_batches}\\n\" f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines or [])}\\n\" f\"Patience: {self.patience}\\n\"", "check for early stopping if self.patience is None or self.tolerance", "families to search. The default of None searches over all", "data stream format. Returns: None \"\"\" with open(file_path, 'wb') as", "minutes, or hours. patience (int): Number of iterations without improvement", "\"to get all objective names allowed in automl.\") return objective()", "when ensembling is True. Must be between 0 and 1,", "called before each pipeline training iteration. Callback function takes three", "PipelineBase: A trained instance of the best pipeline and parameters", "the given holdout data. 
Arguments: pipelines (list(PipelineBase)): List of pipelines", "feature_types (list, optional): list of feature types, either numerical or", "raise TypeError(f\"Parameter max_time must be a float, int, string or", "pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name] for parameter in pipeline_rows['parameters']: if", "of recommendations, ending') break try: new_pipeline_ids = self._engine.evaluate_batch(current_batch_pipelines) loop_interrupted =", "automl.\") return objective() return objective def __str__(self): def _print_list(obj_list): lines", "pipeline instance associated with the provided ID \"\"\" pipeline_results =", "from evalml.model_family import ModelFamily from evalml.objectives import ( get_core_objectives, get_non_core_objectives,", "Function called when `search()` errors and raises an Exception. Callback", "return dictionary of information about pipeline. Defaults to False. Returns:", "the order the pipelines were searched. \"\"\" return copy.deepcopy(self._results) @property", "be in seconds. For strings, time can be specified as", "used. ensembling (boolean): If True, runs ensembling in a separate", "{self.optimize_thresholds}\\n\" ) rankings_desc = \"\" if not self.rankings.empty: rankings_str =", "evaluation_results['training_time'] cv_data = evaluation_results['cv_data'] cv_scores = evaluation_results['cv_scores'] is_baseline = pipeline.model_family", "logger.info(\"Objective to optimize binary classification pipeline thresholds for: {}\".format(self.objective)) logger.info(\"Total", "n_features]. Required. y_train (pd.Series, ww.DataColumn): The target training data of", "self.problem_type == ProblemTypes.MULTICLASS: baseline = ModeBaselineMulticlassPipeline(parameters={}) elif self.problem_type == ProblemTypes.REGRESSION:", "to train that pipeline during automl search. 
Arguments: pipeline_id (int):", "Iterations: {self.max_iterations}\\n\" f\"Max Batches: {self.max_batches}\\n\" f\"Allowed Pipelines: \\n{_print_list(self.allowed_pipelines or [])}\\n\"", "not self.max_time and not self.max_batches: self.max_batches = 1 logger.info(\"Using default", "evalml.problem_types import ProblemTypes, handle_problem_types from evalml.tuners import SKOptTuner from evalml.utils", "run at the {first_ensembling_iteration} iteration and every {len(self.allowed_pipelines) * self._pipelines_per_batch}", "best_pipeline = self.get_pipeline(best_pipeline['id']) if self._train_best_pipeline: if best_pipeline.model_family == ModelFamily.ENSEMBLE: X_train,", "[self._automl_algorithm._best_pipeline_info[model_family][\"id\"] for model_family in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids self._results['search_order'].append(pipeline_id) if", "self._results['pipeline_results'][pipeline_id]) except PipelineNotFoundError: pass if self.search_iteration_plot: self.search_iteration_plot.update() if self.add_result_callback: self.add_result_callback(self._results['pipeline_results'][pipeline_id],", "if y_train is None: raise ValueError('Must specify training data target", "logger.info(desc) self._find_best_pipeline() if self._best_pipeline is not None: best_pipeline = self.rankings.iloc[0]", "file_path (str): location to find file to load Returns: AutoSearchBase", "from `automl_search`. Returns: dict containing `pipeline_results`: a dict with results", "- 1 - num_ensemble_batches) + num_ensemble_batches) else: self.max_iterations = 1", "pipeline instance. 
\"\"\" if not self._best_pipeline: raise PipelineNotFoundError(\"automl search must", "obj in self.additional_objectives: if not obj.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Additional objective {}", "allowed_estimators = get_estimators(self.problem_type, self.allowed_model_families) logger.debug(f\"allowed_estimators set to {[estimator.name for estimator", "specified pipeline initialized with the parameters used to train that", "is the first pipeline fit during search. \"\"\" if self.problem_type", "objective): non_core_objectives = get_non_core_objectives() if isinstance(objective, type): if objective in", "pipeline, evaluation_results): training_time = evaluation_results['training_time'] cv_data = evaluation_results['cv_data'] cv_scores =", "\"\"\" if self.problem_type == ProblemTypes.BINARY: baseline = ModeBaselineBinaryPipeline(parameters={}) elif self.problem_type", "all_objective_scores.loc[\"std\", c] = std all_objective_scores.loc[\"coef of var\", c] = std", "[]: if pipeline.problem_type != self.problem_type: raise ValueError(\"Given pipeline {} is", "high_variance_cv: logger.warning(f\"High coefficient of variation (cv >= {threshold}) within cross", "dict containing values for at least the gap and max_delay", "ModeBaselineBinaryPipeline, ModeBaselineMulticlassPipeline, TimeSeriesBaselineBinaryPipeline, TimeSeriesBaselineMulticlassPipeline, TimeSeriesBaselineRegressionPipeline ) from evalml.pipelines.components.utils import get_estimators", "* (self.max_batches - 1 - num_ensemble_batches) + num_ensemble_batches) else: self.max_iterations", "pipeline. Defaults to False. Returns: Description of specified pipeline. 
Includes", "pd.DataFrame(columns=full_rankings_cols) rankings_df = pd.DataFrame(self._results['pipeline_results'].values()) rankings_df = rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending, inplace=True)", "self.pipeline_parameters = pipeline_parameters if pipeline_parameters is not None else {}", "score of np.nan on the primary objective {self.objective}.\") self.search_duration =", "X_train = self.X_train y_train = self.y_train if hasattr(self.data_splitter, \"transform_sample\"): train_indices", "= \"\" def search(self, show_iteration_plot=True): \"\"\"Find the best pipeline for", "if patience and (not isinstance(patience, int) or patience < 0):", "_ = split_data(X_shape, self.y_train, problem_type=self.problem_type, test_size=_ensembling_split_size, random_seed=self.random_seed) self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist()", "score_improved and significant_change: best_score = curr_score num_without_improvement = 0 else:", "not self._best_pipeline: raise PipelineNotFoundError(\"automl search must be run before selecting", "be between 0 and 1, exclusive. Defaults to 0.2 _pipelines_per_batch", "parameters is None: raise PipelineNotFoundError(\"Pipeline class or parameters not found", "None default_data_splitter = make_data_splitter(self.X_train, self.y_train, self.problem_type, self.problem_configuration, n_splits=3, shuffle=True, random_seed=self.random_seed)", "ValueError('Must specify training data as a 2d array using the", "a list of pipelines on the given holdout data. Arguments:", "training will not be included in the dictionary but the", "\"\"\" if not self._best_pipeline: raise PipelineNotFoundError(\"automl search must be run", "f, protocol=pickle_protocol) @staticmethod def load(file_path): \"\"\"Loads AutoML object at file", "of supervised learning problem. 
See evalml.problem_types.ProblemType.all_problem_types for a full list.", "first batch + each pipeline iteration + 1 first_ensembling_iteration =", "ensembling_indices.to_dataframe()[0].tolist() self._engine = SequentialEngine(self.X_train, self.y_train, self.ensembling_indices, self, should_continue_callback=self._should_continue, pre_evaluation_callback=self._pre_evaluation_callback, post_evaluation_callback=self._post_evaluation_callback)", "search_desc + rankings_desc def _validate_problem_configuration(self, problem_configuration=None): if self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]:", "the log. \"\"\" return self._engine.train_batch(pipelines) def score_pipelines(self, pipelines, X_holdout, y_holdout,", "[] } self.random_seed = random_seed self.n_jobs = n_jobs self.plot =", "from evalml.exceptions import AutoMLSearchException, PipelineNotFoundError from evalml.model_family import ModelFamily from", "* (self.max_batches - 1)) if run_ensembling: if not (0 <", "to score on. Will override default objectives for problem type", "get all objective names allowed in automl.\") return objective() return", "tuner_class: The tuner class to use. Defaults to SKOptTuner. optimize_thresholds", "import ModelFamily from evalml.objectives import ( get_core_objectives, get_non_core_objectives, get_objective )", "logger.info(f\"Best pipeline {self.objective.name}: {best_pipeline['score']:3f}\") self._searched = True def _find_best_pipeline(self): \"\"\"Finds", "Includes information such as type of pipeline components, problem, training", "ensemble metalearners. Only used when ensembling is True. Must be", "None self._start = 0.0 self._baseline_cv_scores = {} self.show_batch_output = False", "returning it. Defaults to True. pipeline_parameters (dict): A dict of", "self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters = {'gap', 'max_delay'} if not problem_configuration", "for training pipelines once the search is complete. 
Arguments: pipelines", "a total of {self.max_iterations} pipelines. \") elif self.max_iterations is not", "the pickle data stream format. Returns: None \"\"\" with open(file_path,", "can be helpful for training pipelines once the search is", "Defaults to 0.2 _pipelines_per_batch (int): The number of pipelines to", "value must be a positive integer. Received {} instead\".format(patience)) if", "pipeline_parameters (dict): A dict of the parameters used to initalize", "allowed_estimators]}\") self.allowed_pipelines = [make_pipeline(self.X_train, self.y_train, estimator, self.problem_type, custom_hyperparameters=self.pipeline_parameters) for estimator", "pipeline_params = self.pipeline_parameters self._automl_algorithm = IterativeAlgorithm( max_iterations=self.max_iterations, allowed_pipelines=self.allowed_pipelines, tuner_class=self.tuner_class, random_seed=self.random_seed,", "Returns: bool: True if yes, False if no. \"\"\" if", "not found in automl results\") return pipeline_class(parameters, random_seed=self.random_seed) def describe_pipeline(self,", "except NameError: show_iteration_plot = False log_title(logger, \"Beginning pipeline search\") logger.info(\"Optimizing", "0 and 1 exclusive, received {_ensembling_split_size}\") X_shape = ww.DataTable(np.arange(self.X_train.shape[0])) _,", "pipeline with. 
_ensembling_split_size (float): The amount of the training data", "len(cv_data) for fold_data in cv_data: for field, value in fold_data['all_objective_scores'].items():", "True if self.objective.greater_is_better: ascending = False full_rankings_cols = [\"id\", \"pipeline_name\",", "show_iteration_plot: try: get_ipython except NameError: show_iteration_plot = False log_title(logger, \"Beginning", "a pipeline training result, returns an untrained instance of the", "for pipeline in self.allowed_pipelines or []: if pipeline.problem_type != self.problem_type:", "the any pipelines that error out during training will not", "after that.\") if self.max_batches and self.max_iterations is None: self.show_batch_output =", "def _validate_problem_configuration(self, problem_configuration=None): if self.problem_type in [ProblemTypes.TIME_SERIES_REGRESSION]: required_parameters = {'gap',", "training data. This can be helpful for training pipelines once", "targets for scoring. objectives (list(str), list(ObjectiveBase)): Objectives used for scoring.", "= self._automl_algorithm.batch_number update_pipeline(logger, desc, len(self._results['pipeline_results']) + 1, self.max_iterations, self._start, batch_number,", "random_seed self.n_jobs = n_jobs self.plot = None try: self.plot =", "or self.tolerance is None: return True first_id = self._results['search_order'][0] best_score", "in additional_objectives if obj.name == self.objective.name), None) if existing_main_objective is", "regression problems. max_iterations (int): Maximum number of iterations to search.", "ProblemTypes): type of supervised learning problem. See evalml.problem_types.ProblemType.all_problem_types for a", "return self._engine.train_batch(pipelines) def score_pipelines(self, pipelines, X_holdout, y_holdout, objectives): \"\"\"Score a", "show_iteration_plot (boolean, True): Shows an iteration vs. 
score plot in", "inplace=True) rankings_df.reset_index(drop=True, inplace=True) return rankings_df @property def best_pipeline(self): \"\"\"Returns a", "{self.objective.name}: {best_pipeline['score']:3f}\") self._searched = True def _find_best_pipeline(self): \"\"\"Finds the best", "1, exclusive. Defaults to 0.2 _pipelines_per_batch (int): The number of", "__str__(self): def _print_list(obj_list): lines = sorted(['\\t{}'.format(o.name) for o in obj_list])", "self.max_iterations = max_iterations self.max_batches = max_batches self._pipelines_per_batch = _pipelines_per_batch if", "to initalize a pipeline with. _ensembling_split_size (float): The amount of", "is not compatible with a {} problem.\".format(self.objective.name, self.problem_type.value)) if additional_objectives", "False, returns an untrained pipeline instance. \"\"\" if not self._best_pipeline:", "return_dict (bool): If True, return dictionary of information about pipeline.", "made Returns: int: the number of pipeline evaluations made in", "a valid data splitter\") if not objective.is_defined_for_problem_type(self.problem_type): raise ValueError(\"Given objective", "be None or non-negative. 
Received {max_batches}.\") if max_iterations is not", "rankings_df.sort_values(\"score\", ascending=ascending, inplace=True) rankings_df.reset_index(drop=True, inplace=True) return rankings_df @property def best_pipeline(self):", "{\"gap\": gap, \"max_delay\": max_delay}, \"Time Series Baseline Estimator\": {\"gap\": gap,", "max_batches is not None and max_batches < 0: raise ValueError(f\"Parameter", "early \"\"\" leading_char = \"\\n\" start_of_loop = time.time() while True:", "problem_type') self.tuner_class = tuner_class or SKOptTuner self.start_iteration_callback = start_iteration_callback self.add_result_callback", "+ 1 num_ensemble_batches = (self.max_batches - 1) // ensemble_nth_batch if", "logger.info(\"Total training time (including CV): %.1f seconds\" % pipeline_results[\"training_time\"]) log_subtitle(logger,", "the search. \"\"\" if X_train is None: raise ValueError('Must specify", "\"Time Series Baseline Estimator\": {\"gap\": gap, \"max_delay\": max_delay}}) self._engine.evaluate_batch([baseline]) @staticmethod", "Defaults to True. pipeline_parameters (dict): A dict of the parameters", "feature types, either numerical or categorical. Categorical features will automatically", "to None. allowed_pipelines (list(class)): A list of PipelineBase subclasses indicating", "as seconds, minutes, or hours. patience (int): Number of iterations", "+= value return {objective: float(score) / n_folds for objective, score", "input_pipeline_ids self._results['search_order'].append(pipeline_id) if not is_baseline: score_to_minimize = -cv_score if self.objective.greater_is_better", "the automl rankings with the requirement that automl search has", "metalearners. Only used when ensembling is True. Must be between", "int, string or None. Received {type(max_time)} with value {str(max_time)}..\") if", "None and not issubclass(self.data_splitter.__class__, BaseCrossValidator): raise ValueError(\"Not a valid data", "run. 
Set max_batches >= {ensemble_nth_batch + 1} to run ensembling.\")", "'auto', chooses: - LogLossBinary for binary classification problems, - LogLossMulticlass", "= f\"\\nSearch finished after {elapsed_time}\" desc = desc.ljust(self._MAX_NAME_LEN) logger.info(desc) self._find_best_pipeline()", "warning if variance is higher than specified threshhold.\"\"\" pipeline_name =", "self._searched = True def _find_best_pipeline(self): \"\"\"Finds the best pipeline in", "evaluate. \"\"\" pipeline_rows = self.full_rankings[self.full_rankings['pipeline_name'] == pipeline.name] for parameter in", "been run. Arguments: pipeline (PipelineBase): pipeline to train and evaluate.", "{get_objective(self.objective).name}\\n\" f\"Max Time: {self.max_time}\\n\" f\"Max Iterations: {self.max_iterations}\\n\" f\"Max Batches: {self.max_batches}\\n\"", "ValueError(f\"Parameter max_batches must be None or non-negative. Received {max_batches}.\") if", "on the same instance. Re-initialize AutoMLSearch to search again.\") return", "training time (including CV): %.1f seconds\" % pipeline_results[\"training_time\"]) log_subtitle(logger, \"Cross", "problem_type (str or ProblemTypes): type of supervised learning problem. See", "batch_number = 1 if self._automl_algorithm is not None and self._automl_algorithm.batch_number", "has elapsed. If it is an integer, then the time", "= pd.DataFrame(self._results['pipeline_results'].values()) rankings_df = rankings_df[full_rankings_cols] rankings_df.sort_values(\"score\", ascending=ascending, inplace=True) rankings_df.reset_index(drop=True, inplace=True)", "that maps to a dictionary of scores. Note that the", "Defaults to True. start_iteration_callback (callable): Function called before each pipeline", "parameter: return self._engine.evaluate_batch([pipeline]) self._find_best_pipeline() @property def results(self): \"\"\"Class that allows", "data of length [n_samples]. Required for supervised learning tasks. 
problem_type", "- 1)) if run_ensembling: if not (0 < _ensembling_split_size <", "None and max_batches < 0: raise ValueError(f\"Parameter max_batches must be", "If the number of unique pipelines to search over per", "self._validate_objective(objective) if self.data_splitter is not None and not issubclass(self.data_splitter.__class__, BaseCrossValidator):", "estimator in allowed_estimators] if self.allowed_pipelines == []: raise ValueError(\"No allowed", "self._interrupted: return False # for add_to_rankings if self._searched: return True", "get_pipeline(self, pipeline_id): \"\"\"Given the ID of a pipeline training result,", "if self.data_splitter is not None and not issubclass(self.data_splitter.__class__, BaseCrossValidator): raise", "self.ensembling_indices = ensembling_indices.to_dataframe()[0].tolist() self._engine = SequentialEngine(self.X_train, self.y_train, self.ensembling_indices, self, should_continue_callback=self._should_continue,", "be displayed in the log. \"\"\" return self._engine.score_batch(pipelines, X_holdout, y_holdout,", "in self._automl_algorithm._best_pipeline_info] self._results['pipeline_results'][pipeline_id][\"input_pipeline_ids\"] = input_pipeline_ids self._results['search_order'].append(pipeline_id) if not is_baseline: score_to_minimize", "True current_batch_pipelines = [] current_batch_pipeline_scores = [] new_pipeline_ids = []", "self._searched: return True # Run at least one pipeline for", "strings, time can be specified as seconds, minutes, or hours.", "\") elif self.max_iterations is not None: logger.info(\"Searching up to %s", "to True, but max_iterations is too small, so ensembling will", "of each pipeline family allowed in the search. \"\"\" if", "with IDs: \" + str(pipeline_results['input_pipeline_ids'])) log_subtitle(logger, \"Training\") logger.info(\"Training for {}", "to search. 
The default of None searches over all model", "target values as a 1d vector using the y_train argument')", "\"n\": # So that the time in this loop does", "objective='auto', max_iterations=None, max_time=None, patience=None, tolerance=None, data_splitter=None, allowed_pipelines=None, allowed_model_families=None, start_iteration_callback=None, add_result_callback=None,", "iteration and every {len(self.allowed_pipelines) * self._pipelines_per_batch} iterations after that.\") if" ]
[ ". import mixins, types from .decorators import social_auth class SocialAuthMutation(mixins.SocialAuthMixin,", "social_auth class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social = graphene.Field(types.SocialType) class Meta: abstract", "social, **kwargs): return cls.resolve(root, info, social, **kwargs) class SocialAuth(mixins.ResolveMixin, SocialAuthMutation):", "setup_jwt_cookie from . import mixins, types from .decorators import social_auth", "**kwargs) class SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social Auth Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation):", "SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social = graphene.Field(types.SocialType) class Meta: abstract = True", "return cls.resolve(root, info, social, **kwargs) class SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social Auth", "provider = graphene.String(required=True) code = graphene.String(required=True) @classmethod @setup_jwt_cookie @social_auth def", "@classmethod @setup_jwt_cookie @social_auth def mutate(cls, root, info, social, **kwargs): return", "import graphene from graphql_jwt.decorators import setup_jwt_cookie from . import mixins,", "graphene from graphql_jwt.decorators import setup_jwt_cookie from . 
import mixins, types", "graphene.String(required=True) code = graphene.String(required=True) @classmethod @setup_jwt_cookie @social_auth def mutate(cls, root,", "= graphene.Field(types.SocialType) class Meta: abstract = True class Arguments: provider", "@social_auth def mutate(cls, root, info, social, **kwargs): return cls.resolve(root, info,", "Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation): \"\"\"Social Auth for JSON Web Token", "class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation): \"\"\"Social Auth for JSON Web Token (JWT)\"\"\"", "@setup_jwt_cookie @social_auth def mutate(cls, root, info, social, **kwargs): return cls.resolve(root,", "\"\"\"Social Auth Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation): \"\"\"Social Auth for JSON", ".decorators import social_auth class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social = graphene.Field(types.SocialType) class", "SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social Auth Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation): \"\"\"Social Auth", "social = graphene.Field(types.SocialType) class Meta: abstract = True class Arguments:", "SocialAuthMutation): \"\"\"Social Auth Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation): \"\"\"Social Auth for", "graphene.Field(types.SocialType) class Meta: abstract = True class Arguments: provider =", "class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social = graphene.Field(types.SocialType) class Meta: abstract =", "import mixins, types from .decorators import social_auth class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation):", "**kwargs): return cls.resolve(root, info, social, **kwargs) class SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social", "mutate(cls, root, info, social, **kwargs): return cls.resolve(root, info, social, **kwargs)", "root, info, social, 
**kwargs): return cls.resolve(root, info, social, **kwargs) class", "class Meta: abstract = True class Arguments: provider = graphene.String(required=True)", "from .decorators import social_auth class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social = graphene.Field(types.SocialType)", "class SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social Auth Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation): \"\"\"Social", "from . import mixins, types from .decorators import social_auth class", "graphql_jwt.decorators import setup_jwt_cookie from . import mixins, types from .decorators", "mixins, types from .decorators import social_auth class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social", "types from .decorators import social_auth class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social =", "= graphene.String(required=True) @classmethod @setup_jwt_cookie @social_auth def mutate(cls, root, info, social,", "graphene.Mutation): social = graphene.Field(types.SocialType) class Meta: abstract = True class", "import social_auth class SocialAuthMutation(mixins.SocialAuthMixin, graphene.Mutation): social = graphene.Field(types.SocialType) class Meta:", "import setup_jwt_cookie from . 
import mixins, types from .decorators import", "abstract = True class Arguments: provider = graphene.String(required=True) code =", "social, **kwargs) class SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social Auth Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin,", "Auth Mutation\"\"\" class SocialAuthJWT(mixins.JSONWebTokenMixin, SocialAuthMutation): \"\"\"Social Auth for JSON Web", "graphene.String(required=True) @classmethod @setup_jwt_cookie @social_auth def mutate(cls, root, info, social, **kwargs):", "code = graphene.String(required=True) @classmethod @setup_jwt_cookie @social_auth def mutate(cls, root, info,", "= graphene.String(required=True) code = graphene.String(required=True) @classmethod @setup_jwt_cookie @social_auth def mutate(cls,", "def mutate(cls, root, info, social, **kwargs): return cls.resolve(root, info, social,", "Meta: abstract = True class Arguments: provider = graphene.String(required=True) code", "= True class Arguments: provider = graphene.String(required=True) code = graphene.String(required=True)", "True class Arguments: provider = graphene.String(required=True) code = graphene.String(required=True) @classmethod", "cls.resolve(root, info, social, **kwargs) class SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social Auth Mutation\"\"\"", "info, social, **kwargs) class SocialAuth(mixins.ResolveMixin, SocialAuthMutation): \"\"\"Social Auth Mutation\"\"\" class", "info, social, **kwargs): return cls.resolve(root, info, social, **kwargs) class SocialAuth(mixins.ResolveMixin,", "class Arguments: provider = graphene.String(required=True) code = graphene.String(required=True) @classmethod @setup_jwt_cookie", "from graphql_jwt.decorators import setup_jwt_cookie from . import mixins, types from", "Arguments: provider = graphene.String(required=True) code = graphene.String(required=True) @classmethod @setup_jwt_cookie @social_auth" ]
[ "base.py [7d3f5e6] <EMAIL> $ \"\"\" Base classes for regressor Visualizers.", "Jun 03 10:30:36 2016 -0700 # # Copyright (C) 2016", "2016 District Data Labs # For license information, see LICENSE.txt", "otherwise it raises a ``YellowbrickTypeError``. \"\"\" def __init__(self, model, ax=None,", "\"\"\" def __init__(self, model, ax=None, **kwargs): if not isregressor(model): raise", "try a classifier or \" \"clustering score visualizer instead!\" )", "# # Copyright (C) 2016 District Data Labs # For", "\"\"\" ########################################################################## ## Imports ########################################################################## from ..utils import isregressor from", "The primary functionality of this class is to perform a", "Imports ########################################################################## from ..utils import isregressor from ..base import ScoreVisualizer", "# Author: <NAME> <<EMAIL>> # Created: Fri Jun 03 10:30:36", "LICENSE.txt # # ID: base.py [7d3f5e6] <EMAIL> $ \"\"\" Base", "a regressor, otherwise it raises a ``YellowbrickTypeError``. \"\"\" def __init__(self,", "Visualizers. # # Author: <NAME> <<EMAIL>> # Author: <NAME> <<EMAIL>>", "Visualizers. 
\"\"\" ########################################################################## ## Imports ########################################################################## from ..utils import isregressor", "## Imports ########################################################################## from ..utils import isregressor from ..base import", "or \" \"clustering score visualizer instead!\" ) super(RegressionScoreVisualizer, self).__init__(model, ax=ax,", "to ensure the passed in estimator is a regressor, otherwise", "ScoreVisualizer from ..exceptions import YellowbrickTypeError ## Packages for export __all__", "from ..exceptions import YellowbrickTypeError ## Packages for export __all__ =", "in estimator is a regressor, otherwise it raises a ``YellowbrickTypeError``.", "passed in estimator is a regressor, otherwise it raises a", "not a regressor; try a classifier or \" \"clustering score", "class RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base class for all ScoreVisualizers that evaluate", "$ \"\"\" Base classes for regressor Visualizers. \"\"\" ########################################################################## ##", "isregressor(model): raise YellowbrickTypeError( \"This estimator is not a regressor; try", "\"\"\" Base classes for regressor Visualizers. \"\"\" ########################################################################## ## Imports", "\"\"\" Base class for all ScoreVisualizers that evaluate a regression", "primary functionality of this class is to perform a check", "is to perform a check to ensure the passed in", "# # Author: <NAME> <<EMAIL>> # Author: <NAME> <<EMAIL>> #", "a check to ensure the passed in estimator is a", "import isregressor from ..base import ScoreVisualizer from ..exceptions import YellowbrickTypeError", "<EMAIL> $ \"\"\" Base classes for regressor Visualizers. 
\"\"\" ##########################################################################", "of this class is to perform a check to ensure", "District Data Labs # For license information, see LICENSE.txt #", "# # ID: base.py [7d3f5e6] <EMAIL> $ \"\"\" Base classes", "model, ax=None, **kwargs): if not isregressor(model): raise YellowbrickTypeError( \"This estimator", "for all ScoreVisualizers that evaluate a regression estimator. The primary", "# yellowbrick.regressor.base # Base classes for regressor Visualizers. # #", "regression estimator. The primary functionality of this class is to", "regressor, otherwise it raises a ``YellowbrickTypeError``. \"\"\" def __init__(self, model,", "<reponame>Juan0001/yellowbrick-docs-zh # yellowbrick.regressor.base # Base classes for regressor Visualizers. #", "Base classes for regressor Visualizers. \"\"\" ########################################################################## ## Imports ##########################################################################", "the passed in estimator is a regressor, otherwise it raises", "# Base classes for regressor Visualizers. # # Author: <NAME>", "Visualization Base Object ########################################################################## class RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base class for", "Regression Visualization Base Object ########################################################################## class RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base class", "estimator is a regressor, otherwise it raises a ``YellowbrickTypeError``. 
\"\"\"", "Author: <NAME> <<EMAIL>> # Created: Fri Jun 03 10:30:36 2016", "see LICENSE.txt # # ID: base.py [7d3f5e6] <EMAIL> $ \"\"\"", "<<EMAIL>> # Created: Fri Jun 03 10:30:36 2016 -0700 #", "] ########################################################################## ## Regression Visualization Base Object ########################################################################## class RegressionScoreVisualizer(ScoreVisualizer):", "\" \"clustering score visualizer instead!\" ) super(RegressionScoreVisualizer, self).__init__(model, ax=ax, **kwargs)", "YellowbrickTypeError ## Packages for export __all__ = [ \"RegressionScoreVisualizer\", ]", "YellowbrickTypeError( \"This estimator is not a regressor; try a classifier", "from ..base import ScoreVisualizer from ..exceptions import YellowbrickTypeError ## Packages", "all ScoreVisualizers that evaluate a regression estimator. The primary functionality", "Base classes for regressor Visualizers. # # Author: <NAME> <<EMAIL>>", "``YellowbrickTypeError``. \"\"\" def __init__(self, model, ax=None, **kwargs): if not isregressor(model):", "estimator is not a regressor; try a classifier or \"", "classes for regressor Visualizers. \"\"\" ########################################################################## ## Imports ########################################################################## from", "check to ensure the passed in estimator is a regressor,", "for export __all__ = [ \"RegressionScoreVisualizer\", ] ########################################################################## ## Regression", "for regressor Visualizers. # # Author: <NAME> <<EMAIL>> # Author:", "is a regressor, otherwise it raises a ``YellowbrickTypeError``. 
\"\"\" def", "-0700 # # Copyright (C) 2016 District Data Labs #", "not isregressor(model): raise YellowbrickTypeError( \"This estimator is not a regressor;", "Labs # For license information, see LICENSE.txt # # ID:", "class is to perform a check to ensure the passed", "# Author: <NAME> <<EMAIL>> # Author: <NAME> <<EMAIL>> # Created:", "**kwargs): if not isregressor(model): raise YellowbrickTypeError( \"This estimator is not", "Packages for export __all__ = [ \"RegressionScoreVisualizer\", ] ########################################################################## ##", "# ID: base.py [7d3f5e6] <EMAIL> $ \"\"\" Base classes for", "__init__(self, model, ax=None, **kwargs): if not isregressor(model): raise YellowbrickTypeError( \"This", "# Created: Fri Jun 03 10:30:36 2016 -0700 # #", "..exceptions import YellowbrickTypeError ## Packages for export __all__ = [", "class for all ScoreVisualizers that evaluate a regression estimator. The", "license information, see LICENSE.txt # # ID: base.py [7d3f5e6] <EMAIL>", "Base Object ########################################################################## class RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base class for all", "functionality of this class is to perform a check to", "03 10:30:36 2016 -0700 # # Copyright (C) 2016 District", "to perform a check to ensure the passed in estimator", "perform a check to ensure the passed in estimator is", "that evaluate a regression estimator. The primary functionality of this", "<NAME> <<EMAIL>> # Created: Fri Jun 03 10:30:36 2016 -0700", "it raises a ``YellowbrickTypeError``. 
\"\"\" def __init__(self, model, ax=None, **kwargs):", "isregressor from ..base import ScoreVisualizer from ..exceptions import YellowbrickTypeError ##", "# For license information, see LICENSE.txt # # ID: base.py", "ID: base.py [7d3f5e6] <EMAIL> $ \"\"\" Base classes for regressor", "########################################################################## ## Regression Visualization Base Object ########################################################################## class RegressionScoreVisualizer(ScoreVisualizer): \"\"\"", "RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base class for all ScoreVisualizers that evaluate a", "ensure the passed in estimator is a regressor, otherwise it", "estimator. The primary functionality of this class is to perform", "raises a ``YellowbrickTypeError``. \"\"\" def __init__(self, model, ax=None, **kwargs): if", "<NAME> <<EMAIL>> # Author: <NAME> <<EMAIL>> # Created: Fri Jun", "yellowbrick.regressor.base # Base classes for regressor Visualizers. 
# # Author:", "Author: <NAME> <<EMAIL>> # Author: <NAME> <<EMAIL>> # Created: Fri", "Created: Fri Jun 03 10:30:36 2016 -0700 # # Copyright", "def __init__(self, model, ax=None, **kwargs): if not isregressor(model): raise YellowbrickTypeError(", "\"RegressionScoreVisualizer\", ] ########################################################################## ## Regression Visualization Base Object ########################################################################## class", "a classifier or \" \"clustering score visualizer instead!\" ) super(RegressionScoreVisualizer,", "10:30:36 2016 -0700 # # Copyright (C) 2016 District Data", "########################################################################## ## Imports ########################################################################## from ..utils import isregressor from ..base", "..utils import isregressor from ..base import ScoreVisualizer from ..exceptions import", "## Packages for export __all__ = [ \"RegressionScoreVisualizer\", ] ##########################################################################", "this class is to perform a check to ensure the", "Data Labs # For license information, see LICENSE.txt # #", "## Regression Visualization Base Object ########################################################################## class RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base", "Base class for all ScoreVisualizers that evaluate a regression estimator.", "[ \"RegressionScoreVisualizer\", ] ########################################################################## ## Regression Visualization Base Object ##########################################################################", "a ``YellowbrickTypeError``. \"\"\" def __init__(self, model, ax=None, **kwargs): if not", "[7d3f5e6] <EMAIL> $ \"\"\" Base classes for regressor Visualizers. \"\"\"", "for regressor Visualizers. 
\"\"\" ########################################################################## ## Imports ########################################################################## from ..utils", "import ScoreVisualizer from ..exceptions import YellowbrickTypeError ## Packages for export", "regressor Visualizers. # # Author: <NAME> <<EMAIL>> # Author: <NAME>", "export __all__ = [ \"RegressionScoreVisualizer\", ] ########################################################################## ## Regression Visualization", "2016 -0700 # # Copyright (C) 2016 District Data Labs", "Fri Jun 03 10:30:36 2016 -0700 # # Copyright (C)", "if not isregressor(model): raise YellowbrickTypeError( \"This estimator is not a", "is not a regressor; try a classifier or \" \"clustering", "# Copyright (C) 2016 District Data Labs # For license", "__all__ = [ \"RegressionScoreVisualizer\", ] ########################################################################## ## Regression Visualization Base", "import YellowbrickTypeError ## Packages for export __all__ = [ \"RegressionScoreVisualizer\",", "regressor; try a classifier or \" \"clustering score visualizer instead!\"", "regressor Visualizers. \"\"\" ########################################################################## ## Imports ########################################################################## from ..utils import", "classifier or \" \"clustering score visualizer instead!\" ) super(RegressionScoreVisualizer, self).__init__(model,", "For license information, see LICENSE.txt # # ID: base.py [7d3f5e6]", "a regressor; try a classifier or \" \"clustering score visualizer", "classes for regressor Visualizers. # # Author: <NAME> <<EMAIL>> #", "a regression estimator. 
The primary functionality of this class is", "= [ \"RegressionScoreVisualizer\", ] ########################################################################## ## Regression Visualization Base Object", "..base import ScoreVisualizer from ..exceptions import YellowbrickTypeError ## Packages for", "<<EMAIL>> # Author: <NAME> <<EMAIL>> # Created: Fri Jun 03", "from ..utils import isregressor from ..base import ScoreVisualizer from ..exceptions", "ax=None, **kwargs): if not isregressor(model): raise YellowbrickTypeError( \"This estimator is", "########################################################################## from ..utils import isregressor from ..base import ScoreVisualizer from", "Object ########################################################################## class RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base class for all ScoreVisualizers", "Copyright (C) 2016 District Data Labs # For license information,", "information, see LICENSE.txt # # ID: base.py [7d3f5e6] <EMAIL> $", "\"This estimator is not a regressor; try a classifier or", "########################################################################## class RegressionScoreVisualizer(ScoreVisualizer): \"\"\" Base class for all ScoreVisualizers that", "(C) 2016 District Data Labs # For license information, see", "raise YellowbrickTypeError( \"This estimator is not a regressor; try a", "evaluate a regression estimator. The primary functionality of this class", "ScoreVisualizers that evaluate a regression estimator. The primary functionality of" ]
[ "def run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks): objSlc1 = isceobj.createSlcImage() #right", "objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2 = isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage() slcWidth", "int(slcWidth / rgLooks) lines = min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp = resampName", "os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1, img2, inps.prefix, inps.azlooks, inps.rglooks) if __name__ ==", "slcWidth = imageSlc1.getWidth() intWidth = int(slcWidth / rgLooks) lines =", "iscesys.ImageUtil.ImageUtil import ImageUtil as IU def createParser(): ''' Command Line", "between two Sentinel swaths') parser.add_argument('-m', '--master', type=str, dest='master', required=True, help='Master", "isceobj.createImage() img1.load(inps.master + '.xml') img2 = isceobj.createImage() img2.load(inps.slave + '.xml')", "= isceobj.createImage() img2.load(inps.slave + '.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1, img2, inps.prefix,", "type=int, dest='azlooks', default=1, help='Azimuth looks') parser.add_argument('-r', '--rlks', type=int, dest='rglooks', default=1,", "isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage() slcWidth = imageSlc1.getWidth() intWidth =", "just text files, need to open them as image IU.copyAttributes(imageSlc1,", "lines objCrossmul.LooksDown = azLooks objCrossmul.LooksAcross = rgLooks objCrossmul.crossmul(objSlc1, objSlc2, objInt,", "/ rgLooks) lines = min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp = resampName +", "objAmp, objSlc1, objSlc2]: obj.finalizeImage() return imageInt, imageAmp def main(iargs=None): inps", "help='Master image') parser.add_argument('-s', '--slave', type=str, dest='slave', required=True, help='Slave image') parser.add_argument('-o',", "objSlc2.createImage() slcWidth = imageSlc1.getWidth() intWidth = 
int(slcWidth / rgLooks) lines", "import argparse import logging import isce import isceobj from components.stdproc.stdproc", "dest='rglooks', default=1, help='Range looks') return parser def cmdLineParse(iargs = None):", "2 are just text files, need to open them as", "need to open them as image IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage()", "= imageSlc1.getWidth() intWidth = int(slcWidth / rgLooks) lines = min(imageSlc1.getLength(),", "objAmp.createImage() objCrossmul = crossmul.createcrossmul() objCrossmul.width = slcWidth objCrossmul.length = lines", "objInt, objAmp) for obj in [objInt, objAmp, objSlc1, objSlc2]: obj.finalizeImage()", "objInt.createImage() objAmp = isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp = isceobj.createAmpImage() IU.copyAttributes(objAmp,", "Command Line Parser. ''' parser = argparse.ArgumentParser( description='Generate offset field", "parser.add_argument('-o', '--outdir', type=str, dest='prefix', default='crossmul', help='Prefix of output int and", "objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) for obj in [objInt, objAmp, objSlc1,", "''' Command Line Parser. 
''' parser = argparse.ArgumentParser( description='Generate offset", "intWidth = int(slcWidth / rgLooks) lines = min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp", "imageAmp def main(iargs=None): inps = cmdLineParse(iargs) img1 = isceobj.createImage() img1.load(inps.master", "offset field between two Sentinel swaths') parser.add_argument('-m', '--master', type=str, dest='master',", "of output int and amp files') parser.add_argument('-a', '--alks', type=int, dest='azlooks',", "objSlc2, objInt, objAmp) for obj in [objInt, objAmp, objSlc1, objSlc2]:", "crossmul.createcrossmul() objCrossmul.width = slcWidth objCrossmul.length = lines objCrossmul.LooksDown = azLooks", "logging import isce import isceobj from components.stdproc.stdproc import crossmul from", "createParser() return parser.parse_args(args=iargs) def run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks): objSlc1", "= azLooks objCrossmul.LooksAcross = rgLooks objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) for", "type=str, dest='prefix', default='crossmul', help='Prefix of output int and amp files')", "parser.parse_args(args=iargs) def run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks): objSlc1 = isceobj.createSlcImage()", "= int(slcWidth / rgLooks) lines = min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp =", "for obj in [objInt, objAmp, objSlc1, objSlc2]: obj.finalizeImage() return imageInt,", "looks') return parser def cmdLineParse(iargs = None): parser = createParser()", "img2 = isceobj.createImage() img2.load(inps.slave + '.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1, img2,", "def main(iargs=None): inps = cmdLineParse(iargs) img1 = isceobj.createImage() img1.load(inps.master +", "description='Generate offset field between two Sentinel swaths') parser.add_argument('-m', '--master', type=str,", "objSlc1.createImage() objSlc2 = isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage() 
slcWidth =", "+ '.xml') img2 = isceobj.createImage() img2.load(inps.slave + '.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True)", "imageSlc1.getWidth() intWidth = int(slcWidth / rgLooks) lines = min(imageSlc1.getLength(), imageSlc2.getLength())", "createParser(): ''' Command Line Parser. ''' parser = argparse.ArgumentParser( description='Generate", "help='Azimuth looks') parser.add_argument('-r', '--rlks', type=int, dest='rglooks', default=1, help='Range looks') return", "#right now imageSlc1 and 2 are just text files, need", "inps.prefix, inps.azlooks, inps.rglooks) if __name__ == '__main__': main() ''' Main", "default=1, help='Azimuth looks') parser.add_argument('-r', '--rlks', type=int, dest='rglooks', default=1, help='Range looks')", "return parser def cmdLineParse(iargs = None): parser = createParser() return", "resampInt = resampName + '.int' objInt = isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth)", "as IU def createParser(): ''' Command Line Parser. ''' parser", "= None): parser = createParser() return parser.parse_args(args=iargs) def run(imageSlc1, imageSlc2,", "None): parser = createParser() return parser.parse_args(args=iargs) def run(imageSlc1, imageSlc2, resampName,", "= isceobj.createImage() img1.load(inps.master + '.xml') img2 = isceobj.createImage() img2.load(inps.slave +", "objInt.setWidth(intWidth) imageInt = isceobj.createIntImage() IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write') objInt.createImage() objAmp =", "ImageUtil as IU def createParser(): ''' Command Line Parser. 
'''", "azLooks objCrossmul.LooksAcross = rgLooks objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) for obj", "run(img1, img2, inps.prefix, inps.azlooks, inps.rglooks) if __name__ == '__main__': main()", "objAmp.setAccessMode('write') objAmp.createImage() objCrossmul = crossmul.createcrossmul() objCrossmul.width = slcWidth objCrossmul.length =", "isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp) objAmp.setAccessMode('write') objAmp.createImage() objCrossmul = crossmul.createcrossmul() objCrossmul.width =", "field between two Sentinel swaths') parser.add_argument('-m', '--master', type=str, dest='master', required=True,", "def cmdLineParse(iargs = None): parser = createParser() return parser.parse_args(args=iargs) def", "'--slave', type=str, dest='slave', required=True, help='Slave image') parser.add_argument('-o', '--outdir', type=str, dest='prefix',", "type=int, dest='rglooks', default=1, help='Range looks') return parser def cmdLineParse(iargs =", "'--rlks', type=int, dest='rglooks', default=1, help='Range looks') return parser def cmdLineParse(iargs", "and 2 are just text files, need to open them", "text files, need to open them as image IU.copyAttributes(imageSlc1, objSlc1)", "python3 import os import argparse import logging import isce import", "objSlc2 = isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage() slcWidth = imageSlc1.getWidth()", "parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths')", "isce import isceobj from components.stdproc.stdproc import crossmul from iscesys.ImageUtil.ImageUtil import", "slcWidth objCrossmul.length = lines objCrossmul.LooksDown = azLooks objCrossmul.LooksAcross = rgLooks", "help='Slave image') parser.add_argument('-o', '--outdir', type=str, dest='prefix', default='crossmul', help='Prefix of output", "objAmp.setWidth(intWidth) imageAmp = isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp) 
objAmp.setAccessMode('write') objAmp.createImage() objCrossmul =", "= createParser() return parser.parse_args(args=iargs) def run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks):", "= isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp) objAmp.setAccessMode('write') objAmp.createImage() objCrossmul = crossmul.createcrossmul() objCrossmul.width", "return parser.parse_args(args=iargs) def run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks): objSlc1 =", "objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt = isceobj.createIntImage() IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write') objInt.createImage() objAmp", "two Sentinel swaths') parser.add_argument('-m', '--master', type=str, dest='master', required=True, help='Master image')", "imageInt, imageAmp def main(iargs=None): inps = cmdLineParse(iargs) img1 = isceobj.createImage()", "IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2 = isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read')", "obj.finalizeImage() return imageInt, imageAmp def main(iargs=None): inps = cmdLineParse(iargs) img1", "import ImageUtil as IU def createParser(): ''' Command Line Parser.", "'--alks', type=int, dest='azlooks', default=1, help='Azimuth looks') parser.add_argument('-r', '--rlks', type=int, dest='rglooks',", "= min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp = resampName + '.amp' resampInt =", "cmdLineParse(iargs = None): parser = createParser() return parser.parse_args(args=iargs) def run(imageSlc1,", "files') parser.add_argument('-a', '--alks', type=int, dest='azlooks', default=1, help='Azimuth looks') parser.add_argument('-r', '--rlks',", "+ '.int' objInt = isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt = isceobj.createIntImage()", "= isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage() 
slcWidth = imageSlc1.getWidth() intWidth", "open them as image IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2 =", "image IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2 = isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2)", "dest='master', required=True, help='Master image') parser.add_argument('-s', '--slave', type=str, dest='slave', required=True, help='Slave", "import logging import isce import isceobj from components.stdproc.stdproc import crossmul", "import isce import isceobj from components.stdproc.stdproc import crossmul from iscesys.ImageUtil.ImageUtil", "+ '.amp' resampInt = resampName + '.int' objInt = isceobj.createIntImage()", "argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') parser.add_argument('-m', '--master',", "rgLooks): objSlc1 = isceobj.createSlcImage() #right now imageSlc1 and 2 are", "imageAmp = isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp) objAmp.setAccessMode('write') objAmp.createImage() objCrossmul = crossmul.createcrossmul()", "from iscesys.ImageUtil.ImageUtil import ImageUtil as IU def createParser(): ''' Command", "required=True, help='Master image') parser.add_argument('-s', '--slave', type=str, dest='slave', required=True, help='Slave image')", "isceobj.createImage() img2.load(inps.slave + '.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1, img2, inps.prefix, inps.azlooks,", "parser = createParser() return parser.parse_args(args=iargs) def run(imageSlc1, imageSlc2, resampName, azLooks,", "obj in [objInt, objAmp, objSlc1, objSlc2]: obj.finalizeImage() return imageInt, imageAmp", "swaths') parser.add_argument('-m', '--master', type=str, dest='master', required=True, help='Master image') parser.add_argument('-s', '--slave',", "run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks): objSlc1 = isceobj.createSlcImage() #right now", 
"isceobj.createIntImage() IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write') objInt.createImage() objAmp = isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth)", "''' parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel", "isceobj from components.stdproc.stdproc import crossmul from iscesys.ImageUtil.ImageUtil import ImageUtil as", "as image IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2 = isceobj.createSlcImage() IU.copyAttributes(imageSlc2,", "= lines objCrossmul.LooksDown = azLooks objCrossmul.LooksAcross = rgLooks objCrossmul.crossmul(objSlc1, objSlc2,", "objCrossmul = crossmul.createcrossmul() objCrossmul.width = slcWidth objCrossmul.length = lines objCrossmul.LooksDown", "to open them as image IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2", "= resampName + '.amp' resampInt = resampName + '.int' objInt", "resampAmp = resampName + '.amp' resampInt = resampName + '.int'", "image') parser.add_argument('-s', '--slave', type=str, dest='slave', required=True, help='Slave image') parser.add_argument('-o', '--outdir',", "objSlc2]: obj.finalizeImage() return imageInt, imageAmp def main(iargs=None): inps = cmdLineParse(iargs)", "from components.stdproc.stdproc import crossmul from iscesys.ImageUtil.ImageUtil import ImageUtil as IU", "'.int' objInt = isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt = isceobj.createIntImage() IU.copyAttributes(objInt,", "cmdLineParse(iargs) img1 = isceobj.createImage() img1.load(inps.master + '.xml') img2 = isceobj.createImage()", "dest='azlooks', default=1, help='Azimuth looks') parser.add_argument('-r', '--rlks', type=int, dest='rglooks', default=1, help='Range", "import isceobj from components.stdproc.stdproc import crossmul from iscesys.ImageUtil.ImageUtil import ImageUtil", "help='Prefix of output int and amp 
files') parser.add_argument('-a', '--alks', type=int,", "'.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1, img2, inps.prefix, inps.azlooks, inps.rglooks) if __name__", "= rgLooks objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) for obj in [objInt,", "resampName + '.amp' resampInt = resampName + '.int' objInt =", "objInt = isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt = isceobj.createIntImage() IU.copyAttributes(objInt, imageInt)", "img2.load(inps.slave + '.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1, img2, inps.prefix, inps.azlooks, inps.rglooks)", "import crossmul from iscesys.ImageUtil.ImageUtil import ImageUtil as IU def createParser():", "Line Parser. ''' parser = argparse.ArgumentParser( description='Generate offset field between", "IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write') objInt.createImage() objAmp = isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp", "parser.add_argument('-r', '--rlks', type=int, dest='rglooks', default=1, help='Range looks') return parser def", "= cmdLineParse(iargs) img1 = isceobj.createImage() img1.load(inps.master + '.xml') img2 =", "parser.add_argument('-m', '--master', type=str, dest='master', required=True, help='Master image') parser.add_argument('-s', '--slave', type=str,", "isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp = isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp) objAmp.setAccessMode('write') objAmp.createImage()", "objCrossmul.LooksDown = azLooks objCrossmul.LooksAcross = rgLooks objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp)", "image') parser.add_argument('-o', '--outdir', type=str, dest='prefix', default='crossmul', help='Prefix of output int", "#!/usr/bin/env python3 import os import argparse import logging import isce", "crossmul from iscesys.ImageUtil.ImageUtil import ImageUtil as IU def 
createParser(): '''", "def createParser(): ''' Command Line Parser. ''' parser = argparse.ArgumentParser(", "objSlc2.setAccessMode('read') objSlc2.createImage() slcWidth = imageSlc1.getWidth() intWidth = int(slcWidth / rgLooks)", "and amp files') parser.add_argument('-a', '--alks', type=int, dest='azlooks', default=1, help='Azimuth looks')", "parser.add_argument('-s', '--slave', type=str, dest='slave', required=True, help='Slave image') parser.add_argument('-o', '--outdir', type=str,", "in [objInt, objAmp, objSlc1, objSlc2]: obj.finalizeImage() return imageInt, imageAmp def", "resampName, azLooks, rgLooks): objSlc1 = isceobj.createSlcImage() #right now imageSlc1 and", "default=1, help='Range looks') return parser def cmdLineParse(iargs = None): parser", "looks') parser.add_argument('-r', '--rlks', type=int, dest='rglooks', default=1, help='Range looks') return parser", "IU def createParser(): ''' Command Line Parser. ''' parser =", "lines = min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp = resampName + '.amp' resampInt", "'--master', type=str, dest='master', required=True, help='Master image') parser.add_argument('-s', '--slave', type=str, dest='slave',", "parser def cmdLineParse(iargs = None): parser = createParser() return parser.parse_args(args=iargs)", "+ '.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1, img2, inps.prefix, inps.azlooks, inps.rglooks) if", "'.xml') img2 = isceobj.createImage() img2.load(inps.slave + '.xml') os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) run(img1,", "components.stdproc.stdproc import crossmul from iscesys.ImageUtil.ImageUtil import ImageUtil as IU def", "'--outdir', type=str, dest='prefix', default='crossmul', help='Prefix of output int and amp", "output int and amp files') parser.add_argument('-a', '--alks', type=int, dest='azlooks', default=1,", "= isceobj.createIntImage() IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write') objInt.createImage() objAmp = 
isceobj.createAmpImage() objAmp.setFilename(resampAmp)", "now imageSlc1 and 2 are just text files, need to", "IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage() slcWidth = imageSlc1.getWidth() intWidth = int(slcWidth", "objInt.setAccessMode('write') objInt.createImage() objAmp = isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp = isceobj.createAmpImage()", "objAmp) for obj in [objInt, objAmp, objSlc1, objSlc2]: obj.finalizeImage() return", "inps = cmdLineParse(iargs) img1 = isceobj.createImage() img1.load(inps.master + '.xml') img2", "= isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp = isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp) objAmp.setAccessMode('write')", "objCrossmul.width = slcWidth objCrossmul.length = lines objCrossmul.LooksDown = azLooks objCrossmul.LooksAcross", "isceobj.createSlcImage() #right now imageSlc1 and 2 are just text files,", "objCrossmul.length = lines objCrossmul.LooksDown = azLooks objCrossmul.LooksAcross = rgLooks objCrossmul.crossmul(objSlc1,", "Sentinel swaths') parser.add_argument('-m', '--master', type=str, dest='master', required=True, help='Master image') parser.add_argument('-s',", "required=True, help='Slave image') parser.add_argument('-o', '--outdir', type=str, dest='prefix', default='crossmul', help='Prefix of", "= resampName + '.int' objInt = isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt", "type=str, dest='master', required=True, help='Master image') parser.add_argument('-s', '--slave', type=str, dest='slave', required=True,", "imageInt) objInt.setAccessMode('write') objInt.createImage() objAmp = isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp =", "main(iargs=None): inps = cmdLineParse(iargs) img1 = isceobj.createImage() img1.load(inps.master + '.xml')", "dest='prefix', default='crossmul', help='Prefix of 
output int and amp files') parser.add_argument('-a',", "imageSlc2.getLength()) resampAmp = resampName + '.amp' resampInt = resampName +", "imageSlc1 and 2 are just text files, need to open", "objAmp = isceobj.createAmpImage() objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp = isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp)", "dest='slave', required=True, help='Slave image') parser.add_argument('-o', '--outdir', type=str, dest='prefix', default='crossmul', help='Prefix", "'.amp' resampInt = resampName + '.int' objInt = isceobj.createIntImage() objInt.setFilename(resampInt)", "IU.copyAttributes(objAmp, imageAmp) objAmp.setAccessMode('write') objAmp.createImage() objCrossmul = crossmul.createcrossmul() objCrossmul.width = slcWidth", "resampName + '.int' objInt = isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt =", "= isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt = isceobj.createIntImage() IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write')", "objAmp.setFilename(resampAmp) objAmp.setWidth(intWidth) imageAmp = isceobj.createAmpImage() IU.copyAttributes(objAmp, imageAmp) objAmp.setAccessMode('write') objAmp.createImage() objCrossmul", "= argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') parser.add_argument('-m',", "rgLooks objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) for obj in [objInt, objAmp,", "= slcWidth objCrossmul.length = lines objCrossmul.LooksDown = azLooks objCrossmul.LooksAcross =", "[objInt, objAmp, objSlc1, objSlc2]: obj.finalizeImage() return imageInt, imageAmp def main(iargs=None):", "= isceobj.createSlcImage() #right now imageSlc1 and 2 are just text", "return imageInt, imageAmp def main(iargs=None): inps = cmdLineParse(iargs) img1 =", "int and amp files') parser.add_argument('-a', '--alks', type=int, dest='azlooks', default=1, help='Azimuth", "img1.load(inps.master + '.xml') img2 
= isceobj.createImage() img2.load(inps.slave + '.xml') os.makedirs(os.path.dirname(inps.prefix),", "azLooks, rgLooks): objSlc1 = isceobj.createSlcImage() #right now imageSlc1 and 2", "imageInt = isceobj.createIntImage() IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write') objInt.createImage() objAmp = isceobj.createAmpImage()", "exist_ok=True) run(img1, img2, inps.prefix, inps.azlooks, inps.rglooks) if __name__ == '__main__':", "inps.azlooks, inps.rglooks) if __name__ == '__main__': main() ''' Main driver.", "are just text files, need to open them as image", "objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage() slcWidth = imageSlc1.getWidth() intWidth = int(slcWidth /", "files, need to open them as image IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read')", "rgLooks) lines = min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp = resampName + '.amp'", "= crossmul.createcrossmul() objCrossmul.width = slcWidth objCrossmul.length = lines objCrossmul.LooksDown =", "help='Range looks') return parser def cmdLineParse(iargs = None): parser =", "import os import argparse import logging import isce import isceobj", "default='crossmul', help='Prefix of output int and amp files') parser.add_argument('-a', '--alks',", "argparse import logging import isce import isceobj from components.stdproc.stdproc import", "imageAmp) objAmp.setAccessMode('write') objAmp.createImage() objCrossmul = crossmul.createcrossmul() objCrossmul.width = slcWidth objCrossmul.length", "imageSlc2, resampName, azLooks, rgLooks): objSlc1 = isceobj.createSlcImage() #right now imageSlc1", "img2, inps.prefix, inps.azlooks, inps.rglooks) if __name__ == '__main__': main() '''", "type=str, dest='slave', required=True, help='Slave image') parser.add_argument('-o', '--outdir', type=str, dest='prefix', default='crossmul',", "objSlc1 = isceobj.createSlcImage() #right now imageSlc1 and 2 are just", "objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2 = 
isceobj.createSlcImage() IU.copyAttributes(imageSlc2, objSlc2) objSlc2.setAccessMode('read') objSlc2.createImage()", "them as image IU.copyAttributes(imageSlc1, objSlc1) objSlc1.setAccessMode('read') objSlc1.createImage() objSlc2 = isceobj.createSlcImage()", "objCrossmul.LooksAcross = rgLooks objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) for obj in", "amp files') parser.add_argument('-a', '--alks', type=int, dest='azlooks', default=1, help='Azimuth looks') parser.add_argument('-r',", "isceobj.createIntImage() objInt.setFilename(resampInt) objInt.setWidth(intWidth) imageInt = isceobj.createIntImage() IU.copyAttributes(objInt, imageInt) objInt.setAccessMode('write') objInt.createImage()", "objSlc1, objSlc2]: obj.finalizeImage() return imageInt, imageAmp def main(iargs=None): inps =", "min(imageSlc1.getLength(), imageSlc2.getLength()) resampAmp = resampName + '.amp' resampInt = resampName", "inps.rglooks) if __name__ == '__main__': main() ''' Main driver. '''", "Parser. ''' parser = argparse.ArgumentParser( description='Generate offset field between two", "parser.add_argument('-a', '--alks', type=int, dest='azlooks', default=1, help='Azimuth looks') parser.add_argument('-r', '--rlks', type=int,", "img1 = isceobj.createImage() img1.load(inps.master + '.xml') img2 = isceobj.createImage() img2.load(inps.slave", "os import argparse import logging import isce import isceobj from" ]